diff --git a/.github/workflows/dead-link-checker.yml b/.github/workflows/dead-link-checker.yml index 79ddae899..8de24aac8 100644 --- a/.github/workflows/dead-link-checker.yml +++ b/.github/workflows/dead-link-checker.yml @@ -1,6 +1,6 @@ name: Dead Link Check -on: [push, pull_request] +on: [push] jobs: dead-links-check: diff --git a/Dockerfile b/Dockerfile index 294e26a9e..50fd364a9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,8 @@ FROM harbor.local.hching.com/library/jdk:8u301 -ADD assembly-package/target/wedatasphere-exchangis-1.0.0-RC1.tar.gz /opt/wedatasphere-exchangis.tar.gz +ADD assembly-package/target/wedatasphere-exchangis-1.1.2.tar.gz /opt/wedatasphere-exchangis.tar.gz -RUN cd /opt/wedatasphere-exchangis.tar.gz/packages/ && tar -zxf exchangis-server_1.0.0-RC1.tar.gz && cd /opt/wedatasphere-exchangis.tar.gz/sbin +RUN cd /opt/wedatasphere-exchangis.tar.gz/packages/ && tar -zxf exchangis-server_1.1.2.tar.gz && cd /opt/wedatasphere-exchangis.tar.gz/sbin WORKDIR /opt/wedatasphere-exchangis.tar.gz/sbin diff --git a/README-ZH.md b/README-ZH.md index 3f348efa9..2264648a1 100644 --- a/README-ZH.md +++ b/README-ZH.md @@ -6,7 +6,7 @@ ## 介绍 -Exchangis 1.0.0 是微众银行大数据平台 WeDataSphere 与社区用户共同研发的的新版数据交换工具,支持异构数据源之间的结构化和非结构化数据传输同步。 +Exchangis是微众银行大数据平台 WeDataSphere 与社区用户共同研发的的新版数据交换工具,支持异构数据源之间的结构化和非结构化数据传输同步。 Exchangis 抽象了一套统一的数据源和同步作业定义插件,允许用户快速接入新的数据源,并只需在数据库中简单配置即可在页面中使用。 @@ -16,7 +16,7 @@ Exchangis 抽象了一套统一的数据源和同步作业定义插件,允许 ### 界面预览 -![image](https://user-images.githubusercontent.com/27387830/171488936-2cea3ee9-4ef7-4309-93e1-e3b697bd3be1.png) +![image](images/zh_CN/ch1/frontend_view.png) ## 核心特点 @@ -42,19 +42,22 @@ Exchangis 抽象了一套统一的数据源和同步作业定义插件,允许 ### 3. 与DSS工作流打通,一站式大数据开发的门户 - 实现DSS AppConn包括一级 SSO 规范,二级组织结构规范,三级开发流程规范在内的三级规范; - - 作为DSS工作流的数据交换节点,是整个工作流链路中的门户流程,为后续的工作流节点运行提供稳固的数据基础; +### 4. 
支持多种导数引擎 + +- 支持Sqoop和DataX引擎进行多种异构数据源之间的导数 + ## 整体设计 ### 架构设计 -![架构设计](https://user-images.githubusercontent.com/27387830/173026793-f1475803-9f85-4478-b566-1ad1d002cd8a.png) +![架构设计](images/zh_CN/ch1/home_page_zh.png) ## 相关文档 -[安装部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_deploy_cn.md) -[用户手册](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_user_manual_cn.md) +[安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md) +[用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md) ## 交流贡献 diff --git a/README.md b/README.md index 1c9d4fb0a..889504b5a 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ English | [中文](README-ZH.md) ## Introduction -Exchangis 1.0.0 is a new version of data exchange tool jointly developed by WeDataSphere, a big data platform of WeBank, and community users, which supports the synchronization of structured and unstructured data transmission between heterogeneous data sources. +Exchangis is a new version of data exchange tool jointly developed by WeDataSphere, a big data platform of WeBank, and community users, which supports the synchronization of structured and unstructured data transmission between heterogeneous data sources. Exchangis abstracts a unified set of data source and synchronization job definition plugins, allowing users to quickly access new data sources and use them on pages with simple configuration in the database. 
@@ -14,7 +14,7 @@ With the help of [Linkis](https://github.com/apache/incubator-linkis) computing ### Interface preview -![image](https://user-images.githubusercontent.com/27387830/171488936-2cea3ee9-4ef7-4309-93e1-e3b697bd3be1.png) +![image](images/zh_CN/ch1/frontend_view.png) ## Core characteristics @@ -47,13 +47,13 @@ With the help of [Linkis](https://github.com/apache/incubator-linkis) computing ### Architecture Design -![架构设计](images/en_US/ch1/architecture.png) +![架构设计](images/zh_CN/ch1/home_page_en.png) ## Documents -[Quick Deploy](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_deploy_en.md) -[User Manual](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_user_manual_en.md) +[Quick Deploy](docs/en_US/ch1/exchangis_deploy_en.md) +[User Manual](docs/en_US/ch1/exchangis_user_manual_en.md) ## Communication and contribution diff --git a/assembly-package/config/exchangis-server.properties b/assembly-package/config/dss-exchangis-server.properties similarity index 75% rename from assembly-package/config/exchangis-server.properties rename to assembly-package/config/dss-exchangis-server.properties index dfba5255e..a8aa9a830 100644 --- a/assembly-package/config/exchangis-server.properties +++ b/assembly-package/config/dss-exchangis-server.properties @@ -15,14 +15,15 @@ # # -wds.linkis.server.mybatis.datasource.url=jdbc:mysql://{IP}:{PORT}/{database}?useSSL=false&characterEncoding=UTF-8&allowMultiQueries=true - +wds.linkis.test.mode=false +wds.linkis.server.mybatis.datasource.url=jdbc:mysql://{IP}:{PORT}/{database}?useSSL=false&characterEncoding=UTF-8&allowMultiQueries=true&useAffectedRows=true wds.linkis.server.mybatis.datasource.username={username} - wds.linkis.server.mybatis.datasource.password={password} +wds.linkis.gateway.ip={LINKIS_IP} +wds.linkis.gateway.port={LINKIS_PORT} +wds.linkis.gateway.url=http://{LINKIS_IP}:{LINKIS_PORT}/ wds.linkis.log.clear=true - wds.linkis.server.version=v1 # 
datasource client @@ -34,28 +35,33 @@ wds.exchangis.datasource.client.dws.version=v1 # launcher client wds.exchangis.client.linkis.server-url=http://{LINKIS_IP}:{LINKIS_PORT}/ wds.exchangis.client.linkis.token.value=EXCHANGIS-AUTH -wds.exchangis.datasource.extension.dir=exchangis-extds +wds.exchangis.datasource.extension.dir=exchangis-extds/ ##restful wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.exchangis.datasource.server.restful.api,\ com.webank.wedatasphere.exchangis.project.server.restful,\ com.webank.wedatasphere.exchangis.job.server.restful -wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/framework/appconn/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/workflow/dao/impl/*.xml,\ -classpath*:com/webank/wedatasphere/exchangis/job/server/mapper/impl/*.xml,\ -classpath*:com/webank/wedatasphere/exchangis/project/server/mapper/impl/*.xml + +wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/exchangis/job/server/mapper/impl/*.xml,\ +classpath*:com/webank/wedatasphere/exchangis/project/server/mapper/impl/*.xml,\ +classpath*:com/webank/wedatasphere/exchangis/project/provider/mapper/impl/*.xml,\ +classpath*:com/webank/wedatasphere/exchangis/engine/server/mapper/*.xml wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.exchangis.dao,\ com.webank.wedatasphere.exchangis.project.server.mapper,\ + com.webank.wedatasphere.exchangis.project.provider.mapper,\ com.webank.wedatasphere.linkis.configuration.dao,\ - com.webank.wedatasphere.dss.framework.appconn.dao,\ - com.webank.wedatasphere.dss.workflow.dao,\ com.webank.wedatasphere.linkis.metadata.dao,\ com.webank.wedatasphere.exchangis.job.server.mapper,\ - com.webank.wedatasphere.exchangis.job.server.dao + com.webank.wedatasphere.exchangis.job.server.dao,\ + com.webank.wedatasphere.exchangis.engine.dao wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.min-occupy=0.25 
wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.max-occupy=0.5 #wds.exchangis.job.scheduler.group.max.running-jobs=4 wds.linkis-session.ticket.key=bdp-user-ticket-id +wds.exchangis.limit.interface.value=false +wds.exchangis.publicKeyStr= +wds.exchangis.privateKeyStr= diff --git a/assembly-package/config/exchangis.properties b/assembly-package/config/exchangis.properties deleted file mode 100644 index e69de29bb..000000000 diff --git a/assembly-package/config/log4j2.xml b/assembly-package/config/log4j2.xml index 70da2f238..121b48d1d 100644 --- a/assembly-package/config/log4j2.xml +++ b/assembly-package/config/log4j2.xml @@ -26,13 +26,22 @@ + filePattern="${sys:log.path}/$${date:yyyy-MM-dd}/${sys:serviceName}/exchangis-log-%d{yyyy-MM-dd}-%i.log"> + + + + + + + + diff --git a/assembly-package/config/application-eureka.yml b/assembly-package/config/transform-processor-templates/datax-processor.java similarity index 100% rename from assembly-package/config/application-eureka.yml rename to assembly-package/config/transform-processor-templates/datax-processor.java diff --git a/assembly-package/pom.xml b/assembly-package/pom.xml index 03f414f3c..45315a5fe 100644 --- a/assembly-package/pom.xml +++ b/assembly-package/pom.xml @@ -21,7 +21,7 @@ exchangis com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 assembly-package @@ -31,6 +31,7 @@ org.apache.maven.plugins maven-install-plugin + 2.4 true @@ -38,6 +39,7 @@ org.apache.maven.plugins maven-antrun-plugin + 1.3 package diff --git a/assembly-package/sbin/common.sh b/assembly-package/sbin/common.sh index 03d4e4666..8ee615b64 100644 --- a/assembly-package/sbin/common.sh +++ b/assembly-package/sbin/common.sh @@ -16,4 +16,4 @@ # declare -A MODULE_MAIN_CLASS -MODULE_MAIN_CLASS["exchangis-server"]="com.webank.wedatasphere.exchangis.server.boot.ExchangisServerApplication" +MODULE_MAIN_CLASS["dss-exchangis-main-server-dev"]="com.webank.wedatasphere.exchangis.server.boot.ExchangisServerApplication" diff --git 
a/assembly-package/sbin/daemon.sh b/assembly-package/sbin/daemon.sh index 40f64a78a..a21ccfab6 100644 --- a/assembly-package/sbin/daemon.sh +++ b/assembly-package/sbin/daemon.sh @@ -23,9 +23,8 @@ else source ./common.sh fi -MODULE_NAME="" usage(){ - echo "Usage is [start|stop|restart {service}]" + echo "Usage is [start|stop|restart {server}]" } start(){ @@ -41,7 +40,7 @@ stop(){ restart(){ launcher_stop $1 $2 if [[ $? -eq 0 ]]; then - sleep 2 + sleep 3 launcher_start $1 $2 fi } @@ -49,12 +48,14 @@ restart(){ COMMAND=$1 case $COMMAND in start|stop|restart) + load_env_definitions ${ENV_FILE} if [[ ! -z $2 ]]; then - MAIN_CLASS=${MODULE_MAIN_CLASS[${MODULE_DEFAULT_PREFIX}$2]} + SERVICE_NAME=${MODULE_DEFAULT_PREFIX}$2${MODULE_DEFAULT_SUFFIX} + MAIN_CLASS=${MODULE_MAIN_CLASS[${SERVICE_NAME}]} if [[ "x"${MAIN_CLASS} != "x" ]]; then - $COMMAND ${MODULE_DEFAULT_PREFIX}$2 ${MAIN_CLASS} + $COMMAND ${SERVICE_NAME} ${MAIN_CLASS} else - LOG ERROR "Cannot find the main class for [ ${MODULE_DEFAULT_PREFIX}$2 ]" + LOG ERROR "Cannot find the main class for [ ${SERVICE_NAME} ]" fi else usage diff --git a/assembly-package/sbin/env.properties b/assembly-package/sbin/env.properties index e69de29bb..f849b4fa9 100644 --- a/assembly-package/sbin/env.properties +++ b/assembly-package/sbin/env.properties @@ -0,0 +1,4 @@ +EXCHANGIS_CONF_PATH=/appcom/config/exchangis-config/background +EXCHANGIS_LOG_PATH=/appcom/logs/exchangis/background +MODULE_DEFAULT_PREFIX="dss-exchangis-main-" +MODULE_DEFAULT_SUFFIX="-dev" diff --git a/assembly-package/sbin/install.sh b/assembly-package/sbin/install.sh index 16f453870..a9f23aa66 100644 --- a/assembly-package/sbin/install.sh +++ b/assembly-package/sbin/install.sh @@ -123,57 +123,36 @@ interact_echo(){ done } +# Initalize database init_database(){ -BOOTSTRAP_PROP_FILE="${CONF_PATH}/exchangis-server.properties" -# Start to initalize database -if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then - `mysql --version >/dev/null 2>&1` - 
interact_echo "Do you want to initalize database with sql?" - if [ $? == 0 ]; then - LOG INFO "\033[1m Scan out mysql command, so begin to initalize the database\033[0m" - #interact_echo "Do you want to initalize database with sql: [${SQL_SOURCE_PATH}]?" - #if [ $? == 0 ]; then + BOOTSTRAP_PROP_FILE="${CONF_PATH}/dss-exchangis-server.properties" + if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then + `mysql --version >/dev/null 2>&1` DATASOURCE_URL="jdbc:mysql:\/\/${MYSQL_HOST}:${MYSQL_PORT}\/${DATABASE}\?useSSL=false\&characterEncoding=UTF-8\&allowMultiQueries=true" - mysql -h ${MYSQL_HOST} -P ${MYSQL_PORT} -u ${MYSQL_USERNAME} -p${MYSQL_PASSWORD} --default-character-set=utf8 -e \ - "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};" - #sed -ri "s![#]?(DB_HOST=)\S*!\1${HOST}!g" ${BOOTSTRAP_PROP_FILE} - #sed -ri "s![#]?(DB_PORT=)\S*!\1${PORT}!g" ${BOOTSTRAP_PROP_FILE} sed -ri "s![#]?(wds.linkis.server.mybatis.datasource.username=)\S*!\1${MYSQL_USERNAME}!g" ${BOOTSTRAP_PROP_FILE} sed -ri "s![#]?(wds.linkis.server.mybatis.datasource.password=)\S*!\1${MYSQL_PASSWORD}!g" ${BOOTSTRAP_PROP_FILE} sed -ri "s![#]?(wds.linkis.server.mybatis.datasource.url=)\S*!\1${DATASOURCE_URL}!g" ${BOOTSTRAP_PROP_FILE} - #fi - fi -fi + interact_echo "Do you want to initalize database with sql: [${SQL_SOURCE_PATH}]?" + if [ $? == 0 ]; then + LOG INFO "\033[1m Scan out mysql command, so begin to initalize the database\033[0m" + mysql -h ${MYSQL_HOST} -P ${MYSQL_PORT} -u ${MYSQL_USERNAME} -p${MYSQL_PASSWORD} --default-character-set=utf8 -e \ + "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};" + fi + fi } init_properties(){ -BOOTSTRAP_PROP_FILE="${CONF_PATH}/exchangis-server.properties" -APPLICATION_YML="${CONF_PATH}/application-exchangis.yml" -# Start to initalize propertis - #interact_echo "Do you want to initalize exchangis-server.properties?" - #if [ $? 
== 0 ]; then - - LINKIS_GATEWAY_URL="http:\/\/${LINKIS_GATEWAY_HOST}:${LINKIS_GATEWAY_PORT}\/" - - if [ "x${LINKIS_SERVER_URL}" == "x" ]; then - LINKIS_SERVER_URL="http://127.0.0.1:3306" - fi - if [ "x${LINKIS_SERVER_URL}" == "x" ]; then - LINKIS_SERVER_URL="http://127.0.0.1:3306" - fi + BOOTSTRAP_PROP_FILE="${CONF_PATH}/dss-exchangis-server.properties" + APPLICATION_YML="${CONF_PATH}/application-exchangis.yml" + LINKIS_GATEWAY_URL="http:\/\/${LINKIS_GATEWAY_HOST}:${LINKIS_GATEWAY_PORT}\/" + if [ "x${LINKIS_SERVER_URL}" == "x" ]; then + LINKIS_SERVER_URL="http://127.0.0.1:9001" + fi - sed -ri "s![#]?(wds.linkis.gateway.ip=)\S*!\1${LINKIS_GATEWAY_HOST}!g" ${BOOTSTRAP_PROP_FILE} - sed -ri "s![#]?(wds.linkis.gateway.port=)\S*!\1${LINKIS_GATEWAY_PORT}!g" ${BOOTSTRAP_PROP_FILE} - sed -ri "s![#]?(wds.linkis.gateway.url=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE} - sed -ri "s![#]?(wds.exchangis.datasource.client.serverurl=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE} - sed -ri "s![#]?(wds.exchangis.client.linkis.server-url=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE} - #sed -ri "s![#]?(wds.exchangis.datasource.client.authtoken.key=)\S*!\1${LINKIS_TOKEN}!g" ${BOOTSTRAP_PROP_FILE} - #sed -ri "s![#]?(wds.exchangis.datasource.client.authtoken.value=)\S*!\1${LINKIS_TOKEN}!g" ${BOOTSTRAP_PROP_FILE} - #sed -ri "s![#]?(wds.exchangis.client.linkis.token.value=)\S*!\1${LINKIS_TOKEN}!g" ${BOOTSTRAP_PROP_FILE} - sed -ri "s![#]?(wds.linkis.gateway.port=)\S*!\1${LINKIS_GATEWAY_PORT}!g" ${BOOTSTRAP_PROP_FILE} - sed -ri "s![#]?(port: )\S*!\1${EXCHANGIS_PORT}!g" ${APPLICATION_YML} - sed -ri "s![#]?(defaultZone: )\S*!\1${EUREKA_URL}!g" ${APPLICATION_YML} - #fi + sed -ri "s![#]?(wds.exchangis.datasource.client.serverurl=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE} + sed -ri "s![#]?(wds.exchangis.client.linkis.server-url=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE} + sed -ri "s![#]?(port: )\S*!\1${EXCHANGIS_PORT}!g" ${APPLICATION_YML} + sed 
-ri "s![#]?(defaultZone: )\S*!\1${EUREKA_URL}!g" ${APPLICATION_YML} } install_modules(){ diff --git a/assembly-package/sbin/launcher.sh b/assembly-package/sbin/launcher.sh index 60bc62364..50a79d279 100644 --- a/assembly-package/sbin/launcher.sh +++ b/assembly-package/sbin/launcher.sh @@ -20,11 +20,8 @@ DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) ENV_FILE="${DIR}/env.properties" SHELL_LOG="${DIR}/command.log" USER_DIR="${DIR}/../" -EXCHANGIS_CONF_PATH="${DIR}/../config" EXCHANGIS_LIB_PATH="${DIR}/../lib" -EXCHANGIS_LOG_PATH="${DIR}/../logs" EXCHANGIS_PID_PATH="${DIR}/../runtime" -MODULE_DEFAULT_PREFIX="exchangis-" # Default MAIN_CLASS="" DEBUG_MODE=False @@ -32,8 +29,6 @@ DEBUG_PORT="7006" SPRING_PROFILE="exchangis" SLEEP_TIMEREVAL_S=2 -CONF_PATH=${DIR}/../config - function LOG(){ currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"` echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG} @@ -91,8 +86,6 @@ load_env_definitions(){ fi } - - construct_java_command(){ verify_java_env if [[ "x${EXCHANGIS_CONF_PATH}" == "x" ]]; then @@ -119,7 +112,8 @@ construct_java_command(){ mkdir -p ${EXCHANGIS_PID_PATH} local classpath=${EXCHANGIS_CONF_PATH}":." 
local opts="" - classpath=${EXCHANGIS_LIB_PATH}/$1/*":"${classpath} + classpath=${EXCHANGIS_LIB_PATH}/"exchangis-server/*:"${classpath} + LOG INFO "classpath:"${classpath} if [[ "x${EXCHANGIS_JAVA_OPTS}" == "x" ]]; then # Use G1 garbage collector local opts="-Xms${HEAP_SIZE} -Xmx${HEAP_SIZE} -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 -XX:+UseG1GC -Xloggc:${EXCHANGIS_LOG_PATH}/$1-gc.log" @@ -139,6 +133,7 @@ construct_java_command(){ opts=${opts}" -Dlogging.level.reactor.ipc.netty.channel.CloseableContextHandler=off" opts=${opts}" -Duser.dir=${USER_DIR}" opts=${opts}" -classpath "${classpath} + LOG INFO "opts:"${opts} if [[ "x${JAVA_HOME}" != "x" ]]; then EXEC_JAVA=${JAVA_HOME}"/bin/java "${opts}" "$2 else @@ -178,7 +173,7 @@ wait_for_startup(){ return 0 fi sleep ${SLEEP_TIMEREVAL_S} - now_s=`date '+%s'` #计算当前时间时间戳 + now_s=`date '+%s'` done return 1 } @@ -199,7 +194,6 @@ wait_for_stop(){ # Input: $1:module_name, $2:main class launcher_start(){ - load_env_definitions ${ENV_FILE} LOG INFO "Launcher: launch to start server [ $1 ]" status_class $1 $2 if [[ $? -eq 0 ]]; then @@ -208,14 +202,17 @@ launcher_start(){ fi construct_java_command $1 $2 # Execute + echo ${EXEC_JAVA} LOG INFO ${EXEC_JAVA} nohup ${EXEC_JAVA} >/dev/null 2>&1 & LOG INFO "Launcher: waiting [ $1 ] to start complete ..." wait_for_startup 20 $1 $2 if [[ $? 
-eq 0 ]]; then LOG INFO "Launcher: [ $1 ] start success" - APPLICATION_YML="${CONF_PATH}/application-exchangis.yml" + LOG INFO ${EXCHANGIS_CONF_PATH} + APPLICATION_YML="${EXCHANGIS_CONF_PATH}/application-exchangis.yml" EUREKA_URL=`cat ${APPLICATION_YML} | grep Zone | sed -n '1p'` + echo "${EUREKA_URL}" LOG INFO "Please check exchangis server in EUREKA_ADDRESS: ${EUREKA_URL#*:} " else LOG ERROR "Launcher: [ $1 ] start fail over 20 seconds, please retry it" @@ -224,7 +221,6 @@ launcher_start(){ # Input: $1:module_name, $2:main class launcher_stop(){ - load_env_definitions ${ENV_FILE} LOG INFO "Launcher: stop the server [ $1 ]" local p="" local pid_file_path=${EXCHANGIS_PID_PATH}/$1.pid @@ -247,11 +243,11 @@ launcher_stop(){ *) kill -SIGTERM "${p}" ;; esac LOG INFO "Launcher: waiting [ $1 ] to stop complete ..." - wait_for_stop 20 + wait_for_stop 20 $1 $2 if [[ $? -eq 0 ]]; then LOG INFO "Launcher: [ $1 ] stop success" else LOG ERROR "Launcher: [ $1 ] stop exceeded over 20s " >&2 return 1 fi -} \ No newline at end of file +} diff --git a/db/1.1.1/exchangis_ddl.sql b/db/1.1.1/exchangis_ddl.sql new file mode 100644 index 000000000..1002aa86b --- /dev/null +++ b/db/1.1.1/exchangis_ddl.sql @@ -0,0 +1,88 @@ +-- exchangis_job_func definition +DROP TABLE IF EXISTS `exchangis_job_func`; +CREATE TABLE `exchangis_job_func` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `func_type` varchar(50) NOT NULL, + `func_name` varchar(100) NOT NULL, + `tab_name` varchar(50) NOT NULL COMMENT 'Tab', + `name_dispaly` varchar(100) DEFAULT NULL, + `param_num` int(11) DEFAULT '0', + `ref_name` varchar(100) DEFAULT NULL, + `description` varchar(200) DEFAULT NULL, + `modify_time` datetime DEFAULT NULL, + `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `job_func_tab_name_idx` (`tab_name`,`func_name`) +) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8; + +-- exchangis_job_func_params definition +DROP TABLE IF EXISTS `exchangis_job_func_params`; 
+CREATE TABLE IF NOT EXISTS `exchangis_job_func_params`( + `func_id` INT(11) NOT NULL, + `param_name` VARCHAR(100) NOT NULL, + `order` INT(11) DEFAULT 0, + `name_display` VARCHAR(100), + `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY(`func_id`, `param_name`) +)Engine=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_param_config definition +DROP TABLE IF EXISTS `exchangis_job_param_config`; +CREATE TABLE `exchangis_job_param_config` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `config_key` varchar(64) NOT NULL, + `config_name` varchar(64) NOT NULL, + `config_direction` varchar(16) DEFAULT NULL, + `type` varchar(32) NOT NULL, + `ui_type` varchar(32) DEFAULT NULL, + `ui_field` varchar(64) DEFAULT NULL, + `ui_label` varchar(32) DEFAULT NULL, + `unit` varchar(32) DEFAULT NULL, + `required` bit(1) DEFAULT b'0', + `value_type` varchar(32) DEFAULT NULL, + `value_range` varchar(255) DEFAULT NULL, + `default_value` varchar(255) DEFAULT NULL, + `validate_type` varchar(64) DEFAULT NULL, + `validate_range` varchar(64) DEFAULT NULL, + `validate_msg` varchar(255) DEFAULT NULL, + `is_hidden` bit(1) DEFAULT NULL, + `is_advanced` bit(1) DEFAULT NULL, + `source` varchar(255) DEFAULT NULL, + `level` tinyint(4) DEFAULT NULL, + `treename` varchar(32) DEFAULT NULL, + `sort` int(11) DEFAULT NULL, + `description` varchar(255) DEFAULT NULL, + `status` tinyint(4) DEFAULT NULL, + `ref_id` bigint(20) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=32 DEFAULT CHARSET=utf8; + +-- exchangis_engine_settings definition +DROP TABLE IF EXISTS `exchangis_engine_settings`; +CREATE TABLE `exchangis_engine_settings` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `engine_name` varchar(50) NOT NULL, + `engine_desc` varchar(500) NOT NULL, + `engine_settings_value` text, + `engine_direction` varchar(255) NOT NULL, + `res_loader_class` varchar(255), + `res_uploader_class` varchar(255), + `modify_time` datetime DEFAULT NULL, + `create_time` datetime NOT NULL 
DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `engine_setting_idx` (`engine_name`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_transform_rule +DROP TABLE IF EXISTS `exchangis_job_transform_rule`; +CREATE TABLE `exchangis_job_transform_rule` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `rule_name` varchar(100) NOT NULL DEFAULT 'transform_rule', + `rule_type` varchar(64) NOT NULL DEFAULT 'DEF', + `rule_source` varchar(600) DEFAULT '{}', + `data_source_type` varchar(64) NOT NULL, + `engine_type` varchar(32), + `direction` varchar(32) NOT NULL DEFAULT 'NONE', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; \ No newline at end of file diff --git a/db/1.1.1/exchangis_dml.sql b/db/1.1.1/exchangis_dml.sql new file mode 100644 index 000000000..3e546d667 --- /dev/null +++ b/db/1.1.1/exchangis_dml.sql @@ -0,0 +1,79 @@ +-- job_func records +INSERT INTO `exchangis_job_func`(func_type,func_name,tab_name,name_dispaly,param_num,ref_name,description,modify_time) VALUES +('TRANSFORM','dx_substr','DATAX',NULL,2,NULL,NULL,NULL) +,('TRANSFORM','dx_pad','DATAX',NULL,3,NULL,NULL,NULL) +,('TRANSFORM','dx_replace','DATAX',NULL,3,NULL,NULL,NULL) +,('VERIFY','like','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','not like','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','>','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','<','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','!=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','>=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('TRANSFORM','dx_precision','DATAX',NULL,1,NULL,NULL,NULL) +; + +-- job_func_params records +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(1, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) 
VALUES(1, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padType', 'padType(r or l)', 0) ON DUPLICATE KEY UPDATE `name_display` = 'padType(r or l)'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padString', 'padString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'padString'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'replaceString', 'replaceString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'replaceString'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(4, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(5, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(6, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(7, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(8, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; 
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(9, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(10, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; + +-- job_param_config records +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('setting.speed.byte','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','5','REGEX','^[1-9]d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1,NULL) +,('setting.speed.record','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','100','REGEX','^[1-9]d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1,NULL) +,('setting.speed.channel','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1,NULL) +,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1,NULL) +,('setting.errorLimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',0,'NUMBER','','','REGEX','^[0-9]d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1,NULL) +,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1,NULL) +,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1,NULL) +,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[sS]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL) 
+,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[sS]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +; +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[sS]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量大小','DATAX-SINK','ELASTICSEARCH','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]d*$','批量大小输入错误',0,0,'',1,'',1,'',1,NULL) +,('query','query条件','DATAX-SOURCE','MONGODB','INPUT','query','query条件','',0,'VARCHAR','','','REGEX','^[sS]{0,500}$','query条件输入过长',0,0,'',1,'',2,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','MONGODB','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","REPLACE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量大小','DATAX-SINK','MONGODB','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL) +,('transferMode','传输方式','DATAX-SOURCE','HIVE','OPTION','transferMode','传输方式','',1,'OPTION','["二进制","记录"]','二进制','','','该传输方式不可用',0,0,'',1,'',1,'',1,NULL) +,('nullFormat','空值字符','DATAX-SOURCE','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[sS]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) 
+,('writeMode','写入方式','DATAX-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["append","truncate"]','append','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +; +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('nullFormat','空值字符','DATAX-SINK','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[sS]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) +,('nullFormat','空值字符','DATAX-SINK','ELASTICSEARCH','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[sS]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) +; +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('where','WHERE条件','SOURCE','ORACLE','INPUT','where','WHERE条件',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,NULL,1,'',2,NULL,1,NULL) +,('writeMode','写入方式','DATAX-SINK','ORACLE','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1,NULL) +; + +-- engine_settings records +INSERT INTO `exchangis_engine_settings` (id, engine_name, engine_desc, engine_settings_value, engine_direction, res_loader_class, res_uploader_class, modify_time, create_time) VALUES +(1, 'datax', 'datax sync engine', '{}', 
'mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle', 'com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader', NULL, NULL, '2022-08-09 18:20:51.0'), +(2, 'sqoop', 'hadoop tool', '{}', 'mysql->hive,hive->mysql', '', NULL, NULL, '2022-08-09 18:20:51.0'); + +-- exchangis_job_transform_rule records +INSERT INTO `exchangis_job_transform_rule` (rule_name,rule_type,rule_source,data_source_type,engine_type,direction) VALUES +('es_with_post_processor','DEF','{"types": ["MAPPING", "PROCESSOR"]}','ELASTICSEARCH',NULL,'SINK') +,('es_fields_not_editable','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false}','ELASTICSEARCH',NULL,'SINK') +,('hive_sink_not_access','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false, "fieldAddEnable": false}','HIVE',NULL,'SINK') +,('mongo_field_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH"}','MONGODB',NULL,'SINK') +,('mysql_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": false}','MYSQL',NULL,'SOURCE') +; \ No newline at end of file diff --git a/db/1.1.2/exchangis_ddl.sql b/db/1.1.2/exchangis_ddl.sql new file mode 100644 index 000000000..3609cadfd --- /dev/null +++ b/db/1.1.2/exchangis_ddl.sql @@ -0,0 +1 @@ +ALTER TABLE exchangis_job_entity MODIFY COLUMN name varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL; \ No newline at end of file diff --git a/db/exchangis_ddl.sql b/db/exchangis_ddl.sql index fa99232bb..c04796624 100644 --- a/db/exchangis_ddl.sql +++ b/db/exchangis_ddl.sql @@ -1,4 +1,4 @@ --- exchangis_v4.exchangis_job_ds_bind definition +-- exchangis_job_ds_bind definition DROP TABLE IF EXISTS `exchangis_job_ds_bind`; CREATE TABLE `exchangis_job_ds_bind` ( `id` bigint(20) 
NOT NULL AUTO_INCREMENT, @@ -9,11 +9,12 @@ CREATE TABLE `exchangis_job_ds_bind` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=59575 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- exchangis_v4.exchangis_job_entity definition + +-- exchangis_job_entity definition DROP TABLE IF EXISTS `exchangis_job_entity`; CREATE TABLE `exchangis_job_entity` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL, + `name` varchar(255) NOT NULL, `create_time` datetime DEFAULT NULL, `last_update_time` datetime(3) DEFAULT NULL, `engine_type` varchar(45) DEFAULT '', @@ -30,7 +31,8 @@ CREATE TABLE `exchangis_job_entity` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=5793 DEFAULT CHARSET=utf8; --- exchangis_v4.exchangis_job_param_config definition + +-- exchangis_job_param_config definition DROP TABLE IF EXISTS `exchangis_job_param_config`; CREATE TABLE `exchangis_job_param_config` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, @@ -57,40 +59,43 @@ CREATE TABLE `exchangis_job_param_config` ( `sort` int(11) DEFAULT NULL, `description` varchar(255) DEFAULT NULL, `status` tinyint(4) DEFAULT NULL, + `ref_id` bigint(20) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=32 DEFAULT CHARSET=utf8; --- exchangis_v4.exchangis_project_info definition +-- exchangis_project_info definition DROP TABLE IF EXISTS `exchangis_project_info`; +-- udes_gzpc_pub_sit_01.exchangis_project_info definition CREATE TABLE `exchangis_project_info` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `name` varchar(64) NOT NULL, `description` varchar(255) DEFAULT NULL, - `create_time` datetime DEFAULT NULL, - `last_update_time` datetime(3) DEFAULT NULL, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `last_update_time` datetime DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `create_user` varchar(64) DEFAULT NULL, `last_update_user` varchar(64) DEFAULT NULL, `project_labels` varchar(255) DEFAULT NULL, `domain` varchar(32) DEFAULT NULL, - `exec_users` varchar(255) DEFAULT NULL, - 
`view_users` varchar(255) DEFAULT NULL, - `edit_users` varchar(255) DEFAULT NULL, + `exec_users` varchar(255) DEFAULT '', + `view_users` varchar(255) DEFAULT '', + `edit_users` varchar(255) DEFAULT '', `source` text, PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=1497870871035973934 DEFAULT CHARSET=utf8; +) ENGINE=InnoDB AUTO_INCREMENT=1497870871035974171 DEFAULT CHARSET=utf8; --- exchangis_v4.exchangis_project_user definition +-- exchangis_project_user definition DROP TABLE IF EXISTS `exchangis_project_user`; CREATE TABLE `exchangis_project_user` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `project_id` bigint(20) NOT NULL, `priv_user` varchar(32) COLLATE utf8_bin DEFAULT NULL, `priv` int(20) DEFAULT NULL, - `last_update_time` datetime DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; + `last_update_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `exchangis_project_user_un` (`project_id`) +) ENGINE=InnoDB AUTO_INCREMENT=844 DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; --- exchangis_v4.exchangis_launchable_task definition +-- exchangis_launchable_task definition DROP TABLE IF EXISTS `exchangis_launchable_task`; CREATE TABLE `exchangis_launchable_task` ( `id` bigint(13) NOT NULL, @@ -102,13 +107,13 @@ CREATE TABLE `exchangis_launchable_task` ( `execute_user` varchar(50) DEFAULT '', `linkis_job_name` varchar(100) NOT NULL, `linkis_job_content` text NOT NULL, - `linkis_params` varchar(255) DEFAULT NULL, + `linkis_params` text DEFAULT NULL, `linkis_source` varchar(64) DEFAULT NULL, `labels` varchar(64) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; --- exchangis_v4.exchangis_launched_job_entity definition +-- exchangis_launched_job_entity definition DROP TABLE IF EXISTS `exchangis_launched_job_entity`; CREATE TABLE `exchangis_launched_job_entity` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, @@ -132,7 +137,7 @@ CREATE TABLE 
`exchangis_launched_job_entity` ( UNIQUE KEY `job_execution_id_UNIQUE` (`job_execution_id`) ) ENGINE=InnoDB AUTO_INCREMENT=8380 DEFAULT CHARSET=utf8; --- exchangis_v4.exchangis_launched_task_entity definition +-- exchangis_launched_task_entity definition DROP TABLE IF EXISTS `exchangis_launched_task_entity`; CREATE TABLE `exchangis_launched_task_entity` ( `id` bigint(20) NOT NULL, @@ -158,19 +163,91 @@ CREATE TABLE `exchangis_launched_task_entity` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -INSERT INTO exchangis_job_param_config (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status) VALUES -('setting.speed.bytes','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','','REGEX','^[1-9]\\d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1) -,('setting.speed.records','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1) -,('setting.max.parallelism','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1) -,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1) -,('setting.errorlimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',1,'NUMBER','','','REGEX','^[1-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1) -,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1) -,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1) 
-,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1) -,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1) -,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1) -; -INSERT INTO exchangis_job_param_config (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status) VALUES -('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,NULL,1,NULL,1) -,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1) -; \ No newline at end of file +-- exchangis_job_func definition +DROP TABLE IF EXISTS `exchangis_job_func`; +CREATE TABLE `exchangis_job_func` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `func_type` varchar(50) NOT NULL, + `func_name` varchar(100) NOT NULL, + `tab_name` varchar(50) NOT NULL COMMENT 'Tab', + `name_dispaly` varchar(100) DEFAULT NULL, + `param_num` int(11) DEFAULT '0', + `ref_name` varchar(100) DEFAULT NULL, + `description` varchar(200) DEFAULT NULL, + `modify_time` datetime DEFAULT NULL, + `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `job_func_tab_name_idx` (`tab_name`,`func_name`) +) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8; + +-- exchangis_job_func_params definition +DROP TABLE IF EXISTS 
`exchangis_job_func_params`; +CREATE TABLE IF NOT EXISTS `exchangis_job_func_params`( + `func_id` INT(11) NOT NULL, + `param_name` VARCHAR(100) NOT NULL, + `order` INT(11) DEFAULT 0, + `name_display` VARCHAR(100), + `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY(`func_id`, `param_name`) +)Engine=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_engine_resources definition +DROP TABLE IF EXISTS `exchangis_engine_resources`; +CREATE TABLE `exchangis_engine_resources` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `engine_type` varchar(50) NOT NULL, + `resource_name` varchar(100) NOT NULL, + `resource_type` varchar(50) NOT NULL COMMENT 'resource type' DEFAULT 'file', + `resource_path` varchar(255) NOT NULL, + `store_uri` varchar(500) NOT NULL, + `create_user` varchar(50) NOT NULL, + `modify_time` datetime DEFAULT NULL, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `engine_res_idx` (`engine_type`,`resource_path`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_engine_settings definition +DROP TABLE IF EXISTS `exchangis_engine_settings`; +CREATE TABLE `exchangis_engine_settings` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `engine_name` varchar(50) NOT NULL, + `engine_desc` varchar(500) NOT NULL, + `engine_settings_value` text, + `engine_direction` varchar(255) NOT NULL, + `res_loader_class` varchar(255), + `res_uploader_class` varchar(255), + `modify_time` datetime DEFAULT NULL, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `engine_setting_idx` (`engine_name`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_transform_rule +DROP TABLE IF EXISTS `exchangis_job_transform_rule`; +CREATE TABLE `exchangis_job_transform_rule` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `rule_name` varchar(100) NOT NULL DEFAULT 'transform_rule', + `rule_type` varchar(64) NOT NULL DEFAULT 'DEF', + `rule_source` varchar(600) DEFAULT '{}', + 
`data_source_type` varchar(64) NOT NULL, + `engine_type` varchar(32), + `direction` varchar(32) NOT NULL DEFAULT 'NONE', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_transform_processor +DROP TABLE IF EXISTS `exchangis_job_transform_processor`; +CREATE TABLE `exchangis_job_transform_processor` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `job_id` bigint(20) NOT NULL, + `code_content` text DEFAULT NULL, + `code_language` varchar(32) NOT NULL DEFAULT 'java', + `code_bml_resourceId` varchar(255) COMMENT 'BML resource id', + `code_bml_version` varchar(255) COMMENT 'BML version', + `creator` varchar(50) NOT NULL COMMENT 'Owner of processor', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; \ No newline at end of file diff --git a/db/exchangis_dml.sql b/db/exchangis_dml.sql index 6ea326fb9..2e6bee29e 100644 --- a/db/exchangis_dml.sql +++ b/db/exchangis_dml.sql @@ -1,17 +1,78 @@ --- 插入 job_param_config 记录 -INSERT INTO exchangis_job_param_config (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status) VALUES -('setting.speed.bytes','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','','REGEX','^[1-9]\\d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1) -,('setting.speed.records','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1) -,('setting.max.parallelism','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1) 
-,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1) -,('setting.errorlimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',1,'NUMBER','','','REGEX','^[1-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1) -,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1) -,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1) -,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1) -,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1) -,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1) +-- job_func records +INSERT INTO `exchangis_job_func`(func_type,func_name,tab_name,name_dispaly,param_num,ref_name,description,modify_time) VALUES +('TRANSFORM','dx_substr','DATAX',NULL,2,NULL,NULL,NULL) +,('TRANSFORM','dx_pad','DATAX',NULL,3,NULL,NULL,NULL) +,('TRANSFORM','dx_replace','DATAX',NULL,3,NULL,NULL,NULL) +,('VERIFY','like','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','not like','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','>','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','<','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','!=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','>=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('TRANSFORM','dx_precision','DATAX',NULL,1,NULL,NULL,NULL) ; 
-INSERT INTO exchangis_job_param_config (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status) VALUES -('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,NULL,1,NULL,1) -,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1) -; \ No newline at end of file + +-- job_func_params records +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(1, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(1, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padType', 'padType(r or l)', 0) ON DUPLICATE KEY UPDATE `name_display` = 'padType(r or l)'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padString', 'padString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'padString'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'length', 'length', 1) ON DUPLICATE KEY UPDATE 
`name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'replaceString', 'replaceString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'replaceString'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(4, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(5, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(6, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(7, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(8, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(9, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(10, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; + +-- job_param_config records +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('setting.speed.byte','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','5','REGEX','^[1-9]\\d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1,NULL) 
+,('setting.speed.record','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','100','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1,NULL) +,('setting.speed.channel','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1,NULL) +,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1,NULL) +,('setting.errorLimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',0,'NUMBER','','','REGEX','^[0-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1,NULL) +,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1,NULL) +,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1,NULL) +,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL) +,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +; +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES 
+('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量大小','DATAX-SINK','ELASTICSEARCH','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',1,'',1,NULL) +,('query','query条件','DATAX-SOURCE','MONGODB','INPUT','query','query条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','query条件输入过长',0,0,'',1,'',2,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','MONGODB','OPTION','writeMode','写入方式','',1,'OPTION','["insert","replace"]','insert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量大小','DATAX-SINK','MONGODB','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL) +,('transferMode','传输方式','DATAX-SOURCE','HIVE','OPTION','transferMode','传输方式','',1,'OPTION','["二进制","记录"]','二进制','','','该传输方式不可用',0,0,'',1,'',1,'',1,NULL) +,('nullFormat','空值字符','DATAX-SOURCE','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) +,('writeMode','写入方式','DATAX-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["append","truncate"]','append','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +; +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES 
+('nullFormat','空值字符','DATAX-SINK','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) +,('nullFormat','空值字符','DATAX-SINK','ELASTICSEARCH','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) +,('where','WHERE条件','SOURCE','ORACLE','INPUT','where','WHERE条件',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,NULL,1,'',2,NULL,1,NULL) +,('writeMode','写入方式','DATAX-SINK','ORACLE','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1,NULL) +; + +-- engine_settings records +INSERT INTO `exchangis_engine_settings` (id, engine_name, engine_desc, engine_settings_value, engine_direction, res_loader_class, res_uploader_class, modify_time) VALUES +(1, 'datax', 'datax sync engine', '{}', 'mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle', 'com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader', NULL, '2022-08-09 18:20:51.0'), +(2, 'sqoop', 'hadoop tool', '{}', 'mysql->hive,hive->mysql', '', NULL, NULL); + +-- exchangis_job_transform_rule records +INSERT INTO `exchangis_job_transform_rule` (rule_name,rule_type,rule_source,data_source_type,engine_type,direction) VALUES +('es_with_post_processor','DEF','{"types": ["MAPPING", "PROCESSOR"]}','ELASTICSEARCH',NULL,'SINK') +,('es_fields_not_editable','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false}','ELASTICSEARCH',NULL,'SINK') +,('hive_sink_not_access','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false, "fieldAddEnable": false}','HIVE',NULL,'SINK') +,('mongo_field_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH"}','MONGODB',NULL,'SINK')
+,('mysql_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": false}','MYSQL',NULL,'SOURCE') +; + diff --git a/docs/en_US/ch1/component_upgrade_en.md b/docs/en_US/ch1/component_upgrade_en.md index 7f9e69d95..3bc6e3b47 100644 --- a/docs/en_US/ch1/component_upgrade_en.md +++ b/docs/en_US/ch1/component_upgrade_en.md @@ -1,7 +1,7 @@ # Exchangis Component Upgrade Documentation -This article mainly introduces the upgrade steps for adapting DSS1.1.0 and Linkis1.1.1 on the basis of the original installation of the Exchangis service. The biggest difference between the Exchangis1.0.0 and the Exchangis1.0.0-rc1 version is the installation of the ExchangisAppconn, which needs to be replaced by the entire Exchangisappconn. and load### 1.升级Exchangis前的工作 -Before you upgrade Exchangis, please follow the[DSS1.1.0Install and deploy documentation](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.1-rc/docs/zh_CN/ch1/exchangis_deploy_cn.md) -and [Linkis1.1.1Install and deploy documentation](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.1-rc/docs/zh_CN/ch1/exchangis_deploy_cn.md)Complete the installation and upgrade of DSS and Linkis +This article mainly introduces the upgrade steps for adapting DSS1.1.2 and Linkis1.4.0 on the basis of the original installation of the Exchangis service. The biggest difference between the Exchangis1.1.2 and the Exchangis1.0.0 version is the installation of the ExchangisAppconn, which needs to be replaced by the entire Exchangisappconn. 
and loaded. ### 1. Preparation before upgrading Exchangis +Before you upgrade Exchangis, please follow the [DSS1.1.2 install and deploy documentation](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) +and [Linkis1.4.0 install and deploy documentation](https://linkis.staged.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick) to complete the installation and upgrade of DSS and Linkis ### 2.Exchangis upgrade steps @@ -13,7 +13,7 @@ Go to the following directory and find exchangis appconn folder and delete: ``` #### 2)Download binary package -We provide the upgrade material package of ExchangisAppconn, which you can download and use directly.[Click to jump Release interface](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.0.0-rc/exchangis-appconn.zip) +We provide the upgrade material package of ExchangisAppconn, which you can download and use directly. [Click to jump to the Release interface](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.1.2/Exchangis1.1.2_install_package.zip) #### 3) Compile and package @@ -72,4 +72,4 @@ After the installation and deployment of exchangis-appconn is completed, you can 2. Check whether the project is created synchronously on the exchange side. If the creation is successful, the appconn installation is successful.
![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png) -For more usage, please refer to[Exchangis1.0 User Manual](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.1-rc/docs/zh_CN/ch1/exchangis_user_manual_cn.md) +For more usage, please refer to[Exchangis User Manual](docs/zh_CN/ch1/exchangis_user_manual_cn.md) diff --git a/docs/en_US/ch1/exchangis_appconn_deploy_en.md b/docs/en_US/ch1/exchangis_appconn_deploy_en.md index f42a85742..1b195c0c0 100644 --- a/docs/en_US/ch1/exchangis_appconn_deploy_en.md +++ b/docs/en_US/ch1/exchangis_appconn_deploy_en.md @@ -3,7 +3,7 @@ This paper mainly introduces the deployment, configuration and installation of ExchangisAppConn in DSS(DataSphere Studio)1.0.1. ### 1. Preparations for the deployment of ExchangisAppConn -Before you deploy ExchangisAppConn, please follow the [Exchangis1.0.0 to install the deployment document](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_deploy_en.md) to complete the installation of Exchangis1.0.0 and other related components, and ensure that the basic functions of the project are available. +Before you deploy ExchangisAppConn, please follow the [Exchangis to install the deployment document](docs/en_US/ch1/exchangis_deploy_en.md) to complete the installation of Exchangis and other related components, and ensure that the basic functions of the project are available. ### 2. Download and compilation of the ExchangisAppConn plugin #### 1) Download binary package @@ -67,7 +67,7 @@ After the exchangis-appconn is installed and deployed, the following steps can b 2. Check whether the project is created synchronously on Exchangis. 
Successful creation means successful installation of appconn ![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png) -For more operation, please refer to [Exchangis 1.0 User Manual](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_user_manual_en.md) +For more operation, please refer to [Exchangis User Manual](docs/zh_CN/ch1/exchangis_user_manual_cn.md) ### 5.Exchangis AppConn installation principle diff --git a/docs/en_US/ch1/exchangis_datasource_en.md b/docs/en_US/ch1/exchangis_datasource_en.md index 394eae7cb..ffbffdf98 100644 --- a/docs/en_US/ch1/exchangis_datasource_en.md +++ b/docs/en_US/ch1/exchangis_datasource_en.md @@ -14,7 +14,7 @@ In order to build a common data source module, the data source module is mainly Figure 2-1 Overall Architecture Design -## 3、Detailed explanation of modules +## 3、Detailed explanation of modules ### 3.1 datasource-server @@ -106,7 +106,7 @@ Batch import and export of datasource configuration. Description: -The background needs to integrate it with the labeling function of Linkis1.1.1, and give the datasource a labeling relationship. +The background needs to integrate it with the labeling function of Linkis1.4.0, and give the datasource a labeling relationship. Process design: diff --git a/docs/en_US/ch1/exchangis_datax_deploy_en.md b/docs/en_US/ch1/exchangis_datax_deploy_en.md new file mode 100644 index 000000000..7458e8212 --- /dev/null +++ b/docs/en_US/ch1/exchangis_datax_deploy_en.md @@ -0,0 +1,84 @@ +# DataX engine uses documentation + +### Prepare the environment + +The DataX engine is an indispensable component for executing Exchangis data synchronization tasks. Data synchronization tasks can be performed only after the DataX engine is installed and deployed. Also, ensure that DataX is installed on the deployed machine. 
+ +Before you install and deploy DataX engine, please complete the installation of Exchangis and related components according to the [Exchangis installation and deployment document](docs/en_US/ch1/exchangis_deploy_en.md), and ensure that the basic functions of the project are available. + +It is strongly recommended that you use the native DataX to perform the test task on this node before performing the DataX task, so as to check whether the environment of this node is normal. + +| Environment variable name | Environment variable content | Remark | +| :-----------------------: | :--------------------------: | ------------ | +| JAVA_HOME | JDK installation path | Required | +| DATAX_HOME | DataX installation path | Not Required | +| DATAX_CONF_DIR | DataX config path | Not Required | + +### Prepare installation package + +#### 1)Download binary package + +Exchangis1.1.2 and Linkis 1.4.0 support the mainstream DataX versions 1.4.6 and 1.4.7, and later versions may need to modify some codes for recompilation. + +[Click to jump to Release interface](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.1.2) + +#### 2)Compile and package + +If you want to develop and compile datax engine yourself, the specific compilation steps are as follows: + +1.clone Exchangis's source code + +2.Under the exchangis-plugins module, find the datax engine and compile datax separately, as follows : + +``` +cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/datax +mvn clean install +``` + +Then the datax engine installation package will be found in this path.
+ +``` +{EXCHANGIS_CODE_HOME}\exchangis-plugins\datax\target\out\datax +``` + + +### Start deployment + +#### 1)DataX engine installation + +1、Get the packed datax.zip material package, the directory structure is + +```shell +datax +-- dist +-- plugin +``` + +2、Place in the following directory in the linkis installation path + +```shell +cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins +``` + +(Note that depending on which users the current datax engine has permissions for, they are generally hadoop user groups and hadoop users) + + +#### 2)Restart linkis-engineplugin service to make datax engine take effect + +New engines joining linkis will not take effect until the engineplugin service of linkis is restarted, and the restart script is ./linkis-daemon.sh in the Linkis installation directory. The specific steps are as follows : + +``` +cd {LINKIS_INSTALL_HOME}/linkis/sbin/ +./linkis-daemon.sh restart cg-engineplugin +``` + +After the service is successfully started, check whether the datax engine is installed in the linkis database + +```shell +select * from linkis_cg_engine_conn_plugin_bml_resources where engine_conn_type='datax'; +``` + +At this point, the datax installation and deployment is complete. + +For a more detailed introduction of engineplugin, please refer to the following article.
+https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn \ No newline at end of file diff --git a/docs/en_US/ch1/exchangis_deploy_en.md b/docs/en_US/ch1/exchangis_deploy_en.md index 354bfea8c..bcef1232d 100644 --- a/docs/en_US/ch1/exchangis_deploy_en.md +++ b/docs/en_US/ch1/exchangis_deploy_en.md @@ -13,26 +13,26 @@ Exchangis installation is mainly divided into the following four steps : | Dependent components | Must be installed | Install through train | |------------------------------------------------------------------------------| ------ | --------------- | -| MySQL (5.5+) | yes | [How to install mysql](https://www.runoob.com/mysql/mysql-install.html) | -| JDK (1.8.0_141) | yes | [How to install JDK](https://www.runoob.com/java/java-environment-setup.html) | -| Hadoop(2.7.2,Other versions of Hadoop need to compile Linkis by themselves.) | yes | [Hadoop stand-alone deployment](https://hadoop.apache.org/releases.html) ;[Hadoop distributed deployment](https://hadoop.apache.org/releases.html) | -| Hive(2.3.3,Other versions of Hive need to compile Linkis by themselves.) | yes | [Hive quick installation](https://hive.apache.org/downloads.html) | +| JDK (1.8.0_141) | yes | [How to install JDK](https://www.oracle.com/java/technologies/downloads/) | +| MySQL (5.5+) | yes | [How to install mysql](https://mysql.net.cn/) | +| Hadoop(3.3.4,Other versions of Hadoop need to compile Linkis by themselves.) | yes | [Hadoop deployment](https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-3.3.4/hadoop-3.3.4.tar.gz) | +| Hive(2.3.3,Other versions of Hive need to compile Linkis by themselves.) 
| yes | [Hive quick installation](https://www.apache.org/dyn/closer.cgi/hive/) | | SQOOP (1.4.6) | yes | [How to install Sqoop](https://sqoop.apache.org/docs/1.4.6/SqoopUserGuide.html) | -| DSS1.1.0 | yes | [How to install DSS](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/en_US/Installation_and_Deployment/DSS%26Linkis_one-click_deployment_document_stand-alone_version.md) | -| Linkis1.1.1 | yes | [How to install Linkis](https://linkis.apache.org/zh-CN/docs/latest/deployment/deploy-quick) | -| Nginx | yes | [How to install Nginx](http://nginx.org/en/linux_packages.html) | +| DSS1.1.2 | yes | [How to install DSS](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) | +| Linkis1.4.0 | yes | [How to install Linkis](https://linkis.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick) | +| Nginx | yes | [How to install Nginx](http://nginx.org/) | Underlying component checking -$\color{#FF0000}{Note: be sure to reinstall dss1.1.0, and the linkis version must be greater than 1.1.1. Please recompile linkis and use the package released on June 15th }$ +$\color{#FF0000}{Note: be sure to reinstall dss1.1.2, and linkis1.4.0. Please recompile linkis and use the package released on June 15th }$ -[linkis1.1.1 code address ](https://github.com/apache/incubator-linkis/tree/release-1.1.1) +[linkis1.4.0 code address ](https://github.com/apache/incubator-linkis/tree/release-1.4.0) -[DSS1.1.0 code address ](https://github.com/WeBankFinTech/DataSphereStudio/tree/dev-1.1.0) +[DSS1.1.2 code address ](https://github.com/WeBankFinTech/DataSphereStudio) datasource enabled -By default, two services related to datasources (ps-data-source-manager, ps-metadatamanager) will not be started in the startup script of linkis. If you want to use datasource services, you can start them by modifying the export enable _ metadata _ manager = true value in $ linkis_conf_dir/linkis-env.sh. 
When the service is started and stopped through linkis-start-all.sh/linkis-stop-all.sh, the datasource service will be started and stopped. For more details about data sources, please refer to [Data Source Function Usage](https://linkis.apache.org/zh-CN/docs/latest/user-guide/datasource-manual) +By default, two services related to datasources (ps-data-source-manager, ps-metadatamanager) will not be started in the startup script of linkis. If you want to use datasource services, you can start them by modifying the export enable _ metadata _ manager = true value in $ linkis_conf_dir/linkis-env.sh. When the service is started and stopped through linkis-start-all.sh/linkis-stop-all.sh, the datasource service will be started and stopped. For more details about data sources, please refer to [Data Source Function Usage](https://linkis.apache.org/zh-CN/docs/1.1.0/deployment/start-metadatasource) #### 1.2 Create Linux users @@ -55,11 +55,11 @@ INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_t INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('开发环境UAT', '开发环境UAT', 4, '{"uris":"thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}', now(), NULL, now(), NULL); ``` -If the hive data source needs kerberos authentication when deployed, you need to specify a parameter keyTab in the parameter field of the Linkis_ps_dm_datasource_env table, and the way to obtain its value can be seen: [Setting and authenticating hive data source in linkis](https://linkis.apache.org/zh-CN/docs/latest/user-guide/datasource-manual). 
+If the hive data source needs kerberos authentication when deployed, you need to specify a parameter keyTab in the parameter field of the Linkis_ps_dm_datasource_env table, and the way to obtain its value can be seen: [Setting and authenticating hive data source in linkis](https://linkis.apache.org/zh-CN/docs/latest/auth/token). #### 1.4 Underlying component checking -Please ensure that DSS1.1.0 and Linkis1.1.1 are basically available. HiveQL scripts can be executed in the front-end interface of DSS, and DSS workflows can be created and executed normally. +Please ensure that DSS1.1.2 and Linkis1.4.0 are basically available. HiveQL scripts can be executed in the front-end interface of DSS, and DSS workflows can be created and executed normally. ## 2. Exchangis installation and deployment @@ -67,7 +67,7 @@ Please ensure that DSS1.1.0 and Linkis1.1.1 are basically available. HiveQL scri #### 2.1.1 Download binary package -Download the latest installation package from the Released release of Exchangis [click to jump to the release interface](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.0.0). +Download the latest installation package from the Released release of Exchangis [click to jump to the release interface](https://github.com/WeBankFinTech/Exchangis/releases). #### 2.1.2 Compile and package @@ -94,6 +94,8 @@ Download the latest installation package from the Released release of Exchangis |-- db:Database initialization SQL directory |-- exchangis-extds |-- packages:Exchangis installation package directory + |-- exchangis-extds:exchangis datasource library + |-- lib:library |-- sbin:Script storage directory ``` @@ -159,7 +161,28 @@ DATABASE= Enter `y` to start installing Exchange IS service, or `n` to not install it. 
-#### 2.5.3 Start service +#### 2.5.3 Change the path of the configuration file and log file + +In the 'env.properties' file in the sbin directory, set the configuration file path and log file path + +```yaml +EXCHANGIS_CONF_PATH="/appcom/config/exchangis-config/background" +EXCHANGIS_LOG_PATH="/appcom/logs/exchangis/background" +MODULE_DEFAULT_PREFIX="dss-exchangis-main-" +MODULE_DEFAULT_SUFFIX="-dev" +``` + +EXCHANGIS_CONF_PATH indicates the configuration file path, and EXCHANGIS_LOG_PATH indicates the log file path. If the preceding configurations are used, perform the following operations: + +```yaml +cd {EXCHANGIS_DEPLOY_PATH} +cp -r config /appcom/config/exchangis-config/background +mkdir -p /appcom/logs/exchangis/background +``` + +When the service is started, the configuration file in the corresponding path is used and logs are written to the corresponding path + +#### 2.5.4 Start service Execute the following command to start Exchangis Server: @@ -175,7 +198,7 @@ Execute the following command to start Exchangis Server: After executing the startup script, the following prompt will appear, eureka address will also be typed in the console when starting the service: -![企业微信截图_16532930262583](https://user-images.githubusercontent.com/27387830/169773764-1c5ed6fb-35e9-48cb-bac8-6fa7f738368a.png) +![企业微信截图_16532930262583](../../../images/zh_CN/ch1/register_eureka.png) ### 2.6 Check whether the service started successfully. 
@@ -191,7 +214,7 @@ As shown in the figure below: #### 2.7.1 Get the front-end installation package -Exchangis has provided compiled front-end installation package by default, which can be downloaded and used directly :[Click to jump to the Release interface](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.0.0/dist.zip) +Exchangis has provided compiled front-end installation package by default, which can be downloaded and used directly :[Click to jump to the Release interface](https://github.com/WeBankFinTech/Exchangis/releases) You can also compile the exchange front-end by yourself and execute the following command in the exchanise root directory: @@ -205,7 +228,7 @@ Get the compiled dist.zip front-end package from the `web/` path. The acquired front-end package can be placed anywhere on the server. Here, it is recommended that you keep the same directory as the back-end installation address, place it in the same directory and unzip it. -#### 2.7.2 Front-end installation deployment +#### 2.7.2 Front-end installation deployment 1. Decompress front-end installation package @@ -273,12 +296,12 @@ Please visit the Exchange front-end page at http://${EXCHANGIS_INSTALL_IP}:8098 ## 3. DSS ExchangisAppConn installation and deployment -If you want to use Exchangis1.0.0 front-end, you also need to install the DSS ExchangisAppConn plugin. Please refer to: [ExchangisAppConn installation documentation for plugins ](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_appconn_deploy_en.md) +If you want to use Exchangis front-end, you also need to install the DSS ExchangisAppConn plugin. Please refer to: [ExchangisAppConn installation documentation for plugins ](docs/en_US/ch1/exchangis_appconn_deploy_en.md) ## 4. Linkis Sqoop engine installation and deployment -If you want to execute the Sqoop operation of Exchangis1.0.0 normally, you also need to install the Linkis Sqoop engine.
Please refer to: : [Linkis Sqoop engine installation documentation ](https://linkis.apache.org/zh-CN/docs/latest/engine-usage/sqoop) +If you want to execute the Sqoop operation of Exchangis normally, you also need to install the Linkis Sqoop engine. Please refer to: : [Linkis Sqoop engine installation documentation ](https://linkis.apache.org/zh-CN/docs/1.1.2/engine-usage/sqoop/) ## 5. How to log in and use Exchangis -Exchangis1.0 for more instructions, please refer to the user manual.[Exchangis1.0 user manual](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_user_manual_en.md) +Exchangis for more instructions, please refer to the user manual.[Exchangis user manual](docs/en_US/ch1/exchangis_user_manual_en.md) diff --git a/docs/en_US/ch1/exchangis_sqoop_deploy_en.md b/docs/en_US/ch1/exchangis_sqoop_deploy_en.md index 9ef74d74c..062f797d0 100644 --- a/docs/en_US/ch1/exchangis_sqoop_deploy_en.md +++ b/docs/en_US/ch1/exchangis_sqoop_deploy_en.md @@ -2,7 +2,7 @@ ### Prepare the environment Sqoop engine is an indispensable component to perform Exchange IS data synchronization task, and only after the installation and deployment of Sqoop engine can it successfully perform data synchronization task. At the same time, make sure sqoop is installed on the deployed machine. -Before you install and deploy Sqoop engine, Please complete the installation of Exchangis 1.0.0 and related components according to the [Exchangis 1.0.0 installation and deployment document](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_deploy_en.md), and ensure that the basic functions of the project are available. +Before you install and deploy Sqoop engine, Please complete the installation of Exchangis and related components according to the [Exchangis installation and deployment document](docs/en_US/ch1/exchangis_deploy_en.md), and ensure that the basic functions of the project are available. 
Sqoop engine mainly depends on Hadoop basic environment. If this node needs to deploy Sqoop engine, it needs to deploy Hadoop client environment. @@ -26,9 +26,9 @@ It is strongly recommended that you use the native Sqoop to perform the test tas ### Prepare installation package #### 1)Download binary package -Exchangis1.0.0 and Linkis 1.1.1 support the mainstream Sqoop versions 1.4.6 and 1.4.7, and later versions may need to modify some codes for recompilation. +Exchangis1.1.2 and Linkis 1.4.0 support the mainstream Sqoop versions 1.4.6 and 1.4.7, and later versions may need to modify some codes for recompilation. -[Click to jump to Release interface](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.0.0) +[Click to jump to Release interface](https://github.com/WeBankFinTech/Exchangis/releases) #### 2)Compile and package If you want to develop and compile sqoop engine yourself, the specific compilation steps are as follows: @@ -74,4 +74,4 @@ cd {LINKIS_INSTALL_HOME}/links/sbin/ After the service is successfully started, the installation and deployment of sqoop will be completed. For a more detailed introduction of engineplugin, please refer to the following article. -https://linkis.apache.org/zh-CN/docs/latest/architecture/computation-governance-services/engine/engine-conn \ No newline at end of file +https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn \ No newline at end of file diff --git a/docs/en_US/ch1/exchangis_user_manual_en.md b/docs/en_US/ch1/exchangis_user_manual_en.md index 7ad3b9e6a..bdd0856f9 100644 --- a/docs/en_US/ch1/exchangis_user_manual_en.md +++ b/docs/en_US/ch1/exchangis_user_manual_en.md @@ -6,7 +6,7 @@ ## 二、Login Exchangis1.0 -   Exchangis1.0 is currently a part of DSS**data exchange component**, and it can be accessed in the component list by logging in to DSS. 
Therefore, before using Exchangis 1.0, please make basic deployment of DSS, Exchange IS 1.0, Linkis and other related components to ensure that the components' functions are available. This article will not go into details. See for details:[exchangis_deploy_en](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_deploy_en.md)和[exchangis-appconn_deploy_en](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/en_US/ch1/exchangis_appconn_deploy_en.md) +   Exchangis1.0 is currently a part of DSS**data exchange component**, and it can be accessed in the component list by logging in to DSS. Therefore, before using Exchangis 1.0, please make basic deployment of DSS, Exchange IS 1.0, Linkis and other related components to ensure that the components' functions are available. This article will not go into details. See for details:[exchangis_deploy_en](docs/en_US/ch1/exchangis_deploy_en.md)和[exchangis-appconn_deploy_en](docs/en_US/ch1/exchangis_appconn_deploy_en.md) ### 1、Login DSS diff --git a/docs/zh_CN/ch1/component_upgrade_cn.md b/docs/zh_CN/ch1/component_upgrade_cn.md index fd4501f35..94ca8e7f2 100644 --- a/docs/zh_CN/ch1/component_upgrade_cn.md +++ b/docs/zh_CN/ch1/component_upgrade_cn.md @@ -1,8 +1,8 @@ Exchangis 升级文档 -本文主要介绍在原有安装Exchangis服务的基础上适配DSS1.1.0和Linkis1.1.1的升级步骤,Exchangis1.0.0相对与Exchangis1.0.0-rc1版本最大的区别在于ExchangisAppconn的安装,需要对整个Exchangisappconn进行重新替换和加载 +本文主要介绍在原有安装Exchangis服务的基础上适配DSS1.1.2和Linkis1.4.0的升级步骤,Exchangis1.1.2相对与Exchangis1.0.0版本最大的区别在于ExchangisAppconn的安装,需要对整个Exchangisappconn进行重新替换和加载 ### 1.升级Exchangis前的工作 -您在升级Exchangis之前,请按照[DSS1.1.0安装部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.1-rc/docs/zh_CN/ch1/exchangis_deploy_cn.md) -和[Linkis1.1.1安装部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.1-rc/docs/zh_CN/ch1/exchangis_deploy_cn.md)完成DSS和Linkis的安装升级 
+您在升级Exchangis之前,请按照[DSS1.1.2安装部署文档](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) +和[Linkis1.4.0安装部署文档](https://linkis.staged.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick)完成DSS和Linkis的安装升级 ### 2.Exchangis升级步骤 @@ -14,7 +14,7 @@ Exchangis 升级文档 ``` #### 2)下载二进制包 -我们提供ExchangisAppconn的升级物料包,您可直接下载使用。[点击跳转 Release 界面](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.0.0-rc/exchangis-appconn.zip) +我们提供ExchangisAppconn的升级物料包,您可直接下载使用。[点击跳转 Release 界面](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.1.2/Exchangis1.1.2_install_package.zip) #### 3) 编译打包 @@ -66,20 +66,20 @@ sh ./appconn-refresh.sh #### 4.2)更新Exchangis安装目录下的lib包 -将从下载链接中得到的exchangis-project-server-1.0.0.jar和exchangis-server-1.0.0.jar两个包放入以下Exchangis安装目录的文件路径下(先删除原有旧的这两个包): +将从下载链接中得到的exchangis-project-server-1.1.2.jar和exchangis-server-1.1.2.jar两个包放入以下Exchangis安装目录的文件路径下(先删除原有旧的这两个包): ```$xslt lib/exchangis-server ``` 再通过以下命令完成 Exchangis Server 的更新重启: - + ```shell script ./sbin/daemon.sh restart server ``` -# 4.3)更新exchangis-server.propertis文件 +# 4.3)更新dss-exchangis-server.propertis文件 -将exchangis-server.propertis文件中的最后一行进行替换,替换内容如下 +将dss-exchangis-server.propertis文件中的最后一行进行替换,替换内容如下 ```$xslt wds.linkis-session.ticket.key=bdp-user-ticket-id @@ -94,4 +94,4 @@ wds.linkis-session.ticket.key=bdp-user-ticket-id ![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png) - 更多使用操作可参照[Exchangis1.0用户手册](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.1-rc/docs/zh_CN/ch1/exchangis_user_manual_cn.md) + 更多使用操作可参照[Exchangis用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md) diff --git a/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md b/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md index 9144ad166..5225fc180 100644 --- a/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md +++ b/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md @@ -1,11 
+1,11 @@ ExchangisAppConn安装文档 本文主要介绍在DSS(DataSphere Studio)1.0.1中ExchangisAppConn的部署、配置以及安装 ### 1.部署ExchangisAppConn的准备工作 -您在部署ExchangisAppConn之前,请按照[Exchangis1.0.0安装部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis1.0.0及其他相关组件的安装,并确保工程基本功能可用。 +您在部署ExchangisAppConn之前,请按照[Exchangis安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis及其他相关组件的安装,并确保工程基本功能可用。 ### 2.ExchangisAppConn插件的下载和编译 #### 1)下载二进制包 -我们提供ExchangisAppconn的物料包,您可直接下载使用。[点击跳转 Release 界面](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.0.0/exchangis-appconn.zip) +我们提供ExchangisAppconn的物料包,您可直接下载使用。[点击跳转 Release 界面](https://github.com/WeBankFinTech/Exchangis/releases) #### 2) 编译打包 如果您想自己开发和编译ExchangisAppConn,具体编译步骤如下: @@ -64,7 +64,7 @@ sh ./dss-start-all.sh 2. 在exchangis端查看是否同步创建项目,创建成功说明appconn安装成功 ![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png) -更多使用操作可参照[Exchangis1.0用户手册](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_user_manual_cn.md) +更多使用操作可参照[Exchangis用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md) ### 5.Exchangis AppConn安装原理 diff --git a/docs/zh_CN/ch1/exchangis_datasource_cn.md b/docs/zh_CN/ch1/exchangis_datasource_cn.md index 5bfb1c423..b7466d4bf 100644 --- a/docs/zh_CN/ch1/exchangis_datasource_cn.md +++ b/docs/zh_CN/ch1/exchangis_datasource_cn.md @@ -106,7 +106,7 @@ 需求描述: -后台需要将其和Linkis1.1.1的标签功能相整合,为数据源赋予标签关系。 +后台需要将其和Linkis1.4.0的标签功能相整合,为数据源赋予标签关系。 流程设计: diff --git a/docs/zh_CN/ch1/exchangis_datax_deploy_cn.md b/docs/zh_CN/ch1/exchangis_datax_deploy_cn.md new file mode 100644 index 000000000..00a09f989 --- /dev/null +++ b/docs/zh_CN/ch1/exchangis_datax_deploy_cn.md @@ -0,0 +1,72 @@ +# DataX 引擎使用文档 +### 环境准备 +DataX引擎是执行Exchangis数据同步任务不可或缺的组件,只有安装部署完成DataX引擎才能够成功执行数据同步任务。同时,确保所部署的机器上有安装DataX。 + 
+ +您在安装部署DataX引擎之前,请按照[Exchangis安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis及相关组件的安装,并确保工程基本功能可用。 + +强烈建议您在执行DataX任务之前,先在该节点使用原生的DataX执行测试任务,以检测该节点环境是否正常。 + +| 环境变量名 | 环境变量内容 | 备注 | +| :----: | :----: |-------| +| JAVA_HOME | JDK安装路径 | 必须 | +| DATAX_HOME | DataX安装路径 | 非必须 | +| DATAX_CONF_DIR | DataX配置路径 | 非必须 | + +### 安装包准备 +#### 1)下载二进制包 + +Exchangis1.1.2和Linkis 1.4.0支持的主流DataX版本1.4.6与1.4.7,更高版本可能需要修改部分代码重新编译。 + +[点击跳转 Release 界面](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.1.2) + +#### 2) 编译打包 +如果您想自己开发和编译datax引擎,具体编译步骤如下: + +1、克隆Exchangis的代码 + +2、在exchangis-plugins模块下,找到datax引擎,单独编译datax,操作如下 + +``` +cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/datax +mvn clean install +``` +然后会在该路径下找到datax引擎安装包 +``` +{EXCHANGIS_CODE_HOME}/exchangis-plugins/datax/target/out +``` + + +### 开始部署 +#### 1)DataX引擎安装 +1、拿到打包出来的datax物料包,目录结构为: + +```shell +datax +-- dist +-- plugin +``` + +2、放置到linkis安装路径的如下目录 + +```shell +cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins +``` +(注意,看当前datax引擎对哪些用户有权限,一般都为hadoop用户组和hadoop用户) + + +#### 2)重启linkis-engineplugin服务使datax引擎生效 +新加入linkis的引擎都要重启linkis的engineplugin服务才会生效,重启脚本为linkis安装目录下的./sbin/linkis-daemon.sh,具体步骤如下 +```shell +cd {LINKIS_INSTALL_HOME}/linkis/sbin/ +./linkis-daemon.sh restart cg-engineplugin +``` +待服务启动成功,在linkis数据库中校验datax引擎是否安装完毕 + +```shell +select * from linkis_cg_engine_conn_plugin_bml_resources where engine_conn_type='datax'; +``` + +至此,datax安装部署就完成了。 + +engineplugin更详细的介绍可以参看下面的文章。 +https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn \ No newline at end of file diff --git a/docs/zh_CN/ch1/exchangis_deploy_cn.md b/docs/zh_CN/ch1/exchangis_deploy_cn.md index f6943fe21..79c3fc139 100644 --- a/docs/zh_CN/ch1/exchangis_deploy_cn.md +++ b/docs/zh_CN/ch1/exchangis_deploy_cn.md @@ -6,6 +6,7 @@ Exchangis 的安装,主要分为以下四步: 2. Exchangis 安装部署 3. DSS ExchangisAppConn 安装部署 4. Linkis Sqoop 引擎安装部署 +5. Linkis DataX 引擎安装部署 ## 1.
Exchangis 依赖环境准备 @@ -13,26 +14,26 @@ Exchangis 的安装,主要分为以下四步: | 依赖的组件 | 是否必装 | 安装直通车 | |---------------------------------------| ------ | --------------- | -| MySQL (5.5+) | 必装 | [如何安装mysql](https://www.runoob.com/mysql/mysql-install.html) | -| JDK (1.8.0_141) | 必装 | [如何安装JDK](https://www.runoob.com/java/java-environment-setup.html) | -| Hadoop(2.7.2,Hadoop 其他版本需自行编译 Linkis) | 必装 | [Hadoop单机部署](https://hadoop.apache.org/releases.html) ;[Hadoop分布式部署](https://hadoop.apache.org/releases.html) | -| Hive(2.3.3,Hive 其他版本需自行编译 Linkis) | 必装 | [Hive快速安装](https://hive.apache.org/downloads.html) | +| JDK (1.8.0_141) | 必装 | [如何安装JDK](https://www.oracle.com/java/technologies/downloads/) | +| MySQL (5.5+) | 必装 | [如何安装mysql](https://www.runoob.com/mysql/mysql-install.html) | +| Hadoop(3.3.4,Hadoop 其他版本需自行编译 Linkis) | 必装 | [Hadoop部署](https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-3.3.4/hadoop-3.3.4.tar.gz) | +| Hive(2.3.3,Hive 其他版本需自行编译 Linkis) | 必装 | [Hive快速安装](https://www.apache.org/dyn/closer.cgi/hive/) | | SQOOP (1.4.6) | 必装 | [如何安装Sqoop](https://sqoop.apache.org/docs/1.4.6/SqoopUserGuide.html) | -| DSS1.1.0 | 必装 | [如何安装DSS](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/en_US/Installation_and_Deployment/DSS%26Linkis_one-click_deployment_document_stand-alone_version.md) | -| Linkis1.1.1 | 必装 | [如何安装Linkis](https://linkis.apache.org/zh-CN/docs/latest/deployment/deploy-quick) | -| Nginx | 必装 | [如何安装 Nginx](http://nginx.org/en/linux_packages.html) | +| DSS1.1.2 | 必装 | [如何安装DSS](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) | +| Linkis1.4.0 | 必装 | [如何安装Linkis](https://linkis.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick) | +| Nginx | 必装 | [如何安装 Nginx](http://nginx.org/) | 底层依赖组件检查 -$\color{#FF0000}{注意:一定要使用最新版的dss1.1.0,及linkis1.1.1}$。 +注意:一定要使用最新版的dss1.1.2,及linkis1.4.0。 -[linkis1.1.1代码地址](https://github.com/apache/incubator-linkis/tree/release-1.1.1) 
+[linkis1.4.0代码地址](https://github.com/apache/incubator-linkis/tree/release-1.4.0) -[DSS1.1.0代码地址](https://github.com/WeBankFinTech/DataSphereStudio/tree/dev-1.1.0) +[DSS1.1.2代码地址 ](https://github.com/WeBankFinTech/DataSphereStudio) datasource启用 -linkis的启动脚本中默认不会启动数据源相关的服务两个服务(ps-data-source-manager,ps-metadatamanager), 如果想使用数据源服务,可以通过如下方式进行开启: 修改$LINKIS_CONF_DIR/linkis-env.sh中的 export ENABLE_METADATA_MANAGER=true值为true。 通过linkis-start-all.sh/linkis-stop-all.sh 进行服务启停时,会进行数据源服务的启动与停止。关于数据源更多详情可参考[数据源功能使用](https://linkis.apache.org/zh-CN/docs/latest/user-guide/datasource-manual) +linkis的启动脚本中默认不会启动数据源相关的服务两个服务(ps-data-source-manager,ps-metadatamanager), 如果想使用数据源服务,可以通过如下方式进行开启: 修改$LINKIS_CONF_DIR/linkis-env.sh中的 export ENABLE_METADATA_MANAGER=true值为true。 通过linkis-start-all.sh/linkis-stop-all.sh 进行服务启停时,会进行数据源服务的启动与停止。关于数据源更多详情可参考[数据源功能使用](https://linkis.apache.org/zh-CN/docs/1.4.0/user-guide/datasource-manual) #### 1.2 创建 Linux 用户 @@ -57,11 +58,11 @@ INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_t INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('开发环境UAT', '开发环境UAT', 4, '{"uris":"thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}', now(), NULL, now(), NULL); ``` -如果hive数据源在部署时设置了需要进行kerberos方式认证,则需要在linkis_ps_dm_datasource_env表的parameter字段指定一个参数keyTab,其值的获取方式可见:[在Linkis中设置并认证hive数据源](https://linkis.apache.org/zh-CN/docs/latest/user-guide/datasource-manual) +如果hive数据源在部署时设置了需要进行kerberos方式认证,则需要在linkis_ps_dm_datasource_env表的parameter字段指定一个参数keyTab,其值的获取方式可见:[在Linkis中设置并认证hive数据源](https://linkis.apache.org/zh-CN/docs/latest/auth/token) #### 1.4 底层依赖组件检查 -**请确保 DSS1.1.0 与 Linkis1.1.1 基本可用,可在 DSS 前端界面执行 HiveQL 脚本,可正常创建并执行 DSS 工作流。** +**请确保 DSS1.1.2 与 Linkis1.4.0 基本可用,可在 DSS 前端界面执行 HiveQL 脚本,可正常创建并执行 DSS 工作流。** ## 2. 
Exchangis 安装部署 @@ -70,7 +71,7 @@ INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_t #### 2.1.1下载二进制包 -从 Exchangis 已发布的 release 中 [点击下载exchangis安装包](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.0.0),下载最新的安装包。 +从 Exchangis 已发布的 release 中 [点击下载exchangis安装包](https://github.com/WeBankFinTech/Exchangis/releases),下载最新的安装包。 #### 2.1.2 编译打包 @@ -95,8 +96,9 @@ INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_t ```html |-- config:一键安装部署参数配置目录 |-- db:数据库表初始化 SQL 目录 -|-- exchangis-extds |-- packages:Exchangis 安装包目录 + |-- exchangis-extds:数据源扩展库 + |-- lib:库 |-- sbin:脚本存放目录 ``` @@ -156,7 +158,28 @@ DATABASE={dbName} 输入 `y` 初始化数据库表,输入 `n` 跳过数据库表初始化步骤。 -#### 2.5.3 启动服务 +#### 2.5.3 修改配置文件路径和日志文件路径 + +在sbin目录下的`env.properties`文件,设置配置文件路径和日志文件路径 + +```yaml +EXCHANGIS_CONF_PATH="/appcom/config/exchangis-config/background" +EXCHANGIS_LOG_PATH="/appcom/logs/exchangis/background" +MODULE_DEFAULT_PREFIX="dss-exchangis-main-" +MODULE_DEFAULT_SUFFIX="-dev" +``` + +EXCHANGIS_CONF_PATH为配置文件路径,EXCHANGIS_LOG_PATH为日志文件路径,若为以上配置,则作如下操作: + +```shell +cd {EXCHANGIS_DEPLOY_PATH} +cp -r config /appcom/config/exchangis-config/background +mkdir -p /appcom/logs/exchangis/background +``` + +则在服务启动时,将会使用对应路径下的配置文件,以及将日志写到对应的路径下 + +#### 2.5.4 启动服务 第一次启动,可以sbin目录下执行以下命令,启动 Exchangis Server: @@ -172,7 +195,7 @@ DATABASE={dbName} 执行完成启动脚本后,会出现以下提示,eureka地址也会在启动服务时在控制台打出: -![企业微信截图_16532930262583](https://user-images.githubusercontent.com/27387830/173892397-7cc7e988-0222-4f64-92ed-2cc58669770e.png) +![企业微信截图_16532930262583](../../../images/zh_CN/ch1/register_eureka.png) ### 2.6 查看服务是否启动成功 @@ -188,7 +211,7 @@ DATABASE={dbName} #### 2.7.1 获取前端安装包 -Exchangis 已默认提供了编译好的前端安装包,可直接下载使用:[点击下载前端安装包](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.0.0/dist.zip) +Exchangis 已默认提供了编译好的前端安装包,可直接下载使用:[点击下载前端安装包](https://github.com/WeBankFinTech/Exchangis/releases) 您也可以自行编译 Exchangis 前端,在 Exchangis 根目录下执行如下命令: 
@@ -198,23 +221,22 @@ Exchangis 已默认提供了编译好的前端安装包,可直接下载使用 npm run build ``` -从 `web/` 路径获取编译好的 dist.zip 前端包。 +从 `web/` 路径获取编译好的 exchangis-ui.zip 前端包。 获取到的前端包,您可以放在服务器上的任意位置,这里建议您与后端安装地址目录保持一致,在同一目录下放置并解压。 -#### 2.7.2 前端安装部署 +#### 3.3.4 前端安装部署 1. 解压前端安装包 -如您打算将 Exchangis 前端包部署到 `/appcom/Install/exchangis/web` 目录,请先将 `dist.zip` 拷贝到该目录并执行解压,注意,**请在安装dss的机器上安装exchangis前端**: +如您打算将 Exchangis 前端包部署到 `/appcom/Install/ExchangisInstall/exchangis-ui` 目录,请先将 `exchangis-ui.zip` 拷贝到该目录并执行解压,注意,**请在安装dss的机器上安装exchangis前端**: ```shell script - # 请先将 Exchangis 前端包拷贝到 `/appcom/Install/exchangis/web` 目录 - cd /appcom/Install/exchangis/web - unzip dist.zip + # 请先将 Exchangis 前端包拷贝到 `/appcom/Install/ExchangisInstall` 目录 + cd /appcom/Install/ExchangisInstall + unzip exchangis-ui.zip ``` - 执行如下命令: ```shell script @@ -228,7 +250,7 @@ Exchangis 已默认提供了编译好的前端安装包,可直接下载使用 #charset koi8-r; #access_log /var/log/nginx/host.access.log main; location / { - root /appcom/Install/exchangis/web/dist; # Exchangis 前端部署目录 + root /appcom/Install/ExchangisInstall/exchangis-ui; # Exchangis 前端部署目录 autoindex on; } @@ -271,12 +293,16 @@ Exchangis 已默认提供了编译好的前端安装包,可直接下载使用 ## 3. DSS ExchangisAppConn 安装部署 -如您想正常使用 Exchangis1.0.0 前端,还需安装 DSS ExchangisAppConn 插件,请参考: [ExchangisAppConn 插件安装文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md) +如您想正常使用 Exchangis 前端,还需安装 DSS ExchangisAppConn 插件,请参考: [ExchangisAppConn 插件安装文档](docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md) ## 4. Linkis Sqoop 引擎安装部署 -如您想正常执行 Exchangis1.0.0 的 Sqoop作业,还需安装 Linkis Sqoop 引擎,请参考: [Linkis Sqoop 引擎插件安装文档](https://linkis.apache.org/zh-CN/docs/latest/engine-usage/sqoop) +如您想正常执行 Exchangis 的Sqoop作业,还需安装 Linkis Sqoop 引擎,请参考: [Linkis Sqoop 引擎插件安装文档](docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md) + +## 5. Linkis DataX 引擎安装部署 + +如您想正常执行 Exchangis 的DataX作业,还需安装 Linkis DataX 引擎,请参考: [Linkis DataX 引擎插件安装文档](docs/zh_CN/ch1/exchangis_datax_deploy_cn.md) -## 5. 如何登录使用 Exchangis +## 6. 
如何登录使用 Exchangis -Exchangis1.0更多使用说明,请参考用户使用手册[Exchangis1.0 用户手册](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_user_manual_cn.md) +Exchangis 更多使用说明,请参考用户使用手册[Exchangis 用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md) diff --git a/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md b/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md index 1e457d825..f4de28ac2 100644 --- a/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md +++ b/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md @@ -2,7 +2,7 @@ ### 环境准备 Sqoop引擎是执行Exchangis数据同步任务不可或缺的组件,只有安装部署完成Sqoop引擎才能够成功执行数据同步任务。同时,确保所部署的机器上有安装sqoop。 -您在安装部署Sqoop引擎之前,请按照[Exchangis1.0.0安装部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis1.0.0及相关组件的安装,并确保工程基本功能可用。 +您在安装部署Sqoop引擎之前,请按照[Exchangis安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis及相关组件的安装,并确保工程基本功能可用。 Sqoop引擎主要依赖Hadoop基础环境,如果该节点需要部署Sqoop引擎,需要部署Hadoop客户端环境。 @@ -26,9 +26,9 @@ Sqoop引擎主要依赖Hadoop基础环境,如果该节点需要部署Sqoop引 ### 安装包准备 #### 1)下载二进制包 -Exchangis1.0.0和Linkis 1.1.1支持的主流Sqoop版本1.4.6与1.4.7,更高版本可能需要修改部分代码重新编译。 +Exchangis1.1.2和Linkis 1.4.0支持的主流Sqoop版本1.4.6与1.4.7,更高版本可能需要修改部分代码重新编译。 -[点击跳转 Release 界面](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.0.0) +[点击跳转 Release 界面](https://github.com/WeBankFinTech/Exchangis/releases) #### 2) 编译打包 如果您想自己开发和编译sqoop引擎,具体编译步骤如下: @@ -37,12 +37,12 @@ Exchangis1.0.0和Linkis 1.1.1支持的主流Sqoop版本1.4.6与1.4.7,更高版 2.在exchangis-plugins模块下,找到sqoop引擎,单独编译sqoop,操作如下 ``` -cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/sqoop +cd {EXCHANGIS_CODE_HOME}/exchangis-engines/engine-plugins/sqoop mvn clean install ``` 然后会在该路径下找到sqoop引擎安装包 ``` -{EXCHANGIS_CODE_HOME}/exchangis-plugins/sqoop/target/out +{EXCHANGIS_CODE_HOME}/exchangis-engines/engine-plugins/sqoop/target/out ``` @@ -59,9 +59,9 @@ sqoop 2.放置到linkis安装路径的如下目录 ```shell -cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins +cd {LINKIS_HOME}/linkis-engineconn-plugins ``` -(注意,看当前sqoop引擎对哪些用户有权限,不一定是root) 
+(注意,看当前sqoop引擎对哪些用户有权限,不一定是root) #### 2)重启linkis-engineplugin服务使sqoop引擎生效 @@ -70,7 +70,13 @@ cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins cd {LINKIS_INSTALL_HOME}/links/sbin/ ./linkis-daemon.sh restart cg-engineplugin ``` -待服务启动成功,至此,sqoop安装部署就完成了。 +待服务启动成功,在linkis数据库中校验sqoop引擎是否安装完毕 + +```yaml +select * from linkis_cg_engine_conn_plugin_bml_resources where engine_conn_type='sqoop'; +``` + +至此,sqoop安装部署就完成了。 engineplugin更详细的介绍可以参看下面的文章。 -https://linkis.apache.org/zh-CN/docs/latest/architecture/computation-governance-services/engine/engine-conn +https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn \ No newline at end of file diff --git a/docs/zh_CN/ch1/exchangis_user_manual_cn.md b/docs/zh_CN/ch1/exchangis_user_manual_cn.md index 1e7c3c2cc..005c9e189 100644 --- a/docs/zh_CN/ch1/exchangis_user_manual_cn.md +++ b/docs/zh_CN/ch1/exchangis_user_manual_cn.md @@ -6,7 +6,7 @@ ## 二、登录Exchangis1.0 -  Exchangis1.0目前作为DSS**数据交换组件**的一部分,通过登录DSS的方式在组件列表中免密进入。所以,在使用Exchangis1.0之前,请对DSS,Exchangis1.0,Linkis等相关组件进行基本部署,保证组件功能可用,本文不进行赘述,详情见[exchangis部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_deploy_cn.md)和[exchangis-appconn部署文档](https://github.com/WeBankFinTech/Exchangis/blob/dev-1.0.0/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md) +  Exchangis1.0目前作为DSS**数据交换组件**的一部分,通过登录DSS的方式在组件列表中免密进入。所以,在使用Exchangis1.0之前,请对DSS,Exchangis1.0,Linkis等相关组件进行基本部署,保证组件功能可用,本文不进行赘述,详情见[exchangis部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)和[exchangis-appconn部署文档](docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md) ### 1、登录DSS diff --git a/exchangis-dao/pom.xml b/exchangis-dao/pom.xml index e200f6146..5776a5b52 100644 --- a/exchangis-dao/pom.xml +++ b/exchangis-dao/pom.xml @@ -5,7 +5,7 @@ exchangis com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -21,24 +21,20 @@ org.apache.linkis linkis-mybatis - ${linkis.version} org.apache.linkis linkis-module - ${linkis.version} - - - validation-api - javax.validation - - org.hibernate 
hibernate-validator ${hibernate.validator} + + org.springframework + spring-orm + diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java new file mode 100644 index 000000000..f42908668 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.exchangis.common; + + +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; + +/** + * @author tikazhang + * @Date 2022/9/19 20:07 + */ +public class AuditLogUtils { + + + private static final Logger LOGGER = LoggerFactory.getLogger(AuditLogUtils.class); + + /** + * 打印审计日志,id类的属性都是String + * @param user 执行操作的用户名 + * @param targetType 操作针对的对象类型 + * @param targetId 操作针对的对象id + * @param targetName 操作针对的对象名称 + * @param operateType 操作类型 + * @param params 操作相关的参数 + */ + public static void printLog(String user, String proxyUser, TargetTypeEnum targetType, + String targetId, String targetName, OperateTypeEnum operateType, Object params) { + //String detailInfo=new Gson().toJson(params); + String detailInfo=params.toString(); + LOGGER.info("[{}],[{}],[{}],[{}],[{}],[{}],[{}],[{}],[{}]", + new Date(),user, "proxyUser is: " + proxyUser, "Exchangis-1.1.2", targetType.getName(), + targetId,targetName,operateType.getName(), detailInfo); + } + +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java new file mode 100644 index 000000000..eaf5d6c38 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java @@ -0,0 +1,19 @@ +package 
com.webank.wedatasphere.exchangis.common; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * Environment utils + */ +public class EnvironmentUtils { + + private static final CommonVars JVM_USER = CommonVars.apply("wds.exchangis.env.jvm.user", System.getProperty("user.name", "hadoop")); + + /** + * Jvm user + * @return user name + */ + public static String getJvmUser(){ + return JVM_USER.getValue(); + } +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/UserUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/UserUtils.java new file mode 100644 index 000000000..216086ce1 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/UserUtils.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.common; + +import org.apache.linkis.server.security.ProxyUserSSOUtils; +import org.apache.linkis.server.security.SecurityFilter; +import scala.Option; + +import javax.servlet.http.HttpServletRequest; + +/** + * @author tikazhang + * @Date 2022/9/22 16:54 + */ +public class UserUtils { + public static String getLoginUser(HttpServletRequest request) { + Option proxyUserUsername = + ProxyUserSSOUtils.getProxyUserUsername(request); + String loginUser = null; + if (proxyUserUsername.isDefined()) { + loginUser = proxyUserUsername.get(); + } else { + loginUser = SecurityFilter.getLoginUsername(request); + } + return loginUser; + } + +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/OperateTypeEnum.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/OperateTypeEnum.java new file mode 100644 index 000000000..99de7e712 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/OperateTypeEnum.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.common.enums; + +/** + * @author tikazhang + * @Date 2022/9/19 20:16 + */ +public enum OperateTypeEnum { + 
CREATE("create"), + UPDATE("update"), + DELETE("delete"), + COPY("copy"), + EXPORT("export"), + IMPORT("import"), + PUBLISH("publish"), + EXPIRE("expire"), + EXECUTE("execute"), + KILL("kill"), + ; + private String name; + OperateTypeEnum(String name) { + this.name = name; + } + + public String getName() { + return name; + } + public void setName(String name) { + this.name = name; + } +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/TargetTypeEnum.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/TargetTypeEnum.java new file mode 100644 index 000000000..539767c1d --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/TargetTypeEnum.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.exchangis.common.enums; + +/** + * @author tikazhang + * @Date 2022/9/19 20:22 + */ +public enum TargetTypeEnum { + /** + * 项目 + */ + PROJECT("project"), + /** + * 作业 + */ + JOB("job"), + /** + * 数据源 + */ + DATASOURCE("datasource"), + /** + * 任务 + */ + TASK("task"), + ; + private String name; + + TargetTypeEnum(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/ClientConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/ClientConfiguration.java new file mode 100644 index 000000000..3725610fd --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/ClientConfiguration.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.common.linkis; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.Configuration; + +/** + * Configuration for linkis client + */ +public class ClientConfiguration { + + /** + * Linkis server url + */ + public static final CommonVars 
LINKIS_SERVER_URL = CommonVars.apply("wds.exchangis.client.linkis.server-url", Configuration.getGateWayURL()); + + /** + * Linkis token value + */ + public static final CommonVars LINKIS_TOKEN_VALUE = CommonVars.apply("wds.exchangis.client.linkis.token.value", "EXCHANGIS-TOKEN"); + + /** + * Linkis client max connections + */ + public static final CommonVars LINKIS_DEFAULT_MAX_CONNECTIONS = CommonVars.apply("wds.exchangis.client.linkis.max-connections.default", 70); + +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/bml/BmlResource.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/bml/BmlResource.java new file mode 100644 index 000000000..0ea6f12af --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/bml/BmlResource.java @@ -0,0 +1,40 @@ +package com.webank.wedatasphere.exchangis.common.linkis.bml; + +/** + * Bml resource definition + */ +public class BmlResource { + /** + * Resource id + */ + private String resourceId; + + /** + * Version + */ + private String version; + + public BmlResource(){ + + } + + public BmlResource(String resourceId, String version){ + this.resourceId = resourceId; + this.version = version; + } + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java index 40ce6e138..b50609c03 100644 --- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java +++ 
b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java @@ -63,6 +63,9 @@ public class ExchangisJobParamConfig { @TableField(value = "is_advanced") private Boolean advanced; + @TableField(value = "ref_id") + private Long refId; + /** * store url exa. http://127.0.0.1/api/v1/dss/exchangis/main/xxx */ @@ -265,4 +268,12 @@ public Boolean getRequired() { public void setRequired(Boolean required) { this.required = required; } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } } \ No newline at end of file diff --git a/exchangis-datasource/exchangis-datasource-core/pom.xml b/exchangis-datasource/exchangis-datasource-core/pom.xml index 98bfbccd5..990c92d6f 100644 --- a/exchangis-datasource/exchangis-datasource-core/pom.xml +++ b/exchangis-datasource/exchangis-datasource-core/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -20,14 +20,13 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 + 1.1.2 org.apache.linkis linkis-datasource-client - ${linkis.version} org.apache.linkis diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java index 1b09ba1e6..13ecdfdac 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java @@ -25,6 +25,7 @@ public interface ExchangisDataSource { String classifier(); // String type(); + String structClassifier(); // String category(); diff --git 
a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java new file mode 100644 index 000000000..4e385450c --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum Classifier { + + ELASTICSEARCH("分布式全文索引"), + + HIVE("大数据存储"), + + MONGODB("非关系型数据库"), + + MYSQL("关系型数据库"), + + SFTP("sftp连接"), + + ORACLE("关系型数据库"); + + public String name; + + Classifier(String name) { + this.name = name; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/DataSourceType.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/DataSourceType.java new file mode 100644 index 000000000..d0193c4a2 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/DataSourceType.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum DataSourceType { + + ELASTICSEARCH("ELASTICSEARCH"), + + HIVE("HIVE"), + + MONGODB("MONGODB"), + + MYSQL("MYSQL"), + + SFTP("SFTP"), + + ORACLE("ORACLE"); + + public String name; + + DataSourceType(String name) { + this.name = name; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/StructClassifier.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/StructClassifier.java new file mode 100644 index 000000000..e7aa660ac --- /dev/null +++ 
b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/StructClassifier.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum StructClassifier { + + STRUCTURED("结构化"), + + SEMI_STRUCTURED("半结构化"), + + NON_STRUCTURED("无结构化"); + + public String name; + + StructClassifier(String name) { + this.name = name; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java index 60400f70f..f68f696e8 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java @@ -10,7 +10,6 @@ public interface ExchangisDataSourceLoader { -// String EXCHANGIS_DIR_NAME = "exchangis-extds"; String EXCHANGIS_DIR_NAME = Objects.isNull(CommonVars.apply("wds.exchangis.datasource.extension.dir").getValue()) ? 
"exchangis-extds" : CommonVars.apply("wds.exchangis.datasource.extension.dir").getValue().toString(); String PROPERTIES_NAME = "extds.properties"; diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java index 04b7d78f3..1166eff4d 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java @@ -57,4 +57,11 @@ Map getTableProps(ServiceRpcClient rpcClient, String userName * @throws ExchangisDataSourceException */ List getColumns(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException; + + /** + * Get the default(local) hdfs information + * @param uri uri + * @return + */ + Map getLocalHdfsInfo(String uri) throws ExchangisDataSourceException; } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java index 1c2585228..88a23ab4d 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java @@ -5,6 +5,7 @@ import java.util.Map; public class InputElementUI implements ElementUI { + private Long id; private String key; private String field; private String label; @@ -17,6 +18,15 @@ public class 
InputElementUI implements ElementUI { private String validateRange; private String validateMsg; private String source; + private Long refId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } public String getSource() { return source; @@ -38,6 +48,14 @@ public void setField(String field) { this.field = field; } + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } + @Override public String getField() { return this.field; diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java index b8f5d8945..cf86a5dfc 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java @@ -6,6 +6,7 @@ import java.util.Objects; public class MapElementUI implements ElementUI> { + private Long id; private String key; private String field; private String label; @@ -18,6 +19,23 @@ public class MapElementUI implements ElementUI> { private String validateRange; private String validateMsg; private String source; + private Long refId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } public String getSource() { return source; diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java 
b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java index 93e2da083..214de217a 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java @@ -6,6 +6,7 @@ import java.util.Map; public class OptionElementUI implements ElementUI { + private Long id; private String key; private String field; private String label; @@ -15,6 +16,23 @@ public class OptionElementUI implements ElementUI { private Integer sort; private String unit; private Boolean required; + private Long refId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } public String getKey() { return key; diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java index c19df0838..f588f5786 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java @@ -1,11 +1,17 @@ package com.webank.wedatasphere.exchangis.datasource.core.vo; +import com.fasterxml.jackson.annotation.JsonProperty; + import java.util.List; public class ExchangisJobTransformsContent { private boolean addEnable; private String type; private String sql; + + @JsonProperty("code_id") + 
private String codeId; + private List mapping; public boolean isAddEnable() { @@ -39,4 +45,12 @@ public List getMapping() { public void setMapping(List mapping) { this.mapping = mapping; } + + public String getCodeId() { + return codeId; + } + + public void setCodeId(String codeId) { + this.codeId = codeId; + } } diff --git a/exchangis-datasource/exchangis-datasource-linkis/pom.xml b/exchangis-datasource/exchangis-datasource-linkis/pom.xml index 42c6a9237..2ab7eda3d 100644 --- a/exchangis-datasource/exchangis-datasource-linkis/pom.xml +++ b/exchangis-datasource/exchangis-datasource-linkis/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -20,7 +20,7 @@ com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java index 610e063ba..5bbe000e2 100644 --- a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java @@ -1,13 +1,14 @@ package com.webank.wedatasphere.exchangis.datasource.linkis.service; +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; -import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException; import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; import 
com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient; -//import com.webank.wedatasphere.exchangis.datasource.linkis.partition.MetadataGetPartitionsResult; +import com.webank.wedatasphere.exchangis.datasource.linkis.request.MetadataGetConnInfoAction; import com.webank.wedatasphere.exchangis.datasource.linkis.request.MetadataGetPartitionPropsAction; +import com.webank.wedatasphere.exchangis.datasource.linkis.response.MetadataGetConnInfoResult; import com.webank.wedatasphere.exchangis.datasource.linkis.response.MetadataGetPartitionPropsResult; import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceOperation; import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceRpcDispatcher; @@ -18,12 +19,9 @@ import org.apache.linkis.datasource.client.response.MetadataGetColumnsResult; import org.apache.linkis.datasource.client.response.MetadataGetPartitionsResult; import org.apache.linkis.datasource.client.response.MetadataGetTablePropsResult; -import org.apache.linkis.metadatamanager.common.domain.MetaColumnInfo; +import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; +import java.util.*; import static com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode.*; @@ -32,7 +30,8 @@ */ public class LinkisMetadataInfoService extends LinkisDataSourceServiceRpcDispatcher implements MetadataInfoService { - + // TODO define in properties file + private static final String LOCAL_HDFS_NAME = ".LOCAL_HDFS"; @Override public Class getClientClass() { return LinkisMetaDataRemoteClient.class; @@ -98,5 +97,17 @@ public List getColumns(String userName, Long dataSourceId, String da return columns; } + @Override + public Map getLocalHdfsInfo(String uri) 
throws ExchangisDataSourceException{ + Map query = new HashMap<>(); + query.put("uri", uri); + MetadataGetConnInfoResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> { + MetadataGetConnInfoAction action = new MetadataGetConnInfoAction(LOCAL_HDFS_NAME, LINKIS_RPC_CLIENT_SYSTEM.getValue(), query); + action.setUser(EnvironmentUtils.getJvmUser()); + return action; + }), CLIENT_METADATA_GET_PARTITION.getCode(), "getLocalHdfsInfo"); + return result.getInfo(); + } + } diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala index 7c25d8728..a8126a04c 100644 --- a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala @@ -1,5 +1,6 @@ package com.webank.wedatasphere.exchangis.datasource.linkis +import com.webank.wedatasphere.exchangis.common.linkis.ClientConfiguration import org.apache.linkis.datasource.client.impl.{LinkisDataSourceRemoteClient, LinkisMetaDataRemoteClient} import org.apache.linkis.datasource.client.request._ import org.apache.linkis.datasource.client.response._ @@ -12,7 +13,8 @@ import java.util.concurrent.TimeUnit object ExchangisLinkisRemoteClient { //Linkis Datasource Client Config - val serverUrl: String = ExchangisDataSourceConfiguration.SERVER_URL.getValue + val serverUrl: String = ClientConfiguration.LINKIS_SERVER_URL.getValue + val authTokenValue: String = ClientConfiguration.LINKIS_TOKEN_VALUE.getValue val connectionTimeout: lang.Long = ExchangisDataSourceConfiguration.CONNECTION_TIMEOUT.getValue val 
discoveryEnabled: lang.Boolean = ExchangisDataSourceConfiguration.DISCOVERY_ENABLED.getValue val discoveryFrequencyPeriod: lang.Long = ExchangisDataSourceConfiguration.DISCOVERY_FREQUENCY_PERIOD.getValue @@ -20,8 +22,6 @@ object ExchangisLinkisRemoteClient { val maxConnectionSize: Integer = ExchangisDataSourceConfiguration.MAX_CONNECTION_SIZE.getValue val retryEnabled: lang.Boolean = ExchangisDataSourceConfiguration.RETRY_ENABLED.getValue val readTimeout: lang.Long = ExchangisDataSourceConfiguration.READ_TIMEOUT.getValue - val authTokenKey: String = ExchangisDataSourceConfiguration.AUTHTOKEN_KEY.getValue - val authTokenValue: String = ExchangisDataSourceConfiguration.AUTHTOKEN_VALUE.getValue val dwsVersion: String = ExchangisDataSourceConfiguration.DWS_VERSION.getValue @@ -50,7 +50,6 @@ object ExchangisLinkisRemoteClient { .retryEnabled(retryEnabled) .readTimeout(readTimeout) .setAuthenticationStrategy(new TokenAuthenticationStrategy()) - .setAuthTokenKey(authTokenKey) .setAuthTokenValue(authTokenValue) .setDWSVersion(dwsVersion) .build() diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetConnInfoAction.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetConnInfoAction.scala new file mode 100644 index 000000000..d8050a648 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetConnInfoAction.scala @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.request + +import org.apache.linkis.datasource.client.request.DataSourceAction +import org.apache.linkis.httpclient.request.GetAction +import java.util +import scala.collection.JavaConverters._ +/** + * Get connection info action + */ +class MetadataGetConnInfoAction(dataSourceName: String, system: String, query: 
util.Map[String, Any]) extends GetAction with DataSourceAction{ + + setParameter("dataSourceName", dataSourceName); + setParameter("system", system); + + Option(query) match { + case Some(queryParams) => + queryParams.asScala.foreach(param => setParameter(param._1, param._2)) + case _ => + } + private var user: String = _ + + override def suffixURLs: Array[String] = Array("metadataQuery", "getConnectionInfo") + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = this.user +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetConnInfoResult.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetConnInfoResult.scala new file mode 100644 index 000000000..8c5402adc --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetConnInfoResult.scala @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import scala.beans.BeanProperty +import java.util +/** + * Result of get connection info + */ +@DWSHttpMessageResult("/api/rest_j/v\\d+/metadataQuery/getConnectionInfo") +class MetadataGetConnInfoResult extends DWSResult{ + @BeanProperty var info: util.Map[String, String] = _ +} diff --git a/exchangis-datasource/exchangis-datasource-loader/pom.xml b/exchangis-datasource/exchangis-datasource-loader/pom.xml index 6a16f934c..30ebb07ad 100644 --- a/exchangis-datasource/exchangis-datasource-loader/pom.xml +++ b/exchangis-datasource/exchangis-datasource-loader/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -20,7 +20,7 @@ 
com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java index 7e147d16d..50f6f59e3 100644 --- a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java @@ -7,6 +7,7 @@ import com.webank.wedatasphere.exchangis.datasource.loader.clazzloader.ExchangisDataSourceClassLoader; import com.webank.wedatasphere.exchangis.datasource.loader.utils.ExceptionHelper; import com.webank.wedatasphere.exchangis.datasource.loader.utils.ExtDsUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.linkis.common.exception.ErrorException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,8 +38,10 @@ public void setContext(ExchangisDataSourceContext context) { public void init(MapperHook mapperHook) throws Exception { // 初始化磁盘扫描加载 ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); - String loadClassPath = Objects.requireNonNull(currentClassLoader.getResource("")).getPath(); -// String libPathUrl = loadClassPath + ".." + File.separator + ".." + File.separator + EXCHANGIS_DIR_NAME; + String loadClassPath = Objects.requireNonNull(currentClassLoader.getResource(EXCHANGIS_DIR_NAME)).getPath(); + if (StringUtils.endsWith(loadClassPath, File.separator)) { + loadClassPath = loadClassPath + File.separator; + } String libPathUrl = loadClassPath + ".." 
+ File.separator + EXCHANGIS_DIR_NAME; LOGGER.info("libPath url is {}", libPathUrl); List jars = ExtDsUtils.getJarsUrlsOfPath(libPathUrl); diff --git a/exchangis-datasource/exchangis-datasource-server/pom.xml b/exchangis-datasource/exchangis-datasource-server/pom.xml index f929e0a42..c41abaa25 100644 --- a/exchangis-datasource/exchangis-datasource-server/pom.xml +++ b/exchangis-datasource/exchangis-datasource-server/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -20,13 +20,13 @@ com.webank.wedatasphere.exchangis exchangis-datasource-service - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-datasource-loader - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml b/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml index 785dec1f4..997641614 100644 --- a/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml +++ b/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml @@ -59,6 +59,42 @@ 0755 exchangis-extds + + ${basedir}/../extension-datasources/exchangis-datasource-ext-elasticsearch/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-mongodb/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-oracle/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java index 00e6b5c2d..2e4bdd724 100644 --- 
a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java +++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java @@ -1,12 +1,9 @@ package com.webank.wedatasphere.exchangis.datasource.server.restful.api; -import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.common.UserUtils; import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; -import com.webank.wedatasphere.exchangis.datasource.core.ui.builder.ElementUIFactory; import com.webank.wedatasphere.exchangis.datasource.service.DataSourceRenderService; -import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; import org.apache.linkis.server.Message; -import org.apache.linkis.server.security.SecurityFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.bind.annotation.*; @@ -33,7 +30,7 @@ public Message partition(@PathVariable("elementType") String type, @RequestParam("dataSourceId") Long dataSourceId, @RequestParam("database") String database, @RequestParam("table") String table, HttpServletRequest request){ - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); ElementUI.Type uiType; try { uiType = ElementUI.Type.valueOf(type.toUpperCase(Locale.ROOT)); diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java index 26803afb6..474a94439 100644 --- 
a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java +++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java @@ -1,5 +1,9 @@ package com.webank.wedatasphere.exchangis.datasource.server.restful.api; +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; import com.webank.wedatasphere.exchangis.datasource.service.ExchangisDataSourceService; @@ -7,12 +11,12 @@ import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceQueryVO; import com.webank.wedatasphere.exchangis.datasource.vo.FieldMappingVO; import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.validation.BindingResult; import org.springframework.validation.FieldError; -import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletRequest; @@ -40,10 +44,14 @@ public ExchangisDataSourceRestfulApi(ExchangisDataSourceService exchangisDataSou // list all datasource types @RequestMapping( value = "/type", method = RequestMethod.GET) - public Message listDataSourceTypes(HttpServletRequest request) throws Exception { + public Message listDataSourceTypes(HttpServletRequest request, + @RequestParam(value = "engineType", required = false) String 
engineType, + @RequestParam(value = "direct", required = false) String direct, + @RequestParam(value = "sourceType", required = false) String sourceType) throws Exception { Message message = null; + LOG.info("engineType:{}, direct:{}, sourceType:{}", engineType, direct, sourceType); try{ - message = exchangisDataSourceService.listDataSources(request); + message = exchangisDataSourceService.listDataSources(request, engineType, direct, sourceType); } catch (ExchangisDataSourceException e) { String errorMessage = "Error occur while list datasource type"; LOG.error(errorMessage, e); @@ -152,6 +160,8 @@ public Message getDataSourceVersionsById(HttpServletRequest request, @PathVariab @RequestMapping( value = "", method = RequestMethod.POST) public Message create(/*@PathParam("type") String type, */@Valid @RequestBody DataSourceCreateVO dataSourceCreateVO, BindingResult bindingResult, HttpServletRequest request ) throws Exception { Message message = new Message(); + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + "dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + "label: " + dataSourceCreateVO.getLabels()); if(bindingResult.hasErrors()){ List fieldErrors = bindingResult.getFieldErrors(); @@ -176,6 +186,7 @@ public Message create(/*@PathParam("type") String type, */@Valid @RequestBody Da } } } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.DATASOURCE,"0", "DataSource name is: " + dataSourceCreateVO.getDataSourceName(), OperateTypeEnum.CREATE,request); return message; } @@ -221,6 +232,8 @@ public Message getDataSourceConnectParamsById(HttpServletRequest request, @PathV public Message update(HttpServletRequest request,/* @PathParam("type") String type, */@PathVariable("id") Long id, @Valid @RequestBody DataSourceCreateVO dataSourceCreateVO, BindingResult bindingResult) throws Exception { Message message = new 
Message(); + String oringinUser = SecurityFilter.getLoginUsername(request); + String loginUser = UserUtils.getLoginUser(request); LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + "dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + "label: " + dataSourceCreateVO.getLabels()); if(bindingResult.hasErrors()){ List fieldErrors = bindingResult.getFieldErrors(); @@ -246,6 +259,7 @@ public Message update(HttpServletRequest request,/* @PathParam("type") String ty } } } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.DATASOURCE, id.toString(), "DataSource name is: " + dataSourceCreateVO.getDataSourceName(), OperateTypeEnum.UPDATE,request); return message; } @@ -255,6 +269,8 @@ public Message update(HttpServletRequest request,/* @PathParam("type") String ty public Message publishDataSource(HttpServletRequest request,/* @PathParam("type") String type, */@PathVariable("id") Long id, @PathVariable("version") Long version) throws Exception { Message message = null; + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); try{ message = exchangisDataSourceService.publishDataSource(request, /*type, */id, version); } catch (ExchangisDataSourceException e) { @@ -270,6 +286,7 @@ public Message publishDataSource(HttpServletRequest request,/* @PathParam("type" message = Message.error("Publish datasource failed (发布数据源失败)"); } } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.DATASOURCE, id.toString(), "DataSource publish", OperateTypeEnum.PUBLISH, request); return message; } @@ -278,6 +295,8 @@ public Message publishDataSource(HttpServletRequest request,/* @PathParam("type" @RequestMapping( value = "/{id}/expire", method = RequestMethod.PUT) public Message expireDataSource(HttpServletRequest request,/* @PathParam("type") String type, */@PathVariable("id") Long id) throws Exception { Message message = null; + String oringinUser = 
SecurityFilter.getLoginUsername(request); + String loginUser = UserUtils.getLoginUser(request); try{ message = exchangisDataSourceService.expireDataSource(request, /*type, */id); } catch (ExchangisDataSourceException e) { @@ -285,6 +304,7 @@ public Message expireDataSource(HttpServletRequest request,/* @PathParam("type") LOG.error(errorMessage, e); message = Message.error("过期数据源失败"); } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.DATASOURCE, id.toString(), "DataSource expire", OperateTypeEnum.PUBLISH, request); return message; } @@ -338,6 +358,8 @@ public Message testConnectByMap(HttpServletRequest request,/* @PathParam("type") @RequestMapping( value = "/{id}", method = RequestMethod.DELETE) public Message delete(HttpServletRequest request, /*@PathParam("type") String type, */@PathVariable("id") Long id) throws Exception { Message message = null; + String oringinUser = SecurityFilter.getLoginUsername(request); + String loginUser = UserUtils.getLoginUser(request); try{ message = exchangisDataSourceService.deleteDataSource(request, /*type, */id); } catch (ExchangisDataSourceException e) { @@ -345,6 +367,7 @@ public Message delete(HttpServletRequest request, /*@PathParam("type") String ty LOG.error(errorMessage, e); message = Message.error("删除数据源失败,存在引用依赖"); } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.DATASOURCE, id.toString(), "DataSource delete", OperateTypeEnum.DELETE, request); return message; } @@ -405,6 +428,35 @@ public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, @ return message; } + @RequestMapping( value = "/tools/encrypt", method = RequestMethod.POST) + public Message sourceStrEncrypt(HttpServletRequest request, @RequestBody Map params, @QueryParam(value = "encryStr") String encryStr) throws Exception { + Message message = null; + try{ + LOG.info("Encrypt params is: {}", params); + message = exchangisDataSourceService.encryptConnectInfo((String) params.get("encryStr")); + //message = 
Message.ok().data("encryStr", "owwonowoww"); + } catch (Exception e) { + String errorMessage = "Encrypted string failed"; + LOG.error(errorMessage, e); + message = Message.error("加密字符串失败"); + } + return message; + } + + @RequestMapping( value = "/tools/decrypt", method = RequestMethod.POST) + public Message sinkStrDecrypt(HttpServletRequest request, @RequestBody Map params, @QueryParam(value = "sinkStr") String sinkStr) throws Exception { + Message message = null; + try{ + message = exchangisDataSourceService.decryptConnectInfo((String) params.get("sinkStr")); + //message = Message.ok().data("encryStr", "owwonowoww"); + } catch (Exception e) { + String errorMessage = "Encrypted string failed"; + LOG.error(errorMessage, e); + message = Message.error("加密字符串失败"); + } + return message; + } + @RequestMapping( value = "/{engine}/{type}/params/ui", method = RequestMethod.GET) public Message getParamsUI( HttpServletRequest request, diff --git a/exchangis-datasource/exchangis-datasource-service/pom.xml b/exchangis-datasource/exchangis-datasource-service/pom.xml index f4bbe8739..e41e65f03 100644 --- a/exchangis-datasource/exchangis-datasource-service/pom.xml +++ b/exchangis-datasource/exchangis-datasource-service/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -21,14 +21,20 @@ com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-job-common - 1.0.0 + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-engine-core + 1.1.2 + compile diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java new file mode 100644 index 000000000..d75996d99 --- /dev/null +++ 
b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java @@ -0,0 +1,90 @@ +package com.webank.wedatasphere.exchangis.datasource.Utils; + +import org.apache.linkis.common.conf.CommonVars; +import sun.misc.BASE64Decoder; +import sun.misc.BASE64Encoder; + +import javax.crypto.Cipher; +import java.io.IOException; +import java.security.*; +import java.security.spec.PKCS8EncodedKeySpec; +import java.security.spec.X509EncodedKeySpec; + +/** + * @author tikazhang + * @Date 2022/8/4 10:35 + */ +public class RSAUtil { + + public static final CommonVars PUBLIC_KEY_STR = CommonVars.apply("wds.exchangis.publicKeyStr", "publicKeyStr"); + public static final CommonVars PRIVATE_KEY_STR = CommonVars.apply("wds.exchangis.privateKeyStr", "privateKeyStr"); + + + //生成秘钥对 + public static KeyPair getKeyPair() throws Exception { + KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA"); + keyPairGenerator.initialize(2048); + KeyPair keyPair = keyPairGenerator.generateKeyPair(); + return keyPair; + } + + //获取公钥(Base64编码) + public static String getPublicKey(KeyPair keyPair){ + PublicKey publicKey = keyPair.getPublic(); + byte[] bytes = publicKey.getEncoded(); + return byte2Base64(bytes); + } + + //获取私钥(Base64编码) + public static String getPrivateKey(KeyPair keyPair){ + PrivateKey privateKey = keyPair.getPrivate(); + byte[] bytes = privateKey.getEncoded(); + return byte2Base64(bytes); + } + + //将Base64编码后的公钥转换成PublicKey对象 + public static PublicKey string2PublicKey(String pubStr) throws Exception{ + byte[] keyBytes = base642Byte(pubStr); + X509EncodedKeySpec keySpec = new X509EncodedKeySpec(keyBytes); + KeyFactory keyFactory = KeyFactory.getInstance("RSA"); + PublicKey publicKey = keyFactory.generatePublic(keySpec); + return publicKey; + } + + //将Base64编码后的私钥转换成PrivateKey对象 + public static PrivateKey string2PrivateKey(String priStr) throws Exception{ + byte[] keyBytes = base642Byte(priStr); + 
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(keyBytes); + KeyFactory keyFactory = KeyFactory.getInstance("RSA"); + PrivateKey privateKey = keyFactory.generatePrivate(keySpec); + return privateKey; + } + + //公钥加密 + public static byte[] publicEncrypt(byte[] content, PublicKey publicKey) throws Exception{ + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.ENCRYPT_MODE, publicKey); + byte[] bytes = cipher.doFinal(content); + return bytes; + } + + //私钥解密 + public static byte[] privateDecrypt(byte[] content, PrivateKey privateKey) throws Exception{ + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.DECRYPT_MODE, privateKey); + byte[] bytes = cipher.doFinal(content); + return bytes; + } + + //字节数组转Base64编码 + public static String byte2Base64(byte[] bytes){ + BASE64Encoder encoder = new BASE64Encoder(); + return encoder.encode(bytes); + } + + //Base64编码转字节数组 + public static byte[] base642Byte(String base64Key) throws IOException { + BASE64Decoder decoder = new BASE64Decoder(); + return decoder.decodeBuffer(base64Key); + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java index 246a6646b..2ee15e9ed 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java @@ -18,6 +18,10 @@ public class DataSourceDTO { private String modifyUser; private Date modifyTime; private boolean expire; + private boolean writeAble; + private boolean readAble; + private String authDbs; + private String authTbls; public boolean isExpire() { return expire; @@ -130,4 +134,36 @@ public String getCreateSystem() { public 
void setCreateSystem(String createSystem) { this.createSystem = createSystem; } + + public boolean isWriteAble() { + return writeAble; + } + + public void setWriteAble(boolean writeAble) { + this.writeAble = writeAble; + } + + public boolean isReadAble() { + return readAble; + } + + public void setReadAble(boolean readAble) { + this.readAble = readAble; + } + + public String getAuthDbs() { + return authDbs; + } + + public void setAuthDbs(String authDbs) { + this.authDbs = authDbs; + } + + public String getAuthTbls() { + return authTbls; + } + + public void setAuthTbls(String authTbls) { + this.authTbls = authTbls; + } } diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java index 809ea11d2..b6a2fff3a 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java @@ -7,11 +7,13 @@ public class ExchangisDataSourceDTO { private String option; private String description; private String icon; + private String struct_classifier; - public ExchangisDataSourceDTO(String id, String classifier, String name) { + public ExchangisDataSourceDTO(String id, String classifier, String name, String struct_classifier) { this.id = id; this.classifier = classifier; this.name = name; + this.struct_classifier = struct_classifier; } public String getId() { @@ -49,4 +51,12 @@ public String getDescription() { public String getIcon() { return icon; } + + public String getStruct_classifier() { + return struct_classifier; + } + + public void setStruct_classifier(String struct_classifier) { + this.struct_classifier = 
struct_classifier; + } } diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java index 75faf4f12..8688554be 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java @@ -5,6 +5,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; +import com.webank.wedatasphere.exchangis.common.UserUtils; import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; @@ -23,7 +24,6 @@ import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; import org.apache.linkis.datasource.client.request.GetInfoByDataSourceIdAction; import org.apache.linkis.httpclient.response.Result; -import org.apache.linkis.server.security.SecurityFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,7 +64,7 @@ private ExchangisDataSourceIdsUI buildDataSourceIdsUI(ExchangisJobInfoContent co } private ExchangisDataSourceIdsUI buildDataSourceIdsUI(HttpServletRequest request, ExchangisJobInfoContent content) { - String loginUser = Optional.ofNullable(request).isPresent() ? SecurityFilter.getLoginUsername(request) : null; + String loginUser = Optional.ofNullable(request).isPresent() ? 
UserUtils.getLoginUser(request) : null; ExchangisJobDataSourcesContent dataSources = content.getDataSources(); if (Objects.isNull(dataSources)) { return null; @@ -305,6 +305,7 @@ private OptionElementUI fillOptionElementUIValue(ExchangisJobParamConfig config, } OptionElementUI ui = new OptionElementUI(); + ui.setId(config.getId()); ui.setKey(config.getConfigKey()); ui.setField(config.getUiField()); ui.setLabel(config.getUiLabel()); @@ -314,11 +315,13 @@ private OptionElementUI fillOptionElementUIValue(ExchangisJobParamConfig config, ui.setSort(config.getSort()); ui.setRequired(config.getRequired()); ui.setUnit(config.getUnit()); + ui.setRefId(config.getRefId()); return ui; } private InputElementUI fillInputElementUIValue(ExchangisJobParamConfig config, String value) { InputElementUI ui = new InputElementUI(); + ui.setId(config.getId()); ui.setKey(config.getConfigKey()); ui.setField(config.getUiField()); ui.setLabel(config.getUiLabel()); @@ -331,11 +334,13 @@ private InputElementUI fillInputElementUIValue(ExchangisJobParamConfig config, S ui.setValidateType(config.getValidateType()); ui.setValidateRange(config.getValidateRange()); ui.setValidateMsg(config.getValidateMsg()); + ui.setRefId(config.getRefId()); return ui; } private MapElementUI fillMapElementUIValue(ExchangisJobParamConfig config, Map value) { MapElementUI ui = new MapElementUI(); + ui.setId(config.getId()); ui.setKey(config.getConfigKey()); ui.setField(config.getUiField()); ui.setLabel(config.getUiLabel()); @@ -348,6 +353,7 @@ private MapElementUI fillMapElementUIValue(ExchangisJobParamConfig config, Map> getDataSourceParamsUI(String dsType, String engineAndD String[] engineDirect = engineAndDirection.split("-"); String direction = engineDirect[1]; for (ExchangisJobParamConfig paramConfig : paramConfigs) { + //skip the Optional.ofNullable(paramConfig.getConfigDirection()).ifPresent(configDirection -> { if (configDirection.equalsIgnoreCase(engineAndDirection) || 
configDirection.equalsIgnoreCase(direction)){ filteredConfigs.add(paramConfig); @@ -127,11 +137,46 @@ public List> getJobEngineSettingsUI(String engineType) { * 根据 LocalExchangisDataSourceLoader 加载到的本地的数据源与 Linkis 支持的数据源 * 做比较,筛选出可以给前端展示的数据源类型 */ - public Message listDataSources(HttpServletRequest request) throws Exception { + public Message listDataSources(HttpServletRequest request, String engineType, String direct, String sourceType) throws Exception { Collection all = this.context.all(); List dtos = new ArrayList<>(); - String userName = SecurityFilter.getLoginUsername(request); + List settingsList = this.settingsDao.getSettings(); + List engineSettings = new ArrayList<>(); + + + if (StringUtils.isEmpty(engineType)) { + engineSettings = settingsList; + } else { + EngineSettings engineSetting = new EngineSettings(); + for (int i = 0; i < settingsList.size(); i++) { + if (StringUtils.equals(settingsList.get(i).getName(), engineType.toLowerCase())) { + engineSetting = settingsList.get(i); + break; + } + } + engineSettings.add(engineSetting); + } + + Set directType = new HashSet<>(); + for (EngineSettings engineSetting: engineSettings) { + if (StringUtils.isEmpty(direct)) { + for (int i = 0; i < engineSetting.getDirectionRules().size(); i++) { + directType.add(engineSetting.getDirectionRules().get(i).getSource()); + directType.add(engineSetting.getDirectionRules().get(i).getSink()); + } + } else { + for (int i = 0; i < engineSetting.getDirectionRules().size(); i++) { + if ((StringUtils.equals(direct.toLowerCase(), "source"))) { + directType.add(engineSetting.getDirectionRules().get(i).getSource()); + } else { + directType.add(engineSetting.getDirectionRules().get(i).getSink()); + } + } + } + } + + String userName = UserUtils.getLoginUser(request); LOGGER.info("listDataSources userName: {}" + userName); // 通过 datasourcemanager 获取的数据源类型和context中的数据源通过 type 和 name 比较 // 以 exchangis 中注册了的数据源集合为准 @@ -151,17 +196,25 @@ public Message listDataSources(HttpServletRequest 
request) throws Exception { throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_GET_TYPES_ERROR.getCode(), "datasource get types null or empty"); } - List allDataSourceType = result.getAllDataSourceType(); + List allDataSourceType = new ArrayList<>(); + List dataSourceTypes = result.getAllDataSourceType(); + for ( int i = 0; i < dataSourceTypes.size(); i++) { + if (directType.contains(dataSourceTypes.get(i).getName())) { + allDataSourceType.add(dataSourceTypes.get(i)); + } + } if (Objects.isNull(allDataSourceType)) allDataSourceType = Collections.emptyList(); for (DataSourceType type : allDataSourceType) { + LOGGER.info("Current datasource Type is :{}", type.getName()); for (ExchangisDataSource item : all) { if (item.name().equalsIgnoreCase(type.getName())) { ExchangisDataSourceDTO dto = new ExchangisDataSourceDTO( type.getId(), type.getClassifier(), // item.classifier(), - item.name() + item.name(), + item.structClassifier() ); // dto.setDescription(item.description()); // dto.setIcon(item.icon()); @@ -170,7 +223,10 @@ public Message listDataSources(HttpServletRequest request) throws Exception { dto.setIcon(type.getIcon()); dto.setDescription(type.getDescription()); dto.setOption(type.getOption()); - dtos.add(dto); + if (sourceType == null || !sourceType.toLowerCase().equals(type.getName())) { + //LOGGER.info("sourceType:{}, typename: {}", sourceType.toLowerCase(), type.getName()); + dtos.add(dto); + } } } } @@ -199,7 +255,7 @@ public Message create(HttpServletRequest request, /*String type, */DataSourceCre } - String user = SecurityFilter.getLoginUsername(request); + String user = UserUtils.getLoginUser(request); LOGGER.info("createDatasource userName:" + user); ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(vo.getDataSourceTypeId()); @@ -281,7 +337,7 @@ public Message updateDataSource(HttpServletRequest request,/* String type,*/ Lon if (Strings.isNullOrEmpty(createSystem)) { throw new 
ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "parameter createSystem should not be empty"); } - String user = SecurityFilter.getLoginUsername(request); + String user = UserUtils.getLoginUser(request); LOGGER.info("updateDataSource userName:" + user); LOGGER.info("DataSourceTypeId:" + vo.getDataSourceTypeId()); @@ -345,7 +401,7 @@ public Message deleteDataSource(HttpServletRequest request, /*String type,*/ Lon QueryWrapper condition = new QueryWrapper<>(); condition.eq("source_ds_id", id).or().eq("sink_ds_id", id); - Integer inUseCount = this.exchangisJobDsBindMapper.selectCount(condition); + Long inUseCount = this.exchangisJobDsBindMapper.selectCount(condition); if (inUseCount > 0) { throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_DELETE_ERROR.getCode(), "目前存在引用依赖"); } @@ -355,7 +411,7 @@ public Message deleteDataSource(HttpServletRequest request, /*String type,*/ Lon String responseBody; try { - String user = SecurityFilter.getLoginUsername(request); + String user = UserUtils.getLoginUser(request); LOGGER.info("deleteDataSource userName:" + user); // result = dataSourceRemoteClient.deleteDataSource( // new DeleteDataSourceAction.Builder().setUser(user).setResourceId(id+"").builder() @@ -388,7 +444,7 @@ public Message queryDataSourceDBs(HttpServletRequest request, String type, Long ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(type); LinkisMetaDataRemoteClient metaDataRemoteClient = exchangisDataSource.getMetaDataRemoteClient(); - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("queryDataSourceDBs userName:" + userName); MetadataGetDatabasesResult databases; try { @@ -412,7 +468,7 @@ public Message queryDataSourceDBs(HttpServletRequest request, String type, Long } public Message queryDataSourceDBTables(HttpServletRequest request, String type, Long id, String dbName) throws 
Exception { - String user = SecurityFilter.getLoginUsername(request); + String user = UserUtils.getLoginUser(request); LOGGER.info("queryDataSourceDBTables userName:" + user); ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(type); @@ -533,7 +589,7 @@ public Message queryDataSourceDBTableFields(HttpServletRequest request, String t ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(type); LinkisMetaDataRemoteClient metaDataRemoteClient = exchangisDataSource.getMetaDataRemoteClient(); - String user = SecurityFilter.getLoginUsername(request); + String user = UserUtils.getLoginUser(request); LOGGER.info("queryDataSourceDBTableFields userName:" + user); List allColumns; try { @@ -569,14 +625,15 @@ public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo if (null == vo) { vo = new DataSourceQueryVO(); } - String username = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); LOGGER.info("queryDataSources userName:" + username); Integer page = Objects.isNull(vo.getPage()) ? 1 : vo.getPage(); Integer pageSize = Objects.isNull(vo.getPageSize()) ? 100 : vo.getPageSize(); - String dataSourceName = Objects.isNull(vo.getName()) ? "" : vo.getName(); + String dataSourceName = Objects.isNull(vo.getName()) ? 
"" : vo.getName().replace("_", "\\_"); LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); QueryDataSourceResult result; + int totalPage = 0; try { QueryDataSourceAction.Builder builder = QueryDataSourceAction.builder() .setSystem("system") @@ -599,6 +656,7 @@ public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo QueryDataSourceAction action = builder.build(); result = linkisDataSourceRemoteClient.queryDataSource(action); + totalPage = result.getTotalPage(); } catch (Exception e) { if (e instanceof ErrorException) { ErrorException ee = (ErrorException) e; @@ -613,35 +671,61 @@ public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo List allDataSource = result.getAllDataSource(); + List originDataSources = new ArrayList<>(); List dataSources = new ArrayList<>(); allDataSource.forEach(ds -> { - DataSourceDTO item = new DataSourceDTO(); - item.setId(ds.getId()); - item.setCreateIdentify(ds.getCreateIdentify()); - item.setName(ds.getDataSourceName()); - item.setType(ds.getCreateSystem()); - item.setCreateSystem(ds.getCreateSystem()); - item.setDataSourceTypeId(ds.getDataSourceTypeId()); - item.setLabels(ds.getLabels()); - item.setLabel(ds.getLabels()); - item.setDesc(ds.getDataSourceDesc()); - item.setCreateUser(ds.getCreateUser()); - item.setModifyUser(ds.getModifyUser()); - item.setModifyTime(ds.getModifyTime()); - item.setVersionId(ds.getVersionId()); - item.setExpire(ds.isExpire()); - dataSources.add(item); - }); + DataSourceDTO item = new DataSourceDTO(); + item.setId(ds.getId()); + item.setCreateIdentify(ds.getCreateIdentify()); + item.setName(ds.getDataSourceName()); + item.setType(ds.getCreateSystem()); + item.setCreateSystem(ds.getCreateSystem()); + item.setDataSourceTypeId(ds.getDataSourceTypeId()); + item.setLabels(ds.getLabels()); + item.setLabel(ds.getLabels()); + item.setDesc(ds.getDataSourceDesc()); + 
item.setCreateUser(ds.getCreateUser()); + item.setModifyUser(ds.getModifyUser()); + item.setModifyTime(ds.getModifyTime()); + item.setVersionId(ds.getVersionId()); + item.setExpire(ds.isExpire()); + item.setReadAble(true); + item.setWriteAble(true); + item.setAuthDbs(""); + item.setAuthTbls(""); + originDataSources.add(item); + }); + String direct = vo.getDirect(); + LOGGER.info("direct is: {}", direct); + LOGGER.info("originDatasource is: {}", originDataSources); + if (direct!=null) { + if ("source".equals(direct)) { + for (DataSourceDTO originDataSource : originDataSources) { + if (originDataSource.isReadAble()) { + dataSources.add(originDataSource); + } + } + } else if ("sink".equals(direct)) { + for (DataSourceDTO originDataSource : originDataSources) { + if (originDataSource.isReadAble()) { + dataSources.add(originDataSource); + } + } + } + } + else { + dataSources.addAll(originDataSources); + } Message message = Message.ok(); message.data("list", dataSources); - message.data("total", result.getTotalPage()); + message.data("total", totalPage); return message; //return Message.ok().data("list", dataSources); } public Message listAllDataSources(HttpServletRequest request, String typeName, Long typeId, Integer page, Integer pageSize) throws ExchangisDataSourceException { - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("listAllDataSources userName:" + userName); LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); @@ -699,7 +783,7 @@ public Message listAllDataSources(HttpServletRequest request, String typeName, L } public Message getDataSource(HttpServletRequest request, Long id, String versionId) throws ErrorException { - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("getDataSource userName:" + userName); GetDataSourceInfoResultDTO result; 
if (Strings.isNullOrEmpty(versionId)) { @@ -894,7 +978,7 @@ private MetadataGetTablePropsResultDTO getDatasourceMetadata(String username, Lo public Message getDataSourceVersionsById(HttpServletRequest request, Long id) throws ErrorException { LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("getDataSourceVersionsById userName:" + userName); // GetInfoByDataSourceIdResult result; GetDataSourceInfoResultDTO result; @@ -998,7 +1082,7 @@ public Message getDataSourceVersionsById(HttpServletRequest request, Long id) th public Message testConnect(HttpServletRequest request, Long id, Long version) throws ErrorException { LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("testConnect userName:" + userName); DataSourceTestConnectResult result; try { @@ -1027,7 +1111,7 @@ public Message testConnect(HttpServletRequest request, Long id, Long version) th public Message testConnectByVo(HttpServletRequest request, DataSourceCreateVO vo) throws ErrorException { LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("testConnect userName:" + userName); Map json; @@ -1065,7 +1149,7 @@ public Message testConnectByVo(HttpServletRequest request, DataSourceCreateVO vo public Message publishDataSource(HttpServletRequest request, Long id, Long version) throws ErrorException { LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); - String 
userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("publishDataSource userName:" + userName); PublishDataSourceVersionResult result; try { @@ -1092,7 +1176,7 @@ public Message publishDataSource(HttpServletRequest request, Long id, Long versi } public Message getDataSourceConnectParamsById(HttpServletRequest request, Long id) throws ErrorException { - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("getDataSourceConnectParamsById userName:" + userName); GetConnectParamsByDataSourceIdResult result = getDataSourceConnectParamsById(userName, id); return Message.ok().data("info", Objects.isNull(result.getConnectParams()) ? null : result.getConnectParams()); @@ -1103,7 +1187,7 @@ public Message expireDataSource(HttpServletRequest request, Long id) throws Erro LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); String responseBody; - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("getDataSourceConnectParamsById userName:" + userName); // ExpireDataSourceResult result; try { @@ -1166,9 +1250,10 @@ public Message getDataSourceKeyDefine(HttpServletRequest request, Long dataSourc if (Objects.isNull(dataSourceTypeId)) { throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "dataSourceType id should not be null"); } + Message message = Message.ok(); LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); - String userName = SecurityFilter.getLoginUsername(request); + String userName = UserUtils.getLoginUser(request); LOGGER.info("getDataSourceKeyDefine userName:" + userName); GetKeyTypeDatasourceResult result; try { @@ -1191,7 +1276,10 @@ public Message 
getDataSourceKeyDefine(HttpServletRequest request, Long dataSourc throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); } - return Message.ok().data("list", Objects.isNull(result.getKeyDefine()) ? null : result.getKeyDefine()); + message.data("list", Objects.isNull(result.getKeyDefine()) ? null : result.getKeyDefine()); + //message.data("list", result.getDataSourceParamKeyDefinitions()); + return message; + //return Message.ok().data("list", Objects.isNull(result.getKeyDefine()) ? null : result.getKeyDefine()); } public void checkDSSupportDegree(String engine, String sourceDsType, String sinkDsType) throws ExchangisDataSourceException { @@ -1241,7 +1329,7 @@ public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, F for (int i = 0; i < sourceFields.size(); i++) { DataSourceDbTableColumnDTO field = sourceFields.get(i); field.setFieldIndex(i); - field.setFieldEditable(!"HIVE".equals(vo.getSourceTypeId())); + field.setFieldEditable(!"HIVE".equals(vo.getSourceTypeId()) && !"ELASTICSEARCH".equals(vo.getSourceTypeId())); } message.data("sourceFields", sourceFields); @@ -1250,31 +1338,72 @@ public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, F for (int i = 0; i < sinkFields.size(); i++) { DataSourceDbTableColumnDTO field = sinkFields.get(i); // field.setFieldIndex(i); - field.setFieldEditable(!"HIVE".equals(vo.getSinkTypeId())); + field.setFieldEditable(!"HIVE".equals(vo.getSinkTypeId()) && !"ELASTICSEARCH".equals(vo.getSinkTypeId())); } message.data("sinkFields", sinkFields); + // field mapping deduction List> deductions = new ArrayList<>(); List left = sourceFields; List right = sinkFields; boolean exchanged = false; - if (containHive && "HIVE".equals(vo.getSinkTypeId())) { + if (containHive && "HIVE".equals(vo.getSourceTypeId())) { left = sinkFields; right = sourceFields; exchanged = true; } - for (int i = 0; i < left.size(); i ++){ - DataSourceDbTableColumnDTO leftElement = left.get(i); - 
DataSourceDbTableColumnDTO rightElement = right.get(i % right.size()); - Map deduction = new HashMap<>(); - deduction.put("source", exchanged ? rightElement : leftElement); - deduction.put("sink", exchanged ? leftElement : rightElement); - deduction.put("deleteEnable", true); - deductions.add(deduction); + + // source size and sink size must not be null + if (!Objects.isNull(left) && left.size() > 0) { + for (int i = 0; i < right.size(); i ++){ + DataSourceDbTableColumnDTO leftElement = left.get(i % left.size()); + DataSourceDbTableColumnDTO rightElement = right.get(i); + Map deduction = new HashMap<>(); + deduction.put("source", exchanged ? rightElement : leftElement); + deduction.put("sink", exchanged ? leftElement : rightElement); + deduction.put("deleteEnable", true); + deductions.add(deduction); + } } + message.data("deductions", deductions); + message.data("transformEnable", true); return message; } + public Message encryptConnectInfo(String encryStr) throws Exception { + if (Objects.isNull(encryStr)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "dataSourceType connect parameter show not be null"); + } + + String publicKeyStr = RSAUtil.PUBLIC_KEY_STR.getValue(); + + LOGGER.info("publicKeyStr is :{}", publicKeyStr); + PublicKey publicKey = RSAUtil.string2PublicKey(publicKeyStr); + //用公钥加密 + byte[] publicEncrypt = RSAUtil.publicEncrypt(encryStr.getBytes(), publicKey); + //加密后的内容Base64编码 + String byte2Base64 = RSAUtil.byte2Base64(publicEncrypt); + Message message = new Message(); + message.data("encryStr", byte2Base64); + return message; + } + + public Message decryptConnectInfo(String sinkStr) throws Exception { + if (Objects.isNull(sinkStr)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "dataSourceType connect parameter show not be null"); + } + + String privateKeyStr = RSAUtil.PRIVATE_KEY_STR.getValue(); + PrivateKey privateKey = 
RSAUtil.string2PrivateKey(privateKeyStr); + //加密后的内容Base64解码 + byte[] base642Byte = RSAUtil.base642Byte(sinkStr); + //用私钥解密 + byte[] privateDecrypt = RSAUtil.privateDecrypt(base642Byte, privateKey); + String decryptStr = new String(privateDecrypt); + Message message = new Message(); + message.data("decryptStr", decryptStr); + return message; + } } diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java index 397cadc5c..ff870752b 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java @@ -8,10 +8,7 @@ import org.springframework.stereotype.Service; import javax.annotation.Resource; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * Default render service @@ -41,7 +38,7 @@ public class DefaultDataSourceRenderService implements DataSourceRenderService { public ElementUI getPartitionAndRender(String userName, Long dataSourceId, String database, String table, ElementUI.Type uiType) throws ExchangisDataSourceException { List partitionKeys = metadataInfoService.getPartitionKeys(userName, dataSourceId, database, table); - Map renderParams = new HashMap<>(); + Map renderParams = new LinkedHashMap<>(); List placeHolder = Arrays.asList(DEFAULT_PLACEHOLDER); partitionKeys.forEach(partition -> renderParams.putIfAbsent(partition, placeHolder)); return elementUIFactory.createElement(uiType.name(), renderParams, Map.class); diff --git 
a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java index f603609bc..3b50117f3 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java @@ -8,6 +8,7 @@ public class DataSourceQueryVO { private Long typeId; private String typeName; private String name; + private String direct; private Map labels; public Map getLabels() { @@ -57,4 +58,12 @@ public Long getTypeId() { public void setTypeId(Long typeId) { this.typeId = typeId; } + + public String getDirect() { + return direct; + } + + public void setDirect(String direct) { + this.direct = direct; + } } diff --git a/exchangis-datasource/exchangis-datasource-streamis/pom.xml b/exchangis-datasource/exchangis-datasource-streamis/pom.xml index 760130881..906e781a2 100644 --- a/exchangis-datasource/exchangis-datasource-streamis/pom.xml +++ b/exchangis-datasource/exchangis-datasource-streamis/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -20,7 +20,7 @@ com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml new file mode 100644 index 000000000..a92fa201c --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis-datasource + com.webank.wedatasphere.exchangis + 1.1.2 + ../../pom.xml + + 4.0.0 + + 
exchangis-datasource-ext-elasticsearch + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + 1.1.2 + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java new file mode 100644 index 000000000..607af79de --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java @@ -0,0 +1,47 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.mysql; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +public class ExchangisESDataSource extends ExchangisBatchDataSource { + + @Override + public String name() { + return DataSourceType.ELASTICSEARCH.name; + } + + @Override + public String classifier() { + return Classifier.ELASTICSEARCH.name; + } + + @Override + public String structClassifier() { + return StructClassifier.NON_STRUCTURED.name; + } + + @Override + public 
String description() { + return "This is ES DataSource"; + } + + @Override + public String option() { + return "ES无结构化存储"; + } + + @Override + public String icon() { + return "icon-es"; + } + + @Override + public List getDataSourceParamConfigs() { + return super.getDataSourceParamConfigs(DataSourceType.ELASTICSEARCH.name); + } +} \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml index 1f1056628..c1e258f85 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../../pom.xml 4.0.0 @@ -22,17 +22,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java index d2547a1a6..e9ce77738 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java @@ -1,31 +1,33 @@ package com.webank.wedatasphere.exchangis.extension.datasource.hive; import 
com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; import java.util.List; public class ExchangisHiveDataSource extends ExchangisBatchDataSource { - private static final String DATASOURCE_TYPE = "HIVE"; - -// @Override -// public String id() { -// return null; -// } @Override public String name() { - return DATASOURCE_TYPE; + return DataSourceType.HIVE.name; } @Override public String classifier() { - return "大数据存储"; + return Classifier.HIVE.name; + } + + @Override + public String structClassifier() { + return StructClassifier.SEMI_STRUCTURED.name; } @Override public String description() { - return "Hive Description"; + return "This is Hive DataSource"; } @Override @@ -40,6 +42,6 @@ public String icon() { @Override public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DATASOURCE_TYPE); + return super.getDataSourceParamConfigs(DataSourceType.HIVE.name); } } diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml new file mode 100644 index 000000000..a26312d6e --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis-datasource + com.webank.wedatasphere.exchangis + 1.1.2 + ../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-mongodb + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + 1.1.2 + + + + + + + 
org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java new file mode 100644 index 000000000..269efce87 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java @@ -0,0 +1,47 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.mysql; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +public class ExchangisMongoDbDataSource extends ExchangisBatchDataSource { + + @Override + public String name() { + return DataSourceType.MONGODB.name; + } + + @Override + public String classifier() { + return Classifier.MONGODB.name; + } + + @Override + public String structClassifier() { + return StructClassifier.SEMI_STRUCTURED.name; + } + + @Override + public String description() { + return "This is MongoDB DataSource"; + } + + @Override + public String option() { + return "mongodb无结构存储"; + } + + @Override + public String icon() { + return "icon-mongodb"; + } + + @Override + public List getDataSourceParamConfigs() { + return 
super.getDataSourceParamConfigs(DataSourceType.MONGODB.name); + } +} \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml index ab4960f3a..71bb30072 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java index 49ca54d59..e05e790da 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java @@ -1,44 +1,38 @@ package com.webank.wedatasphere.exchangis.extension.datasource.mysql; import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import 
com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; import java.util.List; public class ExchangisMySQLDataSource extends ExchangisBatchDataSource { - private static final String DATASOURCE_TYPE = "MYSQL"; - -// @Override -// public String id() { -// if (null == id || id.equalsIgnoreCase("")) { -// List types = super.getDataSourceTypes("hdfs"); -// for (DataSourceType type : types) { -// if (type.getName().equalsIgnoreCase(DATASOURCE_TYPE)) { -// this.id = type.getId(); -// } -// } -// } -// return this.id; -// } @Override public String name() { - return DATASOURCE_TYPE; + return DataSourceType.MYSQL.name; } @Override public String classifier() { - return "关系型数据库"; + return Classifier.MYSQL.name; + } + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; } @Override public String description() { - return "MYSQL description"; + return "This is MySQL DataSource"; } @Override public String option() { - return "mysql数据库"; + return "MySQL数据库"; } @Override @@ -48,6 +42,6 @@ public String icon() { @Override public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DATASOURCE_TYPE); + return super.getDataSourceParamConfigs(DataSourceType.MYSQL.name); } } \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml new file mode 100644 index 000000000..e3cce71da --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis-datasource + com.webank.wedatasphere.exchangis + 1.1.2 + ../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-oracle + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + 
1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + 1.1.2 + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java new file mode 100644 index 000000000..8d3ca3adb --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java @@ -0,0 +1,51 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.oracle; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +/** + * @author jefftlin + * @create 2022-09-14 + **/ +public class ExchangisOracleDataSource extends ExchangisBatchDataSource { + @Override + public String name() { + return DataSourceType.ORACLE.name; + } + + @Override + public String description() { + return "This is Oracle DataSource"; + } + + @Override + public String option() { + return "Oracle数据库"; + } + + @Override + public String classifier() { + return Classifier.ORACLE.name; + } + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public 
String icon() { + return "icon-oracle"; + } + + @Override + public List getDataSourceParamConfigs() { + return super.getDataSourceParamConfigs(DataSourceType.ORACLE.name); + } + +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml index feafb27be..47af538ab 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml @@ -5,7 +5,7 @@ exchangis-datasource com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.0.0 + 1.1.2 diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java index bdfe439b4..c8256dddb 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java @@ -1,30 +1,37 @@ package com.webank.wedatasphere.exchangis.extension.datasource.sftp; import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; 
+import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; import java.util.List; public class ExchangisSftpDataSource extends ExchangisBatchDataSource { - private static final String DATASOURCE_TYPE = "SFTP"; @Override public String name() { - return DATASOURCE_TYPE; + return DataSourceType.SFTP.name; } @Override - public String description() { - return "sftp连接"; + public String classifier() { + return Classifier.SFTP.name; } @Override - public String option() { - return "SFTP"; + public String structClassifier() { + return StructClassifier.NON_STRUCTURED.name; } @Override - public String classifier() { + public String description() { + return "This is Sftp"; + } + + @Override + public String option() { return "SFTP"; } @@ -35,6 +42,6 @@ public String icon() { @Override public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DATASOURCE_TYPE); + return super.getDataSourceParamConfigs(DataSourceType.SFTP.name); } } diff --git a/exchangis-datasource/pom.xml b/exchangis-datasource/pom.xml index 314ffea1e..ad1bd9fae 100644 --- a/exchangis-datasource/pom.xml +++ b/exchangis-datasource/pom.xml @@ -5,13 +5,12 @@ exchangis com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 exchangis-datasource pom - 1.0.0 exchangis-datasource-core @@ -22,6 +21,9 @@ extension-datasources/exchangis-datasource-ext-mysql extension-datasources/exchangis-datasource-ext-hive extension-datasources/exchangis-datasource-ext-sftp + extension-datasources/exchangis-datasource-ext-elasticsearch + extension-datasources/exchangis-datasource-ext-mongodb + extension-datasources/exchangis-datasource-ext-oracle exchangis-datasource-server diff --git a/exchangis-engines/engineconn-plugins/datax/pom.xml b/exchangis-engines/engineconn-plugins/datax/pom.xml new file mode 100644 index 000000000..dd10e755f --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/pom.xml 
@@ -0,0 +1,204 @@ + + + + exchangis-engines + com.webank.wedatasphere.exchangis + 1.1.2 + ../../pom.xml + + 4.0.0 + + linkis-engineplugin-datax + + + 1.4.0 + 3.0.0-Plus-2 + 3.0.0 + 3.3.4 + 1.0.15 + 2.1.9 + 0.3 + 1.0.5 + 2.0 + 1.2 + 1.10 + + + + com.webank.wedatasphere.exchangis + datax-core + ${datax.engine.version} + + + hadoop-annotations + org.apache.hadoop + + + fastjson + com.alibaba + + + hadoop-common + org.apache.hadoop + + + + + net.sourceforge.javacsv + javacsv + ${csv.version} + + + com.alibaba + druid + ${druid.version} + + + org.codehaus.groovy + groovy-all + ${groovy.version} + + + io.airlift + aircompressor + ${aircompressor.version} + + + org.anarres.lzo + lzo-core + ${lzo.version} + + + org.apache.linkis + linkis-once-engineconn + ${linkis.version} + + + org.apache.linkis + linkis-engineconn-plugin-core + ${linkis.version} + + + org.apache.linkis + linkis-storage + ${linkis.version} + provided + + + org.apache.linkis + linkis-common + ${linkis.version} + + + commons-cli + commons-cli + ${commons-cli-version} + + + commons-configuration + commons-configuration + ${commons-configuration-version} + + + org.apache.linkis + linkis-computation-engineconn + 1.4.0 + + + org.apache.linkis + linkis-udf-common + 1.4.0 + + + org.apache.linkis + linkis-udf-client + 1.4.0 + + + + + + com.diffplug.spotless + spotless-maven-plugin + + + + check + + none + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + out + false + false + + src/main/assembly/distribution.xml + + + + + + + src/main/java + + **/*.xml + **/*.properties + + + + src/main/resources + + **/application.yml + **/bootstrap.yml + + + + + + + + apache.snapshots + Apache Snapshot Repository + 
https://repository.apache.org/content/repositories/snapshots/ + + true + + + + \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml b/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml new file mode 100644 index 000000000..6c517ef5b --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml @@ -0,0 +1,321 @@ + + + + datax + + dir + zip + + true + datax + + + + + + /dist/${datax.version}/lib + true + true + false + false + true + + + antlr:antlr:jar + aopalliance:aopalliance:jar + asm:asm:jar + cglib:cglib:jar + com.amazonaws:aws-java-sdk-autoscaling:jar + com.amazonaws:aws-java-sdk-core:jar + com.amazonaws:aws-java-sdk-ec2:jar + com.amazonaws:aws-java-sdk-route53:jar + com.amazonaws:aws-java-sdk-sts:jar + com.amazonaws:jmespath-java:jar + com.fasterxml.jackson.core:jackson-annotations:jar + com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar + com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar + com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar + com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-paranamer:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + com.github.andrewoma.dexx:dexx-collections:jar + com.github.vlsi.compactmap:compactmap:jar + com.google.code.findbugs:annotations:jar + com.google.code.findbugs:jsr305:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + com.google.inject:guice:jar + com.google.protobuf:protobuf-java:jar + com.netflix.archaius:archaius-core:jar + com.netflix.eureka:eureka-client:jar + 
com.netflix.eureka:eureka-core:jar + com.netflix.hystrix:hystrix-core:jar + com.netflix.netflix-commons:netflix-commons-util:jar + com.netflix.netflix-commons:netflix-eventbus:jar + com.netflix.netflix-commons:netflix-infix:jar + com.netflix.netflix-commons:netflix-statistics:jar + com.netflix.ribbon:ribbon:jar + com.netflix.ribbon:ribbon-core:jar + com.netflix.ribbon:ribbon-eureka:jar + com.netflix.ribbon:ribbon-httpclient:jar + com.netflix.ribbon:ribbon-loadbalancer:jar + com.netflix.ribbon:ribbon-transport:jar + com.netflix.servo:servo-core:jar + com.ning:async-http-client:jar + com.sun.jersey.contribs:jersey-apache-client4:jar + com.sun.jersey:jersey-client:jar + com.sun.jersey:jersey-core:jar + com.sun.jersey:jersey-json:jar + com.sun.jersey:jersey-server:jar + com.sun.jersey:jersey-servlet:jar + com.sun.xml.bind:jaxb-impl:jar + com.thoughtworks.paranamer:paranamer:jar + com.thoughtworks.xstream:xstream:jar + org.apache.linkis:linkis-common:jar + org.apache.linkis:linkis-module:jar + commons-beanutils:commons-beanutils:jar + commons-beanutils:commons-beanutils-core:jar + commons-cli:commons-cli:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-configuration:commons-configuration:jar + commons-daemon:commons-daemon:jar + commons-dbcp:commons-dbcp:jar + commons-digester:commons-digester:jar + commons-httpclient:commons-httpclient:jar + commons-io:commons-io:jar + commons-jxpath:commons-jxpath:jar + commons-lang:commons-lang:jar + commons-logging:commons-logging:jar + commons-net:commons-net:jar + commons-pool:commons-pool:jar + io.micrometer:micrometer-core:jar + io.netty:netty:jar + io.netty:netty-all:jar + io.netty:netty-buffer:jar + io.netty:netty-codec:jar + io.netty:netty-codec-http:jar + io.netty:netty-common:jar + io.netty:netty-handler:jar + io.netty:netty-transport:jar + io.netty:netty-transport-native-epoll:jar + io.reactivex:rxjava:jar + io.reactivex:rxnetty:jar + io.reactivex:rxnetty-contexts:jar + 
io.reactivex:rxnetty-servo:jar + javax.activation:activation:jar + javax.annotation:javax.annotation-api:jar + javax.inject:javax.inject:jar + javax.servlet:javax.servlet-api:jar + javax.servlet.jsp:jsp-api:jar + javax.validation:validation-api:jar + javax.websocket:javax.websocket-api:jar + javax.ws.rs:javax.ws.rs-api:jar + javax.xml.bind:jaxb-api:jar + javax.xml.stream:stax-api:jar + joda-time:joda-time:jar + log4j:log4j:jar + mysql:mysql-connector-java:jar + net.databinder.dispatch:dispatch-core_2.11:jar + net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar + org.antlr:antlr-runtime:jar + org.antlr:stringtemplate:jar + org.apache.commons:commons-compress:jar + org.apache.commons:commons-math:jar + org.apache.commons:commons-math3:jar + org.apache.curator:curator-client:jar + org.apache.curator:curator-framework:jar + org.apache.curator:curator-recipes:jar + org.apache.directory.api:api-asn1-api:jar + org.apache.directory.api:api-util:jar + org.apache.directory.server:apacheds-i18n:jar + org.apache.directory.server:apacheds-kerberos-codec:jar + org.apache.hadoop:hadoop-annotations:jar + org.apache.hadoop:hadoop-auth:jar + org.apache.hadoop:hadoop-common:jar + org.apache.hadoop:hadoop-hdfs:jar + org.apache.htrace:htrace-core:jar + org.apache.httpcomponents:httpclient:jar + org.apache.httpcomponents:httpcore:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + org.apache.zookeeper:zookeeper:jar + org.aspectj:aspectjweaver:jar + org.bouncycastle:bcpkix-jdk15on:jar + org.bouncycastle:bcprov-jdk15on:jar + org.codehaus.jackson:jackson-jaxrs:jar + org.codehaus.jackson:jackson-xc:jar + org.codehaus.jettison:jettison:jar + org.codehaus.woodstox:stax2-api:jar + org.codehaus.woodstox:woodstox-core-asl:jar + org.eclipse.jetty:jetty-annotations:jar + org.eclipse.jetty:jetty-client:jar + org.eclipse.jetty:jetty-continuation:jar + 
org.eclipse.jetty:jetty-http:jar + org.eclipse.jetty:jetty-io:jar + org.eclipse.jetty:jetty-jndi:jar + org.eclipse.jetty:jetty-plus:jar + org.eclipse.jetty:jetty-security:jar + org.eclipse.jetty:jetty-server:jar + org.eclipse.jetty:jetty-servlet:jar + org.eclipse.jetty:jetty-servlets:jar + org.eclipse.jetty:jetty-util:jar + org.eclipse.jetty:jetty-webapp:jar + org.eclipse.jetty:jetty-xml:jar + org.eclipse.jetty.websocket:javax-websocket-client-impl:jar + org.eclipse.jetty.websocket:javax-websocket-server-impl:jar + org.eclipse.jetty.websocket:websocket-api:jar + org.eclipse.jetty.websocket:websocket-client:jar + org.eclipse.jetty.websocket:websocket-common:jar + org.eclipse.jetty.websocket:websocket-server:jar + org.eclipse.jetty.websocket:websocket-servlet:jar + org.fusesource.leveldbjni:leveldbjni-all:jar + org.glassfish.hk2:class-model:jar + org.glassfish.hk2:config-types:jar + org.glassfish.hk2.external:aopalliance-repackaged:jar + org.glassfish.hk2.external:asm-all-repackaged:jar + org.glassfish.hk2.external:bean-validator:jar + org.glassfish.hk2.external:javax.inject:jar + org.glassfish.hk2:hk2:jar + org.glassfish.hk2:hk2-api:jar + org.glassfish.hk2:hk2-config:jar + org.glassfish.hk2:hk2-core:jar + org.glassfish.hk2:hk2-locator:jar + org.glassfish.hk2:hk2-runlevel:jar + org.glassfish.hk2:hk2-utils:jar + org.glassfish.hk2:osgi-resource-locator:jar + org.glassfish.hk2:spring-bridge:jar + org.glassfish.jersey.bundles:jaxrs-ri:jar + org.glassfish.jersey.bundles.repackaged:jersey-guava:jar + org.glassfish.jersey.containers:jersey-container-servlet:jar + org.glassfish.jersey.containers:jersey-container-servlet-core:jar + org.glassfish.jersey.core:jersey-client:jar + org.glassfish.jersey.core:jersey-common:jar + org.glassfish.jersey.core:jersey-server:jar + org.glassfish.jersey.ext:jersey-entity-filtering:jar + org.glassfish.jersey.ext:jersey-spring3:jar + org.glassfish.jersey.media:jersey-media-jaxb:jar + org.glassfish.jersey.media:jersey-media-json-jackson:jar + 
org.glassfish.jersey.media:jersey-media-multipart:jar + org.hdrhistogram:HdrHistogram:jar + org.javassist:javassist:jar + org.json4s:json4s-ast_2.11:jar + org.json4s:json4s-core_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.jsoup:jsoup:jar + org.jvnet.mimepull:mimepull:jar + org.jvnet:tiger-types:jar + org.latencyutils:LatencyUtils:jar + org.mortbay.jasper:apache-el:jar + org.mortbay.jetty:jetty:jar + org.mortbay.jetty:jetty-util:jar + org.ow2.asm:asm-analysis:jar + org.ow2.asm:asm-commons:jar + org.ow2.asm:asm-tree:jar + org.reflections:reflections:jar + org.scala-lang.modules:scala-parser-combinators_2.11:jar + org.scala-lang.modules:scala-xml_2.11:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:jul-to-slf4j:jar + org.slf4j:slf4j-api:jar + org.springframework.boot:spring-boot:jar + org.springframework.boot:spring-boot-actuator:jar + org.springframework.boot:spring-boot-actuator-autoconfigure:jar + org.springframework.boot:spring-boot-autoconfigure:jar + org.springframework.boot:spring-boot-starter:jar + org.springframework.boot:spring-boot-starter-actuator:jar + org.springframework.boot:spring-boot-starter-aop:jar + org.springframework.boot:spring-boot-starter-jetty:jar + org.springframework.boot:spring-boot-starter-json:jar + org.springframework.boot:spring-boot-starter-log4j2:jar + org.springframework.boot:spring-boot-starter-web:jar + org.springframework.cloud:spring-cloud-commons:jar + org.springframework.cloud:spring-cloud-config-client:jar + org.springframework.cloud:spring-cloud-context:jar + org.springframework.cloud:spring-cloud-netflix-archaius:jar + org.springframework.cloud:spring-cloud-netflix-core:jar + org.springframework.cloud:spring-cloud-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-netflix-ribbon:jar + org.springframework.cloud:spring-cloud-starter:jar + org.springframework.cloud:spring-cloud-starter-config:jar + 
org.springframework.cloud:spring-cloud-starter-eureka:jar + org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar + org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar + org.springframework.security:spring-security-crypto:jar + org.springframework.security:spring-security-rsa:jar + org.springframework:spring-aop:jar + org.springframework:spring-beans:jar + org.springframework:spring-context:jar + org.springframework:spring-core:jar + org.springframework:spring-expression:jar + org.springframework:spring-jcl:jar + org.springframework:spring-web:jar + org.springframework:spring-webmvc:jar + org.tukaani:xz:jar + org.yaml:snakeyaml:jar + software.amazon.ion:ion-java:jar + xerces:xercesImpl:jar + xmlenc:xmlenc:jar + xmlpull:xmlpull:jar + xpp3:xpp3_min:jar + + + + + + + ${basedir}/src/main/resources + + * + + 0777 + 0755 + /dist/${datax.version}/conf + unix + + + ${basedir}/target + + *.jar + + + *doc.jar + + 0777 + /plugin/${datax.version} + + + + + diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxJobExecutionException.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxJobExecutionException.java new file mode 100644 index 000000000..a92a59395 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxJobExecutionException.java @@ -0,0 +1,21 @@ +package org.apache.linkis.engineconnplugin.datax.exception; + +import org.apache.linkis.common.exception.ErrorException; + +public class DataxJobExecutionException extends ErrorException { + + public static final int ERROR_CODE = 16023; + + public DataxJobExecutionException(String message) { + super(ERROR_CODE, message); + } + + public DataxJobExecutionException(int errCode, String desc) { + super(errCode, desc); 
+ } + + public DataxJobExecutionException(String message, Throwable e) { + super(ERROR_CODE, message); + this.initCause(e); + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxPluginLoadException.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxPluginLoadException.java new file mode 100644 index 000000000..908b55bf9 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxPluginLoadException.java @@ -0,0 +1,16 @@ +package org.apache.linkis.engineconnplugin.datax.exception; + +import org.apache.linkis.common.exception.ErrorException; + +/** + * Plugin load exception + */ +public class DataxPluginLoadException extends ErrorException { + + public static final int ERROR_CODE = 16022; + + public DataxPluginLoadException(String message, String desc) { + super(ERROR_CODE, message); + + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginBmlResource.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginBmlResource.java new file mode 100644 index 000000000..dd90b03e4 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginBmlResource.java @@ -0,0 +1,36 @@ +package org.apache.linkis.engineconnplugin.datax.plugin; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +/** + * BML resources + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class PluginBmlResource extends PluginResource{ + + /** + * Resource id + */ + private String resourceId; + + /** + * Version + */ + private String version; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId 
= resourceId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginResource.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginResource.java new file mode 100644 index 000000000..d2c69d818 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginResource.java @@ -0,0 +1,62 @@ +package org.apache.linkis.engineconnplugin.datax.plugin; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +/** + * Define the plugin resource + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class PluginResource { + + /** + * Resource name + */ + protected String name; + + /** + * Resource type + */ + protected String type; + + /** + * Resource path + */ + protected String path; + + /** + * Resource creator + */ + protected String creator; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/utils/SecretUtils.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/utils/SecretUtils.java new file mode 100644 index 000000000..3fb28566b --- /dev/null +++ 
b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/utils/SecretUtils.java @@ -0,0 +1,32 @@ +package org.apache.linkis.engineconnplugin.datax.utils; + +import com.alibaba.datax.common.util.Configuration; +import org.apache.commons.lang3.StringUtils; + +import java.util.Set; + +/** + * Security utils + */ +public class SecretUtils { + /** + * Extracted from 'Engine' class + */ + public static Configuration filterSensitiveConfiguration(Configuration configuration){ + Set keys = configuration.getKeys(); + String[] sensitiveSuffixes = new String[]{"password", "accessKey", "path"}; + for (final String key : keys) { + boolean isSensitive = false; + for(String suffix : sensitiveSuffixes){ + if(StringUtils.endsWithIgnoreCase(key, suffix)){ + isSensitive = true; + break; + } + } + if (isSensitive && configuration.get(key) instanceof String) { + configuration.set(key, configuration.getString(key).replaceAll("[\\s\\S]", "*")); + } + } + return configuration; + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/resources/linkis-engineconn.properties b/exchangis-engines/engineconn-plugins/datax/src/main/resources/linkis-engineconn.properties new file mode 100644 index 000000000..e836a184a --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/resources/linkis-engineconn.properties @@ -0,0 +1,44 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +wds.linkis.server.version=v1 +#wds.linkis.engineconn.debug.enable=true +#wds.linkis.keytab.enable=true +wds.linkis.engineconn.plugin.default.class=org.apache.linkis.engineconnplugin.datax.DataxEngineConnPlugin +wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.once.executor.hook.OnceEngineConnHook + +# Core configuration +#core.transport.type=record +#core.transport.channel.speed.byte=5242880 +#core.transport.channel.speed.record=10000 +#core.transport.channel.flowControlInterval=20 +#core.transport.channel.capacity=512 +#core.transport.channel.byteCapacity=67108864 +#core.transport.record.channel.class=com.alibaba.datax.core.transport.channel.memory.MemoryRecordChannel +#core.transport.record.exchanger.class=com.alibaba.datax.core.plugin.BufferedRecordExchanger +#core.transport.record.exchanger.bufferSize=32 +#core.transport.stream.channel.class=com.webank.wedatasphere.exchangis.datax.core.transport.channel.memory.MemoryStreamChannel +#core.transport.stream.channel.bufferSize=8192 +#core.container.job.reportInterval=5000 +#core.container.job.sleepInterval=5000 +#core.container.taskGroup.reportInterval=5000 +#core.container.taskGroup.sleepInterval=100 +#core.container.taskGroup.channel=5 +#core.container.trace.enable=false +#core.statistics.collector.plugin.taskClass=com.alibaba.datax.core.statistics.plugin.task.StdoutPluginCollector +#core.statistics.collector.plugin.maxDirtyNumber=10 +#core.processor.loader.plugin.class=com.webank.wedatasphere.exchangis.datax.core.processor.loader.plugin.DefaultPluginProcessorLoader 
+#core.processor.loader.plugin.package=com.webank.wedatasphere.exchangis.datax.core.processor.impl +#core.processor.loader.plugin.sourcePath=proc/src \ No newline at end of file diff --git a/exchangis-plugins/engine/sqoop/src/main/resources/log4j2.xml b/exchangis-engines/engineconn-plugins/datax/src/main/resources/log4j2.xml similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/resources/log4j2.xml rename to exchangis-engines/engineconn-plugins/datax/src/main/resources/log4j2.xml diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/DataxEngineConnPlugin.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/DataxEngineConnPlugin.scala new file mode 100644 index 000000000..996e4da90 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/DataxEngineConnPlugin.scala @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax + +import org.apache.linkis.engineconnplugin.datax.factory.DataxEngineConnFactory +import org.apache.linkis.engineconnplugin.datax.launch.DataxEngineConnLaunchBuilder +import org.apache.linkis.engineconnplugin.datax.resource.DataxEngineConnResourceFactory +import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin +import org.apache.linkis.manager.engineplugin.common.creation.EngineConnFactory +import org.apache.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder +import org.apache.linkis.manager.engineplugin.common.resource.EngineResourceFactory +import org.apache.linkis.manager.label.entity.Label + +import java.util.List +import java.util.ArrayList + +class DataxEngineConnPlugin extends EngineConnPlugin { + + private var engineResourceFactory: EngineResourceFactory = _ + private val engineResourceFactoryLocker = new Array[Byte](0) + + private var engineConnFactory: EngineConnFactory = _ + private val engineConnFactoryLocker = new Array[Byte](0) + + override def init(params: java.util.Map[_root_.scala.Predef.String, scala.AnyRef]): Unit = {} + + override def getEngineResourceFactory: EngineResourceFactory = { + if (null == engineResourceFactory) engineResourceFactoryLocker.synchronized { + if (null == engineResourceFactory) { + engineResourceFactory = new DataxEngineConnResourceFactory + } + } + engineResourceFactory + } + + override def getEngineConnLaunchBuilder: EngineConnLaunchBuilder = { + new DataxEngineConnLaunchBuilder + } + + override def getEngineConnFactory: EngineConnFactory = { + if (null == engineConnFactory) engineConnFactoryLocker.synchronized { + if (null == engineConnFactory) { + engineConnFactory = new DataxEngineConnFactory + } + } + engineConnFactory + } + + override def getDefaultLabels: List[Label[_]] = { + new ArrayList[Label[_]]() + } +} diff --git 
a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxConfiguration.scala new file mode 100644 index 000000000..1c84c5499 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxConfiguration.scala @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.conf.{CommonVars, TimeType} + +/** + * Datax basic config + */ +object DataxConfiguration { + + val CONFIG_PREFIX: String = "_datax_." 
+ /** + * Environment config name + */ + val ENV_CONFIG_NAME: CommonVars[String] = CommonVars[String]("datax.env.config.name", "entry.environment") + + /** + * Fetch interval + */ + val STATUS_FETCH_INTERVAL: CommonVars[TimeType] = CommonVars("wds.linkis.engineconn.datax.fetch.status.interval", new TimeType("5s")) + + /** + * Execution id + */ + val JOB_EXECUTION_ID: CommonVars[String] = CommonVars[String]("wds.linkis.engineconn.datax.execution.id", "") + + /** + * Plugin resources + */ + val PLUGIN_RESOURCES: CommonVars[String] = CommonVars[String]("wds.linkis.engineconn.datax.bml.resources", "") + + /** + * Security manager + */ + val SECURITY_MANAGER_CLASSES: CommonVars[String] = CommonVars[String]("wds.linkis.engineconn.datax.security.manager", "") +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala new file mode 100644 index 000000000..01907434f --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala @@ -0,0 +1,160 @@ +package org.apache.linkis.engineconnplugin.datax.config + +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.linkis.common.conf.CommonVars + +/** + * Core configuration in datax + */ +object DataxCoreConfiguration { + + /** + * Format for 'datetime' column + */ + val COMMON_COLUMN_DATETIME_FORMAT: CommonVars[String] = CommonVars("common.column.datetimeFormat", "yyyy-MM-dd HH:mm:ss"); + + /** + * Format for 'time' column + */ + val COMMON_COLUMN_TIME_FORMAT: CommonVars[String] = CommonVars("common.column.timeFormat", "HH:mm:ss") + + /** + * Format for 'date' column + */ + val COMMON_COLUMN_DATE_FORMAT: CommonVars[String] = CommonVars("common.column.dateFormat", "yyyy-MM-dd") + + /** + * 
Extra format for 'date','datetime' and 'time' + */ + val COMMON_COLUMN_EXTRA_FORMATS: CommonVars[String] = CommonVars("common.column.extraFormats", "yyyy-MM-dd") + + /** + * TimeZone + */ + val COMMON_COLUMN_TIMEZONE: CommonVars[String] = CommonVars("common.column.timeZone", "GMT+8") + + /** + * Encoding + */ + val COMMON_COLUMN_ENCODING: CommonVars[String] = CommonVars("common.column.encoding", "utf-8") + + /** + * Container model + */ + val CORE_CONTAINER_MODEL: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_MODEL, "job") + + /** + * Transport type + */ + val CORE_TRANSPORT_TYPE: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_TYPE, "record") + + /** + * Channel speed in byte + */ + val CORE_TRANSPORT_CHANNEL_SPEED_BYTE: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, 5242880) + + /** + * Channel speed in record + */ + val CORE_TRANSPORT_CHANNEL_SPEED_RECORD: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, 10000) + + /** + * Flow control interval + */ + val CORE_TRANSPORT_CHANNEL_FLOW_CONTROL_INTERNAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_FLOWCONTROLINTERVAL, 20) + + /** + * Channel capacity in record(s) + */ + val CORE_TRANSPORT_CHANNEL_CAPACITY: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY, 512) + + /** + * Channel capacity in byte(s) + */ + val CORE_TRANSPORT_CHANNEL_BYTE_CAPACITY: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE, 67108864) + + /** + * Record channel class + */ + val CORE_TRANSPORT_RECORD_CHANNEL_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_CHANNEL_CLASS, "com.alibaba.datax.core.transport.channel.memory.MemoryRecordChannel") + + /** + * Record exchanger class + */ + val CORE_TRANSPORT_RECORD_EXCHANGER_CLASS: CommonVars[String] = CommonVars("core.transport.record.exchanger.class", 
"com.alibaba.datax.core.plugin.BufferedRecordExchanger") + + /** + * Buffer size of record exchanger + */ + val CORE_TRANSPORT_RECORD_EXCHANGER_BUFFER_SIZE: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_EXCHANGER_BUFFERSIZE, 32) + + /** + * Stream channel class + */ + val CORE_TRANSPORT_STREAM_CHANNEL_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_STREAM_CHANNEL_CLASS, "com.webank.wedatasphere.exchangis.datax.core.transport.channel.memory.MemoryStreamChannel") + + /** + * Block size of stream channel + */ + val CORE_TRANSPORT_STREAM_CHANNEL_BLOCK_SIZE: CommonVars[Int] = CommonVars("core.transport.stream.channel.bufferSize", 8192) + + /** + * Job report interval + */ + val CORE_CONTAINER_JOB_REPORT_INTERVAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_JOB_REPORTINTERVAL, 5000) + + /** + * Job sleep interval + */ + val CORE_CONTAINER_JOB_SLEEP_INTERNAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_JOB_SLEEPINTERVAL, 5000) + + /** + * Task group report interval + */ + val CORE_CONTAINER_TASK_GROUP_REPORT_INTERVAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_REPORTINTERVAL, 5000) + + /** + * Task group sleep interval + */ + val CORE_CONTAINER_TASK_GROUP_SLEEP_INTERNAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_SLEEPINTERVAL, 100) + + /** + * Channel number for task group + */ + val CORE_CONTAINER_TASK_GROUP_CHANNEL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, 5) + + /** + * Trace switch + */ + val CORE_CONTAINER_TRACE_ENABLE: CommonVars[Boolean] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, false) + + /** + * Plugin collector task class + */ + val CORE_STATISTICS_COLLECTOR_PLUGIN_TASK_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_TASKCLASS, "com.alibaba.datax.core.statistics.plugin.task.StdoutPluginCollector") + + /** + * Max 
dirty record number + */ + val CORE_STATISTICS_COLLECTOR_PLUGIN_MAX_DIRTY_NUMBER: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM, 10) + + /** + * Reporter class (EC use DataxEngineConnCommunicateReporter) + */ + val CORE_STATISTICS_REPORTER_PLUGIN_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS, "org.apache.linkis.engineconnplugin.datax.report.DataxEngineConnCommunicateReporter") + /** + * Processor loader plugin class + */ + val CORE_PROCESSOR_LOADER_PLUGIN_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_PROCESSOR_LOADER_PLUGIN_CLASS, "com.webank.wedatasphere.exchangis.datax.core.processor.loader.plugin.DefaultPluginProcessorLoader") + + /** + * Package name of processor loader plugin + */ + val CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE, "com.webank.wedatasphere.exchangis.datax.core.processor.impl") + + /** + * Source path for processor loader plugin + */ + val CORE_PROCESSOR_LOADER_PLUGIN_SOURCE_PATH: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_PROCESSOR_LODAER_PLUGIN_SOURCEPATH, "proc/src") + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxResourceConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxResourceConfiguration.scala new file mode 100644 index 000000000..3f6ef46f4 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxResourceConfiguration.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.conf.CommonVars + +object DataxResourceConfiguration { + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSettingConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSettingConfiguration.scala new file mode 100644 index 000000000..25e2ed4d2 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSettingConfiguration.scala @@ -0,0 +1,49 @@ +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.conf.CommonVars + +/** + * Datax setting configuration + */ +object DataxSettingConfiguration { + + /** + * Sync meta + */ + val SETTING_SYNC_META: CommonVars[Boolean] = CommonVars("setting.syncMeta", false) + + /** + * Transport type + */ + val SETTING_TRANSPORT_TYPE: CommonVars[String] = CommonVars("setting.transport.type", "record") + + /** + * Key version for encrypt + */ + val SETTING_KEY_VERSION: CommonVars[String] = CommonVars("setting.keyVersion", "") + + /** + * Speed limit in byte(s) + */ + val SETTING_SPEED_BYTE: CommonVars[Int] = CommonVars("setting.speed.byte", 1048576) + + /** + * Speed limit in record(s) + */ + val 
SETTING_SPEED_RECORD: CommonVars[Int] = CommonVars("setting.speed.record", 100000) + + /** + * Speed limit in channel(s) + */ + val SETTING_SPEED_CHANNEL: CommonVars[Int] = CommonVars("setting.speed.channel", 0) + + /** + * Error limit in record + */ + val SETTING_ERROR_LIMIT_RECORD: CommonVars[Int] = CommonVars("setting.errorLimit.record", 0) + + /** + * If use processor + */ + val SETTING_USE_PROCESSOR: CommonVars[Boolean] = CommonVars("setting.useProcessor", false) +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxEngineConnContext.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxEngineConnContext.scala new file mode 100644 index 000000000..10bd12b16 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxEngineConnContext.scala @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.context + +import com.alibaba.datax.common.util.Configuration +import java.util +/** + * Datax engine conn context + */ +class DataxEngineConnContext(settings: Configuration, coreConfig: Configuration, pluginDefinitions: util.List[DataxPluginDefinition]) { + + /** + * Plugin definition + * @return + */ + def getPluginDefinitions: util.List[DataxPluginDefinition] = pluginDefinitions + + /** + * Settings configuration + * @return + */ + def getSettings: Configuration = settings + + /** + * Core configuration + * @return + */ + def getCoreConfig: Configuration = coreConfig +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxPluginDefinition.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxPluginDefinition.scala new file mode 100644 index 000000000..fa0726768 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxPluginDefinition.scala @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.context + +import com.alibaba.datax.common.util.Configuration + +/** + * Plugin definitions + * @param pluginName plugin name + * @param pluginPath plugin path + * @param pluginConf plugin conf + */ +class DataxPluginDefinition(pluginName: String, pluginPath: String, pluginConf: Configuration) { + + /** + * Plugin name + * @return + */ + def getPluginName: String = pluginName + + /** + * Plugin path + * @return + */ + def getPluginPath: String = pluginPath + + /** + * Plugin configuration + * @return + */ + def getPluginConf: Configuration = pluginConf +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala new file mode 100644 index 000000000..9cec013ac --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala @@ -0,0 +1,328 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.executor + +import com.alibaba.datax.common.element.ColumnCast +import com.alibaba.datax.common.exception.DataXException +import com.alibaba.datax.common.statistics.{PerfTrace, VMInfo} +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.AbstractContainer +import com.alibaba.datax.core.util.container.{CoreConstant, LoadUtil} +import com.alibaba.datax.core.util.{ConfigurationValidate, ExceptionTracker, FrameworkErrorCode, SecretUtil} +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.utils.{ClassUtils, Utils} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.once.executor.{OnceExecutorExecutionContext, OperableOnceExecutor} +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.exception.{DataxJobExecutionException, DataxPluginLoadException} +import org.apache.linkis.engineconnplugin.datax.executor.DataxContainerOnceExecutor.{CODE_NAME, JOB_CONTENT_NAME} +import org.apache.linkis.engineconnplugin.datax.report.{BasicDataxReportReceiver, DataxReportReceiver} +import org.apache.linkis.engineconnplugin.datax.utils.SecretUtils +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.protocol.engine.JobProgressInfo +import org.apache.linkis.scheduler.executer.ErrorExecuteResponse + +import java.util +import java.util.concurrent.{Future, TimeUnit} +import scala.collection.JavaConverters._ + +/** + * Once executor for datax container + */ +abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with OperableOnceExecutor { + /** + * Executor configuration + */ + private var execConfiguration: Configuration = _ + /** + * Future + */ + private var future: Future[_] = _ + private var daemonThread: Future[_] = _ + + /** + * Report receiver + */ + private var reportReceiver: 
DataxReportReceiver = _ + + /** + * Container + */ + private var container: AbstractContainer = _ + override def getId: String = "DataxOnceApp_" + getContainerName + "_" + id + + override def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit = { + if (StringUtils.isNotBlank(DataxConfiguration.SECURITY_MANAGER_CLASSES.getValue)) { + // Set the security manager + System.setSecurityManager(ClassUtils.getClassInstance(DataxConfiguration.SECURITY_MANAGER_CLASSES.getValue)) + } + // Init the report receiver + if (Option(reportReceiver).isEmpty) reportReceiver = new BasicDataxReportReceiver() + var isFailed = false + future = Utils.defaultScheduler.submit(new Runnable { + override def run(): Unit = { + val params: util.Map[String, Object] = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent + val result = execute(params, onceExecutorExecutionContext.getEngineCreationContext) + if (result._1 != 0) { + isFailed = true + tryFailed() + val message = s"Exec Datax engine conn occurred error, with exit code: [${result._1}]" + setResponse(ErrorExecuteResponse(message, new DataxJobExecutionException(message, result._2))) + } + info(s"The executor: [${getId}] has been finished, now to stop DataxEngineConn.") + closeDaemon() + if (!isFailed) { + trySucceed() + } + this synchronized notify() + } + }) + } + + /** + * Wait to running + */ + override protected def waitToRunning(): Unit = { + if (!isClosed) daemonThread = Utils.defaultScheduler.scheduleAtFixedRate(new Runnable { + override def run(): Unit = { + if (!(future.isDone || future.isCancelled)) { + trace(s"The executor: [$getId] has been still running") + } + } + }, DataxConfiguration.STATUS_FETCH_INTERVAL.getValue.toLong, + DataxConfiguration.STATUS_FETCH_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS) + } + + /** + * Get report receiver + * @return + */ + def getReportReceiver: DataxReportReceiver = this.reportReceiver + + /** + * Get container + * @return 
+ */ + def getContainer: AbstractContainer = this.container + override def getProgress: Float = { + Option(this.reportReceiver) match { + case Some(_) => this.reportReceiver.getProgress + case _ => 0f + } + } + + override def getProgressInfo: Array[JobProgressInfo] = { + Option(this.reportReceiver) match { + case Some(_) => this.reportReceiver.getProgressInfo + case _ => Array() + } + } + + override def getMetrics: util.Map[String, Any] = { + val metrics = Option(this.reportReceiver) match { + case Some(_) => this.reportReceiver.getMetrics + case _ => new util.HashMap[String, Any]() + } + // Report the resource + metrics.put("NodeResourceJson", getCurrentNodeResource().getUsedResource.toJson) + metrics + } + + override def getDiagnosis: util.Map[String, Any] = { + // Not support diagnosis + new util.HashMap[String, Any]() + } + + override def isClosed: Boolean = { + NodeStatus.isCompleted(getStatus) + } + + override def tryFailed(): Boolean = { +// Option(this.container).foreach(_.shutdown()) + super.tryFailed() + } + /** + * Execute with job content + * @param jobContent job content + * @param engineCreateContext engine create context + * @return + */ + private def execute(jobContent: util.Map[String, Object], engineCreateContext: EngineCreationContext):(Int, Throwable) = { + var exitCode: Int = 0 + var throwable: Throwable = null + Utils.tryCatch { + trace("Begin to decrypt the job content") + var fullConfig: Configuration = Configuration.from(jobContent) + fullConfig = SecretUtil.decryptSecretKey(fullConfig) + // Add the settings to job content + mergeConfig(fullConfig, dataxEngineConnContext.getSettings, CODE_NAME, updateWhenConflict = false) + // Add the core configuration to job content + mergeConfig(fullConfig, dataxEngineConnContext.getCoreConfig, "", updateWhenConflict = true) + // Print VM information + // Set plugin configuration + setPluginConfig(fullConfig) + Option(VMInfo.getVmInfo) match { + case Some(vm) => info(vm.toString) + case _ => + } + 
info(s"Try to launch executor: [${getId}] with job content: \n ${maskJobContent(fullConfig)}.\n") + // Seems that it is not important? + ConfigurationValidate.doValidate(fullConfig) + // Init environment settings + initEnvWithConfig(fullConfig) + // Store the full configuration + this.execConfiguration = fullConfig + execute(this.execConfiguration, engineCreateContext) + } { + e: Throwable => + exitCode = 1 + throwable = e + error(s"The possible reason of problem is : \n ${ExceptionTracker.trace(e)}") + e match { + case dataxE: DataXException => + val errorCode = dataxE.getErrorCode + errorCode match { + case code: FrameworkErrorCode => + exitCode = code.toExitValue + case _ => + } + case _ => + } + } + (exitCode, throwable) + } + + /** + * Execute with configuration + * @param self configuration + * @param engineCreateContext engine create context + */ + private def execute(self: Configuration, engineCreateContext: EngineCreationContext): Unit = { + // PrefTrace + val traceEnable = self.getBool(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, true) + val perfReportEnable = self.getBool(CoreConstant.DATAX_CORE_REPORT_DATAX_PERFLOG, true) + val jobInfo = self.getConfiguration(CoreConstant.DATAX_JOB_JOBINFO) + val channelNumber = self.getInt(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL) + val isJob = this.isInstanceOf[DataxJobOnceExecutor] + val taskGroupId: Int = if (isJob) -1 else self.getInt(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID) + val perfTrace = PerfTrace.getInstance(isJob, self.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID), taskGroupId, 0, traceEnable) + perfTrace.setJobInfo(jobInfo, perfReportEnable, channelNumber) + Option(createContainer(self, engineCreateContext)).foreach(container => { + this.container = container + container.start() + }) + } + /** + * Set plugin configuration + * @param self self configuration + */ + private def setPluginConfig(self: Configuration): Unit = { + val plugins: util.Map[String, Configuration] = 
dataxEngineConnContext + .getPluginDefinitions.asScala.map(define => (define.getPluginName, define.getPluginConf)).toMap.asJava + val pluginsNeed: util.Map[String, Configuration] = new util.HashMap() + Option(self.getString(CoreConstant.DATAX_JOB_CONTENT_READER_NAME)).foreach(readerPlugin => pluginsNeed.put(readerPlugin, plugins.get(readerPlugin))) + Option(self.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME)).foreach(writerPlugin => pluginsNeed.put(writerPlugin, plugins.get(writerPlugin))) + Option(self.getString(CoreConstant.DATAX_JOB_PREHANDLER_PLUGINNAME)).foreach(prePlugin => pluginsNeed.put(prePlugin, plugins.get(prePlugin))) + Option(self.getString(CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINNAME)).foreach(postPlugin => pluginsNeed.put(postPlugin, plugins.get(postPlugin))) + val noLoadPlugin = pluginsNeed.asScala.filter(entry => entry._2 == null).toMap + if (noLoadPlugin.nonEmpty){ + throw new DataxPluginLoadException(s"The specific plugins have not been loaded: [${noLoadPlugin.keys.mkString(",")}]", null) + } + pluginsNeed.asScala.foreach(entry => { + val pluginName = entry._1 + if (pluginName.endsWith("reader")){ + self.set(s"plugin.reader.${pluginName}", entry._2) + } else if (pluginName.endsWith("writer")){ + self.set(s"plugin.writer.${pluginName}", entry._2) + } else { + throw new DataxPluginLoadException(s"Unrecognized plugin name: [${pluginName}], please redefine it", null) + } + }) + } + /** + * Merge configuration + * @param self self configuration + * @param another another configuration + * @param pathPrefix path prefix + * @param updateWhenConflict update when conflict + * @return + */ + private def mergeConfig(self: Configuration, another: Configuration, pathPrefix: String, + updateWhenConflict: Boolean): Unit = { + val keys = another.getKeys + keys.asScala.foreach(key => { + val combineKey: String = if (StringUtils.isNotBlank(pathPrefix)) + StringUtils.join(util.Arrays.asList(pathPrefix, key), ".") else key + if (updateWhenConflict){ + 
self.set(combineKey, another.get(key)) + } else { + Option(self.get(combineKey)) match { + case Some(_) => + case _ => self.set(combineKey, another.get(key)) + } + } + }) + } + + /** + * Init the environment with configuration + * @param self self + */ + private def initEnvWithConfig(self: Configuration): Unit = { + ColumnCast.bind(self) + LoadUtil.bind(self) + } + /** + * Mask the job content + * @param self self configuration + * @return + */ + private def maskJobContent(self: Configuration): String = { + val contentWithSettings = self.getConfiguration(CODE_NAME).clone() + val content: Configuration = contentWithSettings.getConfiguration(JOB_CONTENT_NAME) + SecretUtils.filterSensitiveConfiguration(content) + contentWithSettings.set(JOB_CONTENT_NAME, content) + contentWithSettings.beautify() + } + + protected def closeDaemon(): Unit = { + if (daemonThread != null) daemonThread.cancel(true) + } + /** + * Container name + * @return + */ + def getContainerName: String + + + /** + * Create container + * @param config container configuration + * @param engineCreateContext engine create context + */ + def createContainer(config: Configuration, engineCreateContext: EngineCreationContext): AbstractContainer +} + +object DataxContainerOnceExecutor{ + + val CODE_NAME: String = "job" + + val JOB_CONTENT_NAME = "content" + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxExecutor.scala new file mode 100644 index 000000000..d82685f19 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxExecutor.scala @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.executor + +import org.apache.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor} +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.engineconnplugin.datax.exception.DataxJobExecutionException +import org.apache.linkis.manager.common.entity.resource.NodeResource +import org.apache.linkis.manager.label.entity.Label + +import java.util + +/** + * Datax executor with label + */ +trait DataxExecutor extends LabelExecutor with ResourceExecutor { + + private var executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]] + + override def getExecutorLabels(): util.List[Label[_]] = executorLabels + override def setExecutorLabels(labels: util.List[Label[_]]): Unit = this.executorLabels = labels + + override def requestExpectedResource(expectedResource: NodeResource): NodeResource = throw new DataxJobExecutionException("Not support method for requestExpectedResource.") + + protected val dataxEngineConnContext: DataxEngineConnContext + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxJobOnceExecutor.scala 
b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxJobOnceExecutor.scala new file mode 100644 index 000000000..a8794a53f --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxJobOnceExecutor.scala @@ -0,0 +1,38 @@ +package org.apache.linkis.engineconnplugin.datax.executor + +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.AbstractContainer +import com.alibaba.datax.core.job.JobContainer +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext + +import java.util +import scala.collection.JavaConverters.asScalaSetConverter + +/** + * + * @param id id + * @param dataxEngineConnContext datax engine conn context + */ +class DataxJobOnceExecutor(override val id: Long, + override protected val dataxEngineConnContext: DataxEngineConnContext) extends DataxContainerOnceExecutor { + /** + * Container name + * + * @return + */ + override def getContainerName: String = "Job-Container" + + /** + * Container entity + * + * @param config container configuration + * @param engineCreateContext engine create context + */ + override def createContainer(config: Configuration, engineCreateContext: EngineCreationContext): AbstractContainer = { + config.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, "engineConn") + new JobContainer(config) + } +} \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxOnceExecutor.scala new file mode 100644 index 000000000..69fb4d69c --- /dev/null +++ 
b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxOnceExecutor.scala @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.executor + +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.once.executor.{ManageableOnceExecutor, OnceExecutorExecutionContext} +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.manager.common.entity.resource.{CommonNodeResource, LoadResource, NodeResource} +import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf + +import scala.collection.JavaConversions.mapAsScalaMap + +trait DataxOnceExecutor extends ManageableOnceExecutor with DataxExecutor { + + val id: Long + + /** + * Submit entrance + * @param onceExecutorExecutionContext execution context + */ + override protected def submit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit = { + val options = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent.map { + case (k, v: String) => k -> v + case (k, v) if v != null => k 
-> v.toString + case (k, _) => k -> null + }.toMap + doSubmit(onceExecutorExecutionContext, options) + } + + def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit + + override protected val dataxEngineConnContext: DataxEngineConnContext + + override def getCurrentNodeResource(): NodeResource = { + val properties = EngineConnObject.getEngineCreationContext.getOptions + val resource = new LoadResource( + EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue(properties).toLong, + EngineConnPluginConf.JAVA_ENGINE_REQUEST_CORES.getValue(properties) + ) + val engineResource = new CommonNodeResource + engineResource.setUsedResource(resource) + engineResource + } + + override def ensureAvailable[A](f: => A): A = { + // Not need to throws exception + Utils.tryQuietly{ super.ensureAvailable(f) } + } + +} + + + + + + + + + + + diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxTaskGroupOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxTaskGroupOnceExecutor.scala new file mode 100644 index 000000000..6ad374adf --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxTaskGroupOnceExecutor.scala @@ -0,0 +1,27 @@ +package org.apache.linkis.engineconnplugin.datax.executor +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.AbstractContainer +import com.alibaba.datax.core.taskgroup.TaskGroupContainer +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext + +class DataxTaskGroupOnceExecutor(override val id: Long, + override protected val dataxEngineConnContext: DataxEngineConnContext) extends DataxContainerOnceExecutor { + /** + * Container name + * + * @return + */ 
+ override def getContainerName: String = "TaskGroup-Container" + + /** + * Container entity + * + * @param config container configuration + * @param engineCreateContext engine create context + */ + override def createContainer(config: Configuration, engineCreateContext: EngineCreationContext): AbstractContainer = { + new TaskGroupContainer(config) + } + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxCodeExecutorFactory.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxCodeExecutorFactory.scala new file mode 100644 index 000000000..7f9d8b1f3 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxCodeExecutorFactory.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.factory + +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.once.executor.OnceExecutor +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorFactory +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.engineconnplugin.datax.executor.{DataxContainerOnceExecutor, DataxJobOnceExecutor, DataxTaskGroupOnceExecutor} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.{JAVA, RunType, SCALA} + +class DataxCodeExecutorFactory extends OnceExecutorFactory { + protected override def newExecutor(id: Int, + engineCreationContext: EngineCreationContext, + engineConn: EngineConn, + labels: Array[Label[_]]): OnceExecutor = { + engineConn.getEngineConnSession match { + case context: DataxEngineConnContext => + val isJob = !("taskGroup".equalsIgnoreCase(context.getCoreConfig + .getString(CoreConstant.DATAX_CORE_CONTAINER_MODEL))) + if (isJob) + new DataxJobOnceExecutor(id, context) + else new DataxTaskGroupOnceExecutor(id, context) + case _ => null + } + } + + override protected def getSupportRunTypes: Array[String] = Array(SCALA.toString, JAVA.toString) + + override protected def getRunType: RunType = RunType.SCALA +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxEngineConnFactory.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxEngineConnFactory.scala new file mode 100644 index 000000000..624944720 --- /dev/null +++ 
b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxEngineConnFactory.scala @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.factory + +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration.CONFIG_PREFIX +import org.apache.linkis.engineconnplugin.datax.config.DataxCoreConfiguration._ +import org.apache.linkis.engineconnplugin.datax.config.DataxSettingConfiguration._ +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.engineconnplugin.datax.factory.DataxEngineConnFactory.{CORE_ARRAY_CONFIGS, CORE_VALUE_CONFIGS, SETTING_VALUE_CONFIGS} +import org.apache.linkis.engineconnplugin.datax.plugin.{DataxPluginDefinitionLoader, 
LocalDataxPluginDefinitionLoader} +import org.apache.linkis.manager.engineplugin.common.creation.{ExecutorFactory, MultiExecutorEngineConnFactory} +import org.apache.linkis.manager.label.entity.engine.EngineType +import org.apache.linkis.manager.label.entity.engine.EngineType.EngineType + +import java.util +import scala.collection.JavaConverters._ + +/** + * Datax engine conn factory + */ +class DataxEngineConnFactory extends MultiExecutorEngineConnFactory with Logging { + + /** + * Plugin loader + */ + private val pluginLoader: DataxPluginDefinitionLoader = LocalDataxPluginDefinitionLoader() + + override protected def getEngineConnType: EngineType = EngineType.DATAX + + override protected def createEngineConnSession(engineCreationContext: EngineCreationContext): Any = { + var options = engineCreationContext.getOptions + options = options.asScala.map{ + case (key, value) => + if (key.startsWith(CONFIG_PREFIX)){ + (key.replaceFirst(CONFIG_PREFIX, ""), value) + } else (key, value) + }.asJava + engineCreationContext.setOptions(options) + val coreConfig = createCoreConfiguration(engineCreationContext) + val settings = createSettingsConfiguration(engineCreationContext) + new DataxEngineConnContext(settings, coreConfig, pluginLoader.loadPlugin(engineCreationContext)) + } + + /** + * Core configuration + * @param engineCreationContext engine create context + * @return + */ + private def createCoreConfiguration(engineCreationContext: EngineCreationContext): Configuration = { + val configuration = Configuration.from("{}") + val options = engineCreationContext.getOptions + CORE_VALUE_CONFIGS.foreach(config => config.getValue(options) match { + case v: Any => configuration.set(config.key, v) + case _ => //Ignore the unexpected value + }) + CORE_ARRAY_CONFIGS.foreach(config => config.getValue(options) match { + case array: Array[String] => configuration.set(config.key, array) + case str: String => if (StringUtils.isNotBlank(str)) + configuration.set(config.key, 
util.Arrays.asList(str.split(","))) + case _ => //Ignore the unrecognized value + }) + Option(DataxConfiguration.JOB_EXECUTION_ID.getValue(options)) match { + case Some(executionId: String) => + configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, executionId) + case _ => + } + configuration + } + + /** + * Settings configuration + * @param engineCreationContext engine create context + * @return + */ + private def createSettingsConfiguration(engineCreationContext: EngineCreationContext): Configuration = { + val configuration = Configuration.from("{}") + SETTING_VALUE_CONFIGS.foreach(config => config.getValue(engineCreationContext.getOptions) match { + case v: Any => configuration.set(config.key, v) + case _ => //Ignore the unexpected value + }) + configuration + } + override protected def getDefaultExecutorFactoryClass: Class[_ <: ExecutorFactory] = { + classOf[DataxCodeExecutorFactory] + } + + override def getExecutorFactories: Array[ExecutorFactory] = { + val executorFactoryArray = Array[ExecutorFactory](new DataxCodeExecutorFactory) + executorFactoryArray + } + +} + +object DataxEngineConnFactory{ + /** + * Settings + */ + val SETTING_VALUE_CONFIGS: Array[CommonVars[_]] = Array(SETTING_SYNC_META, SETTING_TRANSPORT_TYPE, + SETTING_KEY_VERSION, SETTING_SPEED_BYTE, SETTING_SPEED_RECORD, + SETTING_SPEED_CHANNEL, SETTING_ERROR_LIMIT_RECORD, SETTING_USE_PROCESSOR + ) + + /** + * Core + */ + val CORE_VALUE_CONFIGS: Array[CommonVars[_]] = + Array(CORE_STATISTICS_REPORTER_PLUGIN_CLASS, COMMON_COLUMN_DATETIME_FORMAT, COMMON_COLUMN_TIME_FORMAT, COMMON_COLUMN_DATE_FORMAT, + COMMON_COLUMN_TIMEZONE, COMMON_COLUMN_ENCODING, CORE_TRANSPORT_TYPE, CORE_TRANSPORT_CHANNEL_SPEED_BYTE, + CORE_TRANSPORT_CHANNEL_SPEED_RECORD, CORE_TRANSPORT_CHANNEL_FLOW_CONTROL_INTERNAL, CORE_TRANSPORT_CHANNEL_CAPACITY, + CORE_TRANSPORT_CHANNEL_BYTE_CAPACITY, CORE_TRANSPORT_RECORD_CHANNEL_CLASS, CORE_TRANSPORT_RECORD_EXCHANGER_CLASS, + CORE_TRANSPORT_RECORD_EXCHANGER_BUFFER_SIZE, 
CORE_TRANSPORT_STREAM_CHANNEL_CLASS, CORE_TRANSPORT_STREAM_CHANNEL_BLOCK_SIZE, + CORE_CONTAINER_JOB_REPORT_INTERVAL, CORE_CONTAINER_JOB_SLEEP_INTERNAL, CORE_CONTAINER_TASK_GROUP_REPORT_INTERVAL, + CORE_CONTAINER_TASK_GROUP_SLEEP_INTERNAL, CORE_CONTAINER_TASK_GROUP_CHANNEL, CORE_CONTAINER_TRACE_ENABLE, + CORE_STATISTICS_COLLECTOR_PLUGIN_TASK_CLASS, CORE_STATISTICS_COLLECTOR_PLUGIN_MAX_DIRTY_NUMBER, + CORE_PROCESSOR_LOADER_PLUGIN_CLASS, CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE, CORE_PROCESSOR_LOADER_PLUGIN_SOURCE_PATH, CORE_CONTAINER_MODEL + ) + + val CORE_ARRAY_CONFIGS: Array[CommonVars[_]] = Array(COMMON_COLUMN_EXTRA_FORMATS) +} \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/launch/DataxEngineConnLaunchBuilder.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/launch/DataxEngineConnLaunchBuilder.scala new file mode 100644 index 000000000..da194c0bb --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/launch/DataxEngineConnLaunchBuilder.scala @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.launch + +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.utils.JsonUtils +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.plugin.{PluginBmlResource, PluginResource} +import org.apache.linkis.manager.common.protocol.bml.BmlResource +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment.{PWD, variable} +import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder + +import java.util +import java.util.Base64 +import scala.collection.mutable.ArrayBuffer + +/** + * Datax engine conn launch builder + * (use public module lib) + */ +class DataxEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder { + + protected override def getCommands(implicit engineConnBuildRequest: EngineConnBuildRequest): Array[String] = { + // CD to the worker space directory + var commands = new ArrayBuffer[String]() + commands += "cd" + commands += variable(PWD) + commands += "&&" + commands = commands ++ super.getCommands + commands.toArray + } + + protected override def getBmlResources(implicit engineConnBuildRequest: EngineConnBuildRequest): util.List[BmlResource] = { + val bmlResources = new util.ArrayList[BmlResource](super.getBmlResources) + val props = engineConnBuildRequest.engineConnCreationDesc.properties + DataxConfiguration.PLUGIN_RESOURCES.getValue(props) match { + case resources: String => + if (StringUtils.isNotBlank(resources)) { + val mapper = JsonUtils.jackson + val pluginBmlResources: Array[PluginBmlResource] = mapper.readValue(resources, + mapper.getTypeFactory.constructArrayType(classOf[PluginBmlResource])) + Option(pluginBmlResources).foreach(pluginBmlResources => pluginBmlResources.foreach(pluginBmlResource => { + // Convert to bml resources + val 
bmlResource = new BmlResource + bmlResource.setFileName(pluginBmlResource.getName) + bmlResource.setResourceId(pluginBmlResource.getResourceId) + bmlResource.setVersion(pluginBmlResource.getVersion) + bmlResource.setOwner(pluginBmlResource.getCreator) + pluginBmlResource.getPath match { + case "." => + bmlResource.setVisibility(BmlResource.BmlResourceVisibility.Private) + case _ => + // Importance: major module must be a public bml resource + bmlResource.setVisibility(BmlResource.BmlResourceVisibility.Public) + } + bmlResources.add(bmlResource) + })) + // Encoding the resources json + props.put(DataxConfiguration.PLUGIN_RESOURCES.key, Base64.getEncoder.encodeToString(resources.getBytes("utf-8"))) + } + } + bmlResources + } + +} \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/params/DataxParamsResolver.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/params/DataxParamsResolver.scala new file mode 100644 index 000000000..74472795b --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/params/DataxParamsResolver.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.params + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext + +import java.util + +/** + * Resolve the engine job params + */ +trait DataxParamsResolver { + + /** + * main method + * @param params input + * @return + */ + def resolve(params: util.Map[String, Object], context: EngineCreationContext): util.Map[String, Object] +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/DataxPluginDefinitionLoader.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/DataxPluginDefinitionLoader.scala new file mode 100644 index 000000000..e8fc57e0c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/DataxPluginDefinitionLoader.scala @@ -0,0 +1,17 @@ +package org.apache.linkis.engineconnplugin.datax.plugin + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.context.DataxPluginDefinition +import java.util +/** + * Plugin definition loader + */ +trait DataxPluginDefinitionLoader { + + /** + * Load plugin + * @param engineCreationContext engine create context + * @return + */ + def loadPlugin(engineCreationContext: EngineCreationContext): util.List[DataxPluginDefinition] +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/LocalDataxPluginDefinitionLoader.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/LocalDataxPluginDefinitionLoader.scala new file mode 100644 index 000000000..3ee0f4774 --- /dev/null +++ 
b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/LocalDataxPluginDefinitionLoader.scala @@ -0,0 +1,88 @@ +package org.apache.linkis.engineconnplugin.datax.plugin +import com.alibaba.datax.common.util.Configuration +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.{JsonUtils, Logging} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.context.DataxPluginDefinition +import org.apache.linkis.engineconnplugin.datax.exception.DataxPluginLoadException +import org.apache.linkis.engineconnplugin.datax.plugin.LocalDataxPluginDefinitionLoader.{PLUGIN_JSON_NAME, PLUGIN_NAME, PLUGIN_PATH} +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment + +import java.io.File +import java.util +import java.util.Base64 +/** + * Local plugin definition loader + */ +class LocalDataxPluginDefinitionLoader extends DataxPluginDefinitionLoader with Logging{ + /** + * Load plugin + * + * @param engineCreationContext engine create context + * @return + */ + override def loadPlugin(engineCreationContext: EngineCreationContext): util.List[DataxPluginDefinition] = { + val options = engineCreationContext.getOptions + val plugins = new util.ArrayList[DataxPluginDefinition]() + val pluginDefineSet: util.Set[String] = new util.HashSet[String]() + DataxConfiguration.PLUGIN_RESOURCES.getValue(options) match { + case encryptRes: String => + if (StringUtils.isNotBlank(encryptRes)) { + // First to decode the resources + val resources = new String(Base64.getDecoder.decode(encryptRes), "utf-8"); + val mapper = JsonUtils.jackson + val pluginResources: Array[PluginResource] = mapper.readValue(resources, + mapper.getTypeFactory.constructArrayType(classOf[PluginResource])) + val workDir = 
CommonVars(Environment.PWD.toString, "").getValue + if (StringUtils.isBlank(workDir)) { + throw new DataxPluginLoadException(s"Cannot get the working directory from variable: 'PWD' in datax engine conn environment", null) + } + Option(pluginResources).foreach(resources => resources.foreach( + resource => Option(convertPluginResourceToDefine(pluginDefineSet, resource, workDir)) + .foreach(definition => plugins.add(definition)))) + } + case _ => + } + plugins + } + + private def convertPluginResourceToDefine(pluginDefineSet: util.Set[String], resource: PluginResource, workDir: String): DataxPluginDefinition = { + // Skip the path has value '.' + resource.getPath match { + case "." => null + case _ => + // Search and load the resource definition at work directory + val resLocalFile = new File(workDir, new File(resource.getPath).getName) + if (resLocalFile.isDirectory) { + val pluginConf: Configuration = Configuration.from(new File(resLocalFile.getPath, PLUGIN_JSON_NAME)) + val pluginName: String = pluginConf.getString(PLUGIN_NAME) + var pluginPath: String = pluginConf.getString(PLUGIN_PATH) + if (pluginDefineSet.contains(pluginName)) { + throw new DataxPluginLoadException(s"Fail to load plugin [name: ${pluginName}, path: ${pluginPath}], duplicated plugin exists", null) + } + pluginDefineSet.add(pluginName) + if (StringUtils.isBlank(pluginPath)) { + pluginPath = resLocalFile.getPath + pluginConf.set(PLUGIN_PATH, pluginPath) + } + new DataxPluginDefinition(pluginName, pluginPath, pluginConf) + } else { + warn(s"Cannot find the plugin resource in path: [${resLocalFile.getPath}], please examine the working directory: [${workDir}]") + null + } + } + } +} + +object LocalDataxPluginDefinitionLoader{ + + val PLUGIN_JSON_NAME = "plugin.json" + + val PLUGIN_PATH = "path" + + val PLUGIN_NAME = "name" + def apply(): LocalDataxPluginDefinitionLoader = new LocalDataxPluginDefinitionLoader() + +} diff --git 
a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/BasicDataxReportReceiver.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/BasicDataxReportReceiver.scala new file mode 100644 index 000000000..1e1f36052 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/BasicDataxReportReceiver.scala @@ -0,0 +1,81 @@ +package org.apache.linkis.engineconnplugin.datax.report +import com.alibaba.datax.core.statistics.communication.{Communication, CommunicationTool} +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.protocol.engine.JobProgressInfo + +import java.util + +/** + * Basic datax report receiver + */ +class BasicDataxReportReceiver extends DataxReportReceiver { + + private var jobId: String =_ + /** + * Just store the last communication + */ + private var lastCommunication: Communication = _ + /** + * Receive communication + * + * @param communication communication + */ + override def receive(jobId: String, communication: Communication): Unit = { + if (StringUtils.isNotBlank(jobId)){ + this.jobId = jobId + } + // Update + this.lastCommunication = communication + } + + /** + * Progress value + * + * @return + */ + override def getProgress: Float = { + Option(this.lastCommunication) match { + case Some(communication) => + communication.getDoubleCounter(CommunicationTool.PERCENTAGE).floatValue() + case _ => 0f + } + } + + /** + * Progress info + * + * @return + */ +override def getProgressInfo: Array[JobProgressInfo] = { + // datax does not have failed task + var totalTask: Long = 0 + var finishTask: Long = 0 + Option(this.lastCommunication) match { + case Some(communication) => + // Just statistics the total job + finishTask = communication.getLongCounter(CommunicationTool.STAGE) + // reverse calculate + val percentage = 
communication.getDoubleCounter(CommunicationTool.PERCENTAGE) + totalTask = (finishTask.toDouble / percentage).toInt + case _ => + } + Array(JobProgressInfo(this.jobId, totalTask.toInt, (totalTask - finishTask).toInt, 0, finishTask.toInt)) +} + + /** + * Metrics info + * + * @return + */ + override def getMetrics: util.Map[String, Any] = { + // Convert the whole counter in communication + Option(this.lastCommunication) match { + case Some(communication) => + val counter = communication.getCounter + counter.asInstanceOf[util.Map[String, Any]] + case _ => new util.HashMap[String, Any]() + } + } + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxEngineConnCommunicateReporter.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxEngineConnCommunicateReporter.scala new file mode 100644 index 000000000..356bf421f --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxEngineConnCommunicateReporter.scala @@ -0,0 +1,40 @@ +package org.apache.linkis.engineconnplugin.datax.report + +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.statistics.communication.{Communication, LocalTGCommunicationManager} +import com.alibaba.datax.core.statistics.container.report.AbstractReporter +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager +import org.apache.linkis.engineconnplugin.datax.executor.DataxContainerOnceExecutor + +import java.lang + +/** + * Communication reporter for datax engine conn + */ +class DataxEngineConnCommunicateReporter(configuration: Configuration) extends AbstractReporter{ + + + /** + * Report the job communication + * @param jobId job id + * @param communication communication + */ + override def reportJobCommunication(jobId: lang.Long, communication: Communication): Unit = { + 
OnceExecutorManager.getInstance.getReportExecutor match{ + case executor: DataxContainerOnceExecutor => + executor.getReportReceiver.receive(jobId.toString, communication) + case _ => + } + } + + /** + * Report the task group communication + * @param taskGroupId task group id + * @param communication communication + */ + override def reportTGCommunication(taskGroupId: Integer, communication: Communication): Unit = { + LocalTGCommunicationManager.updateTaskGroupCommunication(taskGroupId, communication) + } + + def getConfiguration: Configuration = this.configuration +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportQuota.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportQuota.scala new file mode 100644 index 000000000..1b304ce11 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportQuota.scala @@ -0,0 +1,28 @@ +package org.apache.linkis.engineconnplugin.datax.report + +import org.apache.linkis.protocol.engine.JobProgressInfo +import java.util +/** + * Quota interface + */ +trait DataxReportQuota { + + /** + * Progress value + * @return + */ + def getProgress: Float + + /** + * Progress info + * @return + */ + def getProgressInfo: Array[JobProgressInfo] + + /** + * Metrics info + * @return + */ + def getMetrics: util.Map[String, Any] + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportReceiver.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportReceiver.scala new file mode 100644 index 000000000..1e539c0d5 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportReceiver.scala @@ -0,0 +1,15 @@ 
+package org.apache.linkis.engineconnplugin.datax.report + +import com.alibaba.datax.core.statistics.communication.Communication + +/** + * Report receiver + */ +trait DataxReportReceiver extends DataxReportQuota { + /** + * Receive communication + * @param communication communication + */ + def receive(jobId: String, communication: Communication): Unit + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/resource/DataxEngineConnResourceFactory.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/resource/DataxEngineConnResourceFactory.scala new file mode 100644 index 000000000..8bb7cdf22 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/resource/DataxEngineConnResourceFactory.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.resource + +import org.apache.linkis.manager.common.entity.resource.{LoadInstanceResource, Resource} +import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf +import org.apache.linkis.manager.engineplugin.common.resource.AbstractEngineResourceFactory + +import java.util + +/** + * Resource factory + */ +class DataxEngineConnResourceFactory extends AbstractEngineResourceFactory { + + override protected def getRequestResource(properties: util.Map[String, String]): Resource = { + // Just use local resource + new LoadInstanceResource(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue(properties).toLong, + EngineConnPluginConf.JAVA_ENGINE_REQUEST_CORES.getValue(properties), 1) + } +} diff --git a/exchangis-plugins/engine/sqoop/pom.xml b/exchangis-engines/engineconn-plugins/sqoop/pom.xml similarity index 93% rename from exchangis-plugins/engine/sqoop/pom.xml rename to exchangis-engines/engineconn-plugins/sqoop/pom.xml index 345ae5443..ee8398bf7 100644 --- a/exchangis-plugins/engine/sqoop/pom.xml +++ b/exchangis-engines/engineconn-plugins/sqoop/pom.xml @@ -20,17 +20,19 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - linkis - org.apache.linkis - 1.1.1 - + exchangis-engines + com.webank.wedatasphere.exchangis + 1.1.2 + ../../pom.xml 4.0.0 linkis-engineplugin-sqoop 1.4.6 - 3.1.2 + 3.1.3 + 3.3.4 + 1.4.0 @@ -46,12 +48,6 @@ hadoop200 ${sqoop.version} - - org.apache.commons - commons-exec - provided - 1.3 - org.apache.avro avro @@ -184,6 +180,10 @@ hadoop-hdfs org.apache.hadoop + + org.apache.hadoop + hadoop-hdfs + @@ -191,39 +191,24 @@ linkis-engineconn-plugin-core ${linkis.version} - org.apache.linkis linkis-rpc ${linkis.version} provided - org.apache.linkis linkis-storage ${linkis.version} provided - org.apache.linkis linkis-common ${linkis.version} provided - - - org.apache.linkis 
- linkis-bml-engine-hook - ${linkis.version} - - - commons-logging - commons-logging - - - @@ -232,7 +217,6 @@ org.apache.maven.plugins maven-deploy-plugin - net.alchim31.maven scala-maven-plugin diff --git a/exchangis-plugins/engine/sqoop/src/main/assembly/distribution.xml b/exchangis-engines/engineconn-plugins/sqoop/src/main/assembly/distribution.xml similarity index 99% rename from exchangis-plugins/engine/sqoop/src/main/assembly/distribution.xml rename to exchangis-engines/engineconn-plugins/sqoop/src/main/assembly/distribution.xml index 78f54c4c0..a88001e18 100644 --- a/exchangis-plugins/engine/sqoop/src/main/assembly/distribution.xml +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/assembly/distribution.xml @@ -32,7 +32,7 @@ - /dist/v${sqoop.version}/lib + /dist/${sqoop.version}/lib true true false diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java diff --git 
a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java 
b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java diff --git a/exchangis-plugins/engine/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java rename to exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java diff --git a/exchangis-plugins/engine/sqoop/src/main/resources/linkis-engineconn.properties b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/resources/linkis-engineconn.properties rename to exchangis-engines/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/log4j2.xml b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/log4j2.xml new file mode 
100644 index 000000000..3b45ae2a1 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/log4j2.xml @@ -0,0 +1,82 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala similarity index 93% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala index 52266bb71..ae322a3f5 100644 --- a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala @@ -25,16 +25,17 @@ import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.engineconnplugin.sqoop.factory.SqoopEngineConnFactory import org.apache.linkis.engineconnplugin.sqoop.launch.SqoopEngineConnLaunchBuilder +import java.util.Map +import java.util.List class SqoopEngineConnPlugin extends EngineConnPlugin{ private val EP_CONTEXT_CONSTRUCTOR_LOCK = new Object() private var engineResourceFactory: EngineResourceFactory = _ private var engineConnLaunchBuilder: EngineConnLaunchBuilder = _ private var engineConnFactory: EngineConnFactory = _ - override def init(params: java.util.Map[String, Any]): Unit = {} + override def init(params: Map[String, AnyRef]): Unit = {} override def getEngineResourceFactory: EngineResourceFactory = { - EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized{ if(null == engineResourceFactory){ engineResourceFactory = new 
GenericEngineResourceFactory @@ -62,5 +63,6 @@ class SqoopEngineConnPlugin extends EngineConnPlugin{ } } - override def getDefaultLabels: java.util.List[Label[_]] = new java.util.ArrayList[Label[_]] + override def getDefaultLabels: List[Label[_]] = new java.util.ArrayList[Label[_]] + } diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala rename to 
exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala 
b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala similarity index 100% rename from 
exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala diff --git a/exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala similarity index 100% rename from exchangis-plugins/engine/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala rename to exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala diff --git 
a/exchangis-plugins/engine/datax/datax-assembly/package.xml b/exchangis-engines/engines/datax/datax-assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-assembly/package.xml rename to exchangis-engines/engines/datax/datax-assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-assembly/pom.xml b/exchangis-engines/engines/datax/datax-assembly/pom.xml similarity index 94% rename from exchangis-plugins/engine/datax/datax-assembly/pom.xml rename to exchangis-engines/engines/datax/datax-assembly/pom.xml index d4c7e1fc2..74b755512 100644 --- a/exchangis-plugins/engine/datax/datax-assembly/pom.xml +++ b/exchangis-engines/engines/datax/datax-assembly/pom.xml @@ -3,9 +3,9 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 diff --git a/exchangis-plugins/engine/datax/datax-core/pom.xml b/exchangis-engines/engines/datax/datax-core/pom.xml similarity index 91% rename from exchangis-plugins/engine/datax/datax-core/pom.xml rename to exchangis-engines/engines/datax/datax-core/pom.xml index a0cfd11b6..48be143d3 100644 --- a/exchangis-plugins/engine/datax/datax-core/pom.xml +++ b/exchangis-engines/engines/datax/datax-core/pom.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 jar - ${datax.engine.version} + 3.0.0-Plus-2 datax-core @@ -19,8 +19,12 @@ 1.0.15 - com.alibaba - fastjson + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind commons-configuration @@ -75,6 +79,7 @@ ch.qos.logback logback-classic + provided 
org.slf4j @@ -122,21 +127,21 @@ dm 16 system - ${basedir}/src/main/lib/Dm7JdbcDriver16.jar + ${pom.basedir}/src/main/lib/Dm7JdbcDriver16.jar com.sybase jconn3 1.0.0-SNAPSHOT system - ${basedir}/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar + ${pom.basedir}/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar ppas ppas 16 system - ${basedir}/src/main/lib/edb-jdbc16.jar + ${pom.basedir}/src/main/lib/edb-jdbc16.jar diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-core/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-core/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/bin/datax.py b/exchangis-engines/engines/datax/datax-core/src/main/bin/datax.py similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/bin/datax.py rename to exchangis-engines/engines/datax/datax-core/src/main/bin/datax.py diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/bin/dxprof.py b/exchangis-engines/engines/datax/datax-core/src/main/bin/dxprof.py similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/bin/dxprof.py rename to exchangis-engines/engines/datax/datax-core/src/main/bin/dxprof.py diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/bin/perftrace.py b/exchangis-engines/engines/datax/datax-core/src/main/bin/perftrace.py similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/bin/perftrace.py rename to exchangis-engines/engines/datax/datax-core/src/main/bin/perftrace.py diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java similarity index 93% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java index c8d580e17..c5a121bd3 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java @@ -1,6 +1,7 @@ package com.alibaba.datax.common.element; -import com.alibaba.fastjson.JSON; + +import com.webank.wedatasphere.exchangis.datax.util.Json; import java.math.BigDecimal; import java.math.BigInteger; @@ -66,7 +67,7 @@ protected void setByteSize(int byteSize) { @Override public String toString() { - return JSON.toJSONString(this); + return Json.toJson(this, null); } public enum Type { diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java similarity index 91% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java index 8679ffb47..0cd4ad7e2 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java @@ -15,7 +15,8 @@ public enum CommonErrorCode implements ErrorCode { HOOK_INTERNAL_ERROR("Common-12", "Hook运行错误 ."), SHUT_DOWN_TASK("Common-20", "Task收到了shutdown指令,为failover做准备"), WAIT_TIME_EXCEED("Common-21", "等待时间超出范围"), - TASK_HUNG_EXPIRED("Common-22", "任务hung住,Expired"); + TASK_HUNG_EXPIRED("Common-22", "任务hung住,Expired"), + UNSUPPORTED_METHOD("Commmon-23", "暂不支持该方法"); 
private final String code; diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java index b93d96c68..c6d5d2a1a 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java @@ -3,8 +3,7 @@ import com.alibaba.datax.common.exception.CommonErrorCode; import com.alibaba.datax.common.exception.DataXException; import com.alibaba.datax.common.spi.ErrorCode; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.CharUtils; import org.apache.commons.lang3.StringUtils; @@ -210,7 +209,12 @@ public String getString(final String path) { if (null == string) { return null; } - return String.valueOf(string); + Class clazz = string.getClass(); + if(clazz.equals(String.class) || + clazz.isPrimitive() || isWrapClass(clazz)){ + return String.valueOf(string); + } + return Json.toJson(string, null); } /** @@ -576,8 +580,7 @@ public T get(final String path, Class clazz) { * 格式化Configuration输出 */ public String beautify() { - return JSON.toJSONString(this.getInternal(), - SerializerFeature.PrettyFormat); + return Json.toJson(this.getInternal(), null, true); } /** @@ -1059,7 +1062,7 @@ private static void checkJSON(final String json) { private Configuration(final String 
json) { try { - this.root = JSON.parse(json); + this.root = Json.fromJson(json, Object.class); } catch (Exception e) { throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, String.format("配置信息错误. 您提供的配置信息不是合法的JSON格式: %s . 请按照标准json格式提供配置信息. ", e.getMessage())); @@ -1067,11 +1070,18 @@ private Configuration(final String json) { } private static String toJSONString(final Object object) { - return JSON.toJSONString(object); + return Json.toJson(object, null); } public Set getSecretKeyPathSet() { return secretKeyPathSet; } + private static boolean isWrapClass(Class clz){ + try{ + return ((Class)clz.getField("TYPE").get(null)).isPrimitive(); + }catch (Exception e){ + return false; + } + } } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java rename 
to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java diff --git 
a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java index 054a3ef39..b2e346b4d 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java @@ -44,6 +44,8 @@ public class Engine { /* check job model (job/task) first */ public void start(Configuration allConf) { + //todo 放在DataxOnceExecutor实现 + // 绑定column转换信息 ColumnCast.bind(allConf); diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java similarity index 95% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java index 4e967e822..c71523ece 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java @@ -34,7 +34,7 @@ import com.alibaba.datax.core.util.container.CoreConstant; import com.alibaba.datax.core.util.container.LoadUtil; import com.alibaba.datax.dataxservice.face.domain.enums.ExecuteMode; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.FileFileFilter; import org.apache.commons.lang.StringUtils; @@ -465,14 +465,14 @@ private int split() { //change the channel speed when channel speed * taskNumber List transformerList = this.configuration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT_TRANSFORMER); - LOG.debug("transformer configuration: " + JSON.toJSONString(transformerList)); + LOG.debug("transformer configuration: " + Json.toJson(transformerList, null)); //input: reader parameter list and writer task list(contain properties: parameter, name and processor) //output: "content" array List contentConfig = mergeReaderAndWriterTaskConfigs( readerTaskConfigs, writerTaskConfigs, transformerList); - LOG.debug("contentConfig configuration: " + JSON.toJSONString(contentConfig)); + LOG.debug("contentConfig configuration: " + Json.toJson(contentConfig, null)); this.configuration.set(CoreConstant.DATAX_JOB_CONTENT, contentConfig); @@ -720,33 +720,34 @@ private void logStatistics() { } //report to server - try { - HttpClientUtil httpClientUtil = 
HttpClientUtil.getHttpClientUtil(); - Map report =new HashMap<>(10); - report.put("id", configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID)); - report.put("byteSpeedPerSecond",byteSpeedPerSecond); - report.put("recordSpeedPerSecond",recordSpeedPerSecond); - report.put("totalCosts",totalCosts); - report.put("totalErrorRecords",CommunicationTool.getTotalErrorRecords(communication)); - report.put("totalReadRecords",CommunicationTool.getTotalReadRecords(communication)); - report.put("totalReadBytes", CommunicationTool.getTotalReadBytes(communication)); - report.put("transformerFailedRecords",communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS)); - report.put("transformerFilterRecords",communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS)); - report.put("transformerTotalRecords",communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS)); - StringEntity entity = new StringEntity(JSON.toJSONString(report)); - entity.setContentEncoding("UTF-8"); - entity.setContentType("application/json"); - HttpPost post = HttpClientUtil.getPostRequest(configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_PROTOCOL) - + "://" + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ADDRESS) - + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT), - entity, - "Content-Type", "application/json;charset=UTF-8"); - String response = httpClientUtil.executeAndGet(post, String.class); - LOG.info("Send report respone,{}",response); - }catch (Exception e){ - LOG.error("Post report error",e); + if (StringUtils.isNotBlank(configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT))) { + try { + HttpClientUtil httpClientUtil = HttpClientUtil.getHttpClientUtil(); + Map report = new HashMap<>(10); + report.put("id", configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID)); + report.put("byteSpeedPerSecond", byteSpeedPerSecond); + report.put("recordSpeedPerSecond", 
recordSpeedPerSecond); + report.put("totalCosts", totalCosts); + report.put("totalErrorRecords", CommunicationTool.getTotalErrorRecords(communication)); + report.put("totalReadRecords", CommunicationTool.getTotalReadRecords(communication)); + report.put("totalReadBytes", CommunicationTool.getTotalReadBytes(communication)); + report.put("transformerFailedRecords", communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS)); + report.put("transformerFilterRecords", communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS)); + report.put("transformerTotalRecords", communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS)); + StringEntity entity = new StringEntity(Json.toJson(report, null)); + entity.setContentEncoding("UTF-8"); + entity.setContentType("application/json"); + HttpPost post = HttpClientUtil.getPostRequest(configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_PROTOCOL) + + "://" + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ADDRESS) + + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT), + entity, + "Content-Type", "application/json;charset=UTF-8"); + String response = httpClientUtil.executeAndGet(post, String.class); + LOG.info("Send report respone,{}", response); + } catch (Exception e) { + LOG.error("Post report error", e); + } } - } /** @@ -1213,7 +1214,7 @@ private List doLoadProcessor(String namespace){ } }); } - LOG.info("Loading processors finished, " + JSON.toJSONString(processors)); + LOG.info("Loading processors finished, " + Json.toJson(processors, null)); return processors; } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java index 59a0ea6ca..5a106c7e2 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java @@ -2,7 +2,7 @@ import com.alibaba.datax.common.statistics.PerfTrace; import com.alibaba.datax.common.util.StrUtil; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang.Validate; import java.text.DecimalFormat; @@ -220,7 +220,7 @@ 
public static String getSnapshot(Communication communication) { pair = getWaitWriterTime(communication); state.put((String) pair.getKey(), pair.getValue()); - return JSON.toJSONString(state); + return Json.toJson(state, null); } private static Pair getTotalBytes(final Communication communication) { diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java similarity index 86% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java index 7044794ed..b05ac38c0 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java @@ -5,7 +5,9 @@ import com.alibaba.datax.core.statistics.communication.CommunicationTool; import 
com.alibaba.datax.core.statistics.container.collector.ProcessInnerCollector; import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import com.alibaba.datax.core.statistics.container.report.AbstractReporter; import com.alibaba.datax.core.statistics.container.report.ProcessInnerReporter; +import com.alibaba.datax.core.util.ClassUtil; import com.alibaba.datax.core.util.container.CoreConstant; import com.alibaba.datax.dataxservice.face.domain.enums.State; import org.slf4j.Logger; @@ -22,7 +24,9 @@ public StandAloneJobContainerCommunicator(Configuration configuration) { super(configuration); super.setCollector(new ProcessInnerCollector(configuration.getLong( CoreConstant.DATAX_CORE_CONTAINER_JOB_ID))); - super.setReporter(new ProcessInnerReporter(configuration)); + // Set the reporter defined in configuration + super.setReporter(ClassUtil.instantiate(configuration.getString(CoreConstant.DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS), + AbstractReporter.class, configuration)); } @Override diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java similarity index 60% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java index 82dba8bdf..8ea0ac5d6 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java @@ -2,13 +2,18 @@ import com.alibaba.datax.common.util.Configuration; import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.container.report.AbstractReporter; import com.alibaba.datax.core.statistics.container.report.ProcessInnerReporter; +import com.alibaba.datax.core.util.ClassUtil; +import com.alibaba.datax.core.util.container.CoreConstant; public class StandaloneTGContainerCommunicator extends AbstractTGContainerCommunicator { public StandaloneTGContainerCommunicator(Configuration configuration) { super(configuration); - super.setReporter(new ProcessInnerReporter(configuration)); + // Set the reporter defined in configuration + super.setReporter(ClassUtil.instantiate(configuration.getString(CoreConstant.DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS), + AbstractReporter.class, configuration)); } @Override diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java similarity index 96% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java index d64784cf8..a54ae4fb7 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java @@ -6,7 +6,7 @@ import com.alibaba.datax.core.statistics.communication.Communication; import com.alibaba.datax.core.statistics.plugin.task.util.DirtyRecord; import com.alibaba.datax.core.util.container.CoreConstant; -import com.alibaba.fastjson.JSON; +import 
com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,7 +53,7 @@ private String formatDirty(final Record dirty, final Throwable t, .getColumns()); } - return JSON.toJSONString(msgGroup); + return Json.toJson(msgGroup, null); } @Override diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java similarity index 97% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java index 8d95c398f..4ced82ca5 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java @@ -4,7 +4,7 @@ import com.alibaba.datax.common.element.Record; import com.alibaba.datax.common.exception.DataXException; import com.alibaba.datax.core.util.FrameworkErrorCode; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import java.math.BigDecimal; import java.math.BigInteger; @@ -32,7 +32,7 @@ public void addColumn(Column column) { @Override public String toString() { - return JSON.toJSONString(this.columns); + return Json.toJson(this.columns, null); } @Override diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java similarity index 99% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java index 43d378bd6..76cfa836e 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java @@ -31,7 +31,7 @@ import com.alibaba.datax.core.util.container.CoreConstant; import com.alibaba.datax.core.util.container.LoadUtil; import com.alibaba.datax.dataxservice.face.domain.enums.State; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -145,7 +145,7 @@ public void start() { if (LOG.isDebugEnabled()) { LOG.debug("taskGroup[{}]'s task configs[{}]", this.taskGroupId, - JSON.toJSONString(taskConfigs)); + Json.toJson(taskConfigs, null)); } int taskCountInThisTaskGroup = taskConfigs.size(); diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java similarity index 97% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java index 808fe8a25..3d6446702 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java @@ -5,7 +5,7 @@ import com.alibaba.datax.common.exception.DataXException; import com.alibaba.datax.core.util.ClassSize; import com.alibaba.datax.core.util.FrameworkErrorCode; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import java.util.ArrayList; import java.util.HashMap; @@ -68,7 +68,7 @@ public String toString() { Map json = new HashMap(); json.put("size", this.getColumnNumber()); json.put("data", this.columns); - return JSON.toJSONString(json); + return Json.toJson(json, null); } @Override diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PrecisionTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PrecisionTransformer.java new file mode 100644 index 000000000..0ab4fda6d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PrecisionTransformer.java @@ -0,0 +1,56 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.element.StringColumn; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; + +import java.math.BigDecimal; +import java.util.Arrays; + +public class PrecisionTransformer extends Transformer { + public PrecisionTransformer() { + setTransformerName("dx_precision"); + } + + @Override + public Record evaluate(Record record, Object... 
paras) { + + int columnIndex; + int precision; + try { + if (paras.length != 2) { + throw new RuntimeException("dx_precision paras must be 2"); + } + + columnIndex = (Integer) paras[0]; + precision = Integer.valueOf((String) paras[1]); + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "paras:" + Arrays.asList(paras).toString() + " => " + e.getMessage()); + } + + Column column = record.getColumn(columnIndex); + + try { + String oriValue = column.asString(); + + //如果字段为空,跳过replace处理 + if (oriValue == null) { + return record; + } + BigDecimal oriNum = new BigDecimal(oriValue); + BigDecimal zeroNum = new BigDecimal("0"); + if(oriNum.doubleValue() == zeroNum.doubleValue()){ + record.setColumn(columnIndex, new StringColumn("0")); + } + else { + BigDecimal newValue = new BigDecimal(oriValue).setScale(precision, BigDecimal.ROUND_DOWN); + record.setColumn(columnIndex, new StringColumn(newValue.toString())); + } + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_RUN_EXCEPTION, e.getMessage(), e); + } + return record; + } +} diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java similarity index 100% rename 
from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java diff 
--git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java index b12367b44..9a657b691 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java @@ -36,6 +36,7 @@ public class TransformerRegistry { registTransformer(new ReplaceTransformer()); registTransformer(new FilterTransformer()); registTransformer(new GroovyTransformer()); + registTransformer(new PrecisionTransformer()); } public static void loadTransformerFromLocalStorage() { diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java index dc654a822..1ce02b4d1 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java @@ -80,12 +80,15 @@ public class CoreConstant { public static final String DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM = "core.statistics.collector.plugin.maxDirtyNumber"; + public static final String DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS = "core.statistics.reporter.plugin.class"; + public static final String DATAX_CORE_PROCESSOR_LOADER_PLUGIN_CLASS = "core.processor.loader.plugin.class"; public static final String DATAX_CORE_PROCESSOR_LODAER_PLUGIN_SOURCEPATH = "core.processor.loader.plugin.sourcePath"; public static final String DATAX_CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE = "core.processor.loader.plugin.package"; + public static final String DATAX_JOB_CONTENT_READER_NAME = "job.content[0].reader.name"; public static final String DATAX_JOB_CONTENT_READER_PARAMETER = "job.content[0].reader.parameter"; diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java similarity index 71% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java index abea439e3..a23af0d49 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java @@ -96,39 +96,39 @@ public boolean accept(File pathname) { return jarURLs; } - /** - * change the order to load class - * @param name - * @param resolve - * @return - * @throws ClassNotFoundException - */ - @Override - public Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - synchronized (getClassLoadingLock(name)){ - //First, check if the class has already been loaded - Class c = findLoadedClass(name); - if(c == null){ - long t0 = System.nanoTime(); - try { - //invoke findClass in this class - c = findClass(name); - }catch(ClassNotFoundException e){ - // ClassNotFoundException thrown if class not found - } - if(c == null){ - return super.loadClass(name, resolve); - } - //For compatibility with higher versions > java 1.8.0_141 -// sun.misc.PerfCounter.getFindClasses().addElapsedTimeFrom(t0); -// sun.misc.PerfCounter.getFindClasses().increment(); - } - if(resolve){ - resolveClass(c); - } - return c; - } - } +// /** +// * change the order to load class +// * @param name +// * @param resolve +// * @return +// * @throws ClassNotFoundException +// */ +// @Override +// public Class loadClass(String name, boolean resolve) throws ClassNotFoundException { +// synchronized (getClassLoadingLock(name)){ +// //First, check if the class has already been loaded +// Class c = findLoadedClass(name); +// if(c == null){ +// 
long t0 = System.nanoTime(); +// try { +// //invoke findClass in this class +// c = findClass(name); +// }catch(ClassNotFoundException e){ +// // ClassNotFoundException thrown if class not found +// } +// if(c == null){ +// return super.loadClass(name, resolve); +// } +// //For compatibility with higher versions > java 1.8.0_141 +//// sun.misc.PerfCounter.getFindClasses().addElapsedTimeFrom(t0); +//// sun.misc.PerfCounter.getFindClasses().increment(); +// } +// if(resolve){ +// resolveClass(c); +// } +// return c; +// } +// } /** * defined class by bytes diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java index cea1e37ce..d6d037507 100644 --- 
a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java @@ -14,6 +14,7 @@ import com.alibaba.datax.plugin.rdbms.util.*; import com.google.common.collect.Lists; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +67,7 @@ public void preCheck(Configuration originalConfig, DataBaseType dataBaseType) { } Collection taskList = new ArrayList(); for (int i = 0, len = connList.size(); i < len; i++) { - Configuration connConf = Configuration.from(connList.get(i).toString()); + Configuration connConf = Configuration.from(Json.toJson(connList.get(i), null)); PreCheckTask t = new PreCheckTask(username, password, proxyHost, proxyPort, connConf, dataBaseType, splitPK); taskList.add(t); } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java index b48818398..037c8ce1c 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java +++ 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java @@ -7,6 +7,7 @@ import com.alibaba.datax.plugin.rdbms.reader.Key; import com.alibaba.datax.plugin.rdbms.util.*; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -90,7 +91,7 @@ private static void dealJdbcAndTable(Configuration originalConfig) { for (int i = 0, len = conns.size(); i < len; i++) { Configuration connConf = Configuration - .from(conns.get(i).toString()); + .from(Json.toJson(conns.get(i), null)); connConf.getNecessaryValue(Key.JDBC_URL, DBUtilErrorCode.REQUIRED_VALUE); @@ -265,7 +266,7 @@ private static boolean recognizeTableOrQuerySqlMode( boolean isQuerySqlMode = false; for (int i = 0, len = conns.size(); i < len; i++) { Configuration connConf = Configuration - .from(conns.get(i).toString()); + .from(Json.toJson(conns.get(i), null)); table = connConf.getString(Key.TABLE, null); querySql = connConf.getString(Key.QUERY_SQL, null); diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java similarity index 93% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java index 5fdb8054c..8c7b1df00 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java @@ -7,6 +7,7 @@ 
import com.alibaba.datax.plugin.rdbms.util.DataBaseType; import com.alibaba.datax.plugin.rdbms.util.RdbmsException; import com.alibaba.druid.sql.parser.ParserException; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,11 +61,11 @@ public Boolean call() throws DataXException { for (int i = 0; i < querySqls.size(); i++) { String splitPkSql = null; - String querySql = querySqls.get(i).toString(); + String querySql = Json.toJson(querySqls.get(i), null); String table = null; if (tables != null && !tables.isEmpty()) { - table = tables.get(i).toString(); + table = Json.toJson(tables.get(i), null); } /*verify query*/ @@ -84,7 +85,7 @@ public Boolean call() throws DataXException { /*verify splitPK*/ try { if (splitPkSqls != null && !splitPkSqls.isEmpty()) { - splitPkSql = splitPkSqls.get(i).toString(); + splitPkSql = Json.toJson(splitPkSqls.get(i), null); DBUtil.sqlValid(splitPkSql, dataBaseType); if (i == 0) { SingleTableSplitUtil.precheckSplitPk(conn, splitPkSql, fetchSize, table, userName); diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java similarity index 96% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java index 928e81249..c72f0e61a 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java @@ -5,6 +5,7 @@ import com.alibaba.datax.plugin.rdbms.reader.Constant; import 
com.alibaba.datax.plugin.rdbms.reader.Key; import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; @@ -38,7 +39,7 @@ public static List doSplit( for (int i = 0, len = conns.size(); i < len; i++) { Configuration sliceConfig = originalSliceConfig.clone(); - Configuration connConf = Configuration.from(conns.get(i).toString()); + Configuration connConf = Configuration.from(Json.toJson(conns.get(i), null)); String jdbcUrl = connConf.getString(Key.JDBC_URL); sliceConfig.set(Key.JDBC_URL, jdbcUrl); @@ -119,7 +120,7 @@ public static Configuration doPreCheckSplit(Configuration originalSliceConfig) { List conns = queryConfig.getList(Constant.CONN_MARK, Object.class); for (int i = 0, len = conns.size(); i < len; i++) { - Configuration connConf = Configuration.from(conns.get(i).toString()); + Configuration connConf = Configuration.from(Json.toJson(conns.get(i), null)); List querys = new ArrayList(); List splitPkQuerys = new ArrayList(); String connPath = String.format("connection[%d]", i); diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java index eace0b099..9f153ff88 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java +++ 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java @@ -5,7 +5,7 @@ import com.alibaba.datax.plugin.rdbms.reader.Constant; import com.alibaba.datax.plugin.rdbms.reader.Key; import com.alibaba.datax.plugin.rdbms.util.*; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; @@ -344,7 +344,7 @@ public static List genSplitSqlForOracle(String splitPK, } finally { DBUtil.closeDBResources(rs, null, null); } - LOG.debug(JSON.toJSONString(splitedRange)); + LOG.debug(Json.toJson(splitedRange, null)); List rangeSql = new ArrayList(); int splitedRangeSize = splitedRange.size(); // warn: splitedRangeSize may be 0 or 1,切分规则为IS NULL以及 IS NOT NULL diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java diff --git 
a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java diff --git 
a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java index 99275182b..0d4e71cfa 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java @@ -10,6 +10,7 @@ import com.alibaba.datax.plugin.rdbms.writer.util.OriginalConfPretreatmentUtil; import com.alibaba.datax.plugin.rdbms.writer.util.WriterUtil; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Triple; import org.slf4j.Logger; @@ -69,7 +70,7 @@ public void privilegeValid(Configuration originalConfig, DataBaseType dataBaseTy Object.class); for (int i = 0, len = connections.size(); i < 
len; i++) { - Configuration connConf = Configuration.from(connections.get(i).toString()); + Configuration connConf = Configuration.from(Json.toJson(connections.get(i), null)); String jdbcUrl = connConf.getString(Key.JDBC_URL); List expandedTables = connConf.getList(Key.TABLE, String.class); boolean hasInsertPri = DBUtil.checkInsertPrivilege(dataBaseType, jdbcUrl, username, password, @@ -99,8 +100,7 @@ public void prepare(Configuration originalConfig) { int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); List conns = originalConfig.getList(Constant.CONN_MARK, Object.class); - Configuration connConf = Configuration.from(conns.get(0) - .toString()); + Configuration connConf = Configuration.from(Json.toJson(conns.get(0), null)); // 这里的 jdbcUrl 已经 append 了合适后缀参数 String jdbcUrl = connConf.getString(Key.JDBC_URL); diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java index 864e0fae9..7caa788ef 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java @@ -7,6 +7,7 @@ import com.alibaba.datax.plugin.rdbms.writer.Key; import com.alibaba.datax.plugin.rdbms.writer.Constant; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,7 +63,7 @@ public static void simplifyConf(Configuration originalConfig) { int tableNum = 0; for (int i = 0, len = connections.size(); i < len; i++) { - Configuration connConf = Configuration.from(connections.get(i).toString()); + Configuration connConf = Configuration.from(Json.toJson(connections.get(i), null)); String jdbcUrl 
= ""; if(DATABASE_TYPE.equals(DataBaseType.MySql)){ diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java similarity index 97% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java index 6b252a8a5..3913a4088 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java @@ -9,6 +9,7 @@ import com.alibaba.datax.plugin.rdbms.writer.Constant; import com.alibaba.datax.plugin.rdbms.writer.Key; import com.alibaba.druid.sql.parser.ParserException; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,7 +57,7 @@ public static List doSplit(Configuration simplifiedConf, for (Object conn : conns) { Configuration sliceConfig = simplifiedConf.clone(); - Configuration connConf = Configuration.from(conn.toString()); + Configuration connConf = Configuration.from(Json.toJson(conn, null)); jdbcUrl = connConf.getString(Key.JDBC_URL); sliceConfig.set(Key.JDBC_URL, jdbcUrl); @@ -208,7 +209,7 @@ public static String onMergeIntoDoString(List primaryKeys, List public static void preCheckPrePareSQL(Configuration originalConfig, DataBaseType type) { List conns = originalConfig.getList(Constant.CONN_MARK, Object.class); - Configuration connConf = Configuration.from(conns.get(0).toString()); + Configuration connConf = Configuration.from(Json.toJson(conns.get(0), null)); String table = connConf.getList(Key.TABLE, 
String.class).get(0); List preSqls = originalConfig.getList(Key.PRE_SQL, @@ -231,7 +232,7 @@ public static void preCheckPrePareSQL(Configuration originalConfig, DataBaseType public static void preCheckPostSQL(Configuration originalConfig, DataBaseType type) { List conns = originalConfig.getList(Constant.CONN_MARK, Object.class); - Configuration connConf = Configuration.from(conns.get(0).toString()); + Configuration connConf = Configuration.from(Json.toJson(conns.get(0), null)); String table = connConf.getList(Key.TABLE, String.class).get(0); List postSqls = originalConfig.getList(Key.POST_SQL, diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java similarity index 92% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java index 5827cd2c9..eb69de903 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java @@ -1,6 +1,6 @@ 
package com.alibaba.datax.plugin.unstructuredstorage.reader; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import java.text.DateFormat; @@ -57,6 +57,6 @@ public String toJSONString() { } public static String toJSONString(ColumnEntry columnEntry) { - return JSON.toJSONString(columnEntry); + return Json.toJson(columnEntry, null); } } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java similarity index 97% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java index 4715d7999..35e1b993c 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java @@ -5,10 +5,8 @@ import com.alibaba.datax.common.plugin.RecordSender; import com.alibaba.datax.common.plugin.TaskPluginCollector; import com.alibaba.datax.common.util.Configuration; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; 
-import com.alibaba.fastjson.TypeReference; import com.csvreader.CsvReader; +import com.webank.wedatasphere.exchangis.datax.util.Json; import io.airlift.compress.snappy.SnappyCodec; import io.airlift.compress.snappy.SnappyFramedInputStream; import org.anarres.lzo.LzoDecompressor1x_safe; @@ -426,13 +424,13 @@ public static Record transportOneRecord(RecordSender recordSender, public static List getListColumnEntry( Configuration configuration, final String path) { - List lists = configuration.getList(path, JSONObject.class); + List lists = configuration.getList(path, Object.class); if (lists == null) { return null; } List result = new ArrayList(); - for (final JSONObject object : lists) { - result.add(JSON.parseObject(object.toJSONString(), + for (final Object object : lists) { + result.add(Json.fromJson(Json.toJson(object, null), ColumnEntry.class)); } return result; @@ -562,8 +560,7 @@ public static void validateCsvReaderConfig(Configuration readerConfiguration) { String csvReaderConfig = readerConfiguration.getString(Key.CSV_READER_CONFIG); if (StringUtils.isNotBlank(csvReaderConfig)) { try { - UnstructuredStorageReaderUtil.csvReaderConfigMap = JSON.parseObject(csvReaderConfig, new TypeReference>() { - }); + UnstructuredStorageReaderUtil.csvReaderConfigMap = Json.fromJson(csvReaderConfig, Map.class, String.class, Object.class); } catch (Exception e) { LOG.info(String.format("WARN!!!!忽略csvReaderConfig配置! 
配置错误,值只能为空或者为Map结构,您配置的值为: %s", csvReaderConfig)); } @@ -617,15 +614,15 @@ public static void setCsvReaderConfig(CsvReader csvReader) { if (null != UnstructuredStorageReaderUtil.csvReaderConfigMap && !UnstructuredStorageReaderUtil.csvReaderConfigMap.isEmpty()) { try { BeanUtils.populate(csvReader, UnstructuredStorageReaderUtil.csvReaderConfigMap); - LOG.info(String.format("csvReaderConfig设置成功,设置后CsvReader:%s", JSON.toJSONString(csvReader))); + LOG.info(String.format("csvReaderConfig设置成功,设置后CsvReader:%s", Json.toJson(csvReader, null))); } catch (Exception e) { LOG.info(String.format("WARN!!!!忽略csvReaderConfig配置!通过BeanUtils.populate配置您的csvReaderConfig发生异常,您配置的值为: %s;请检查您的配置!CsvReader使用默认值[%s]", - JSON.toJSONString(UnstructuredStorageReaderUtil.csvReaderConfigMap), JSON.toJSONString(csvReader))); + Json.toJson(UnstructuredStorageReaderUtil.csvReaderConfigMap, null), Json.toJson(csvReader, null))); } } else { //默认关闭安全模式, 放开10W字节的限制 csvReader.setSafetySwitch(false); - LOG.info(String.format("CsvReader使用默认值[%s],csvReaderConfig值为[%s]", JSON.toJSONString(csvReader), JSON.toJSONString(UnstructuredStorageReaderUtil.csvReaderConfigMap))); + LOG.info(String.format("CsvReader使用默认值[%s],csvReaderConfig值为[%s]", Json.toJson(csvReader, null), Json.toJson(UnstructuredStorageReaderUtil.csvReaderConfigMap, null))); } } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java diff --git 
a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java similarity index 100% 
rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java 
rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java similarity index 97% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java index 2e1c6a180..c9037c15b 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java @@ -74,7 +74,7 @@ public static Object string2Object(String str) throws IOException, ClassNotFound public static String md5(String source, String salt, int iterator){ StringBuilder token = new StringBuilder(); try{ - MessageDigest digest = MessageDigest.getInstance("md5"); + MessageDigest digest = MessageDigest.getInstance("sha-256"); if(StringUtils.isNotEmpty(salt)){ digest.update(salt.getBytes(StandardCharsets.UTF_8)); } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java diff --git 
a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java 
similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java 
rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java rename to 
exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java similarity index 97% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java index eedc7b2ed..3c869fda9 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java @@ -22,7 +22,7 @@ import com.alibaba.datax.common.exception.DataXException; import com.webank.wedatasphere.exchangis.datax.core.transport.channel.StreamChannel; 
import com.alibaba.datax.core.util.FrameworkErrorCode; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,7 +80,7 @@ public InputStream nextStream() throws IOException { public StreamMeta streamMetaData(String encoding){ try{ String metaJson = new String(streamMeta, encoding); - return JSON.parseObject(metaJson, StreamMeta.class); + return Json.fromJson(metaJson, StreamMeta.class); }catch(Exception e){ throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, e); } diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java similarity index 97% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java index e55c5f93e..85cedd484 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java @@ -20,7 +20,7 @@ import com.alibaba.datax.common.exception.CommonErrorCode; import com.alibaba.datax.common.exception.DataXException; import com.webank.wedatasphere.exchangis.datax.core.transport.channel.StreamChannel; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +66,7 @@ public OutputStream createStream(StreamMeta 
meta, String encoding) throws IOExce if(StringUtils.isBlank(encoding)){ encoding = DEFAULT_ENCODING; } - String metaJson = JSON.toJSONString(meta); + String metaJson = Json.toJson(meta, null); return createStream(metaJson.getBytes(encoding)); } private OutputStream createStream(byte[] metaData) throws IOException { diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/Json.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/Json.java new file mode 100644 index 000000000..3206291b2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/Json.java @@ -0,0 +1,106 @@ +package com.webank.wedatasphere.exchangis.datax.util; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.*; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; + + +public class Json { + private static final String PREFIX = "["; + private static final String SUFFIX = "]"; + 
private static final Logger logger = LoggerFactory.getLogger(Json.class); + + private static ObjectMapper mapper; + + static{ + mapper = new ObjectMapper(); + mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true); + mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true); + mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true); + mapper.configure(DeserializationFeature.READ_ENUMS_USING_TO_STRING, true); + mapper.configure(SerializationFeature.WRITE_ENUMS_USING_TO_STRING, true); + mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true); + //empty beans allowed + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + //ignore unknown properties + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + //cancel to scape non ascii + mapper.configure(JsonGenerator.Feature.ESCAPE_NON_ASCII, false); + } + private Json(){} + + public static ObjectMapper getMapper(){ + return mapper; + } + + @SuppressWarnings("unchecked") + public static T fromJson(String json, Class clazz, Class... parameters){ + if(StringUtils.isNotBlank(json)){ + try{ + if(parameters.length > 0){ + return (T)mapper.readValue(json, mapper.getTypeFactory().constructParametricType(clazz, parameters)); + } + if(json.startsWith(PREFIX) + && json.endsWith(SUFFIX)){ + JavaType javaType = mapper.getTypeFactory() + .constructParametricType(ArrayList.class, clazz); + return mapper.readValue(json, javaType); + } + return (T)mapper.readValue(json, clazz); + } catch (Exception e) { + logger.info(e.getLocalizedMessage()); + throw new RuntimeException(e); + } + } + return null; + } + + public static T fromJson(InputStream stream, Class clazz, Class... 
parameters){ + StringBuilder builder = new StringBuilder(); + String jsonStr = null; + try{ + BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)); + while((jsonStr = reader.readLine()) != null){ + builder.append(jsonStr); + } + reader.close(); + }catch(Exception e){ + logger.info(e.getLocalizedMessage()); + throw new RuntimeException(e); + } + return fromJson(builder.toString(), clazz, parameters); + } + + public static String toJson(Object obj, Class model){ + return toJson(obj, model, false); + } + public static String toJson(Object obj, Class model, boolean beautify){ + ObjectWriter writer = mapper.writer(); + if(null != obj){ + try{ + if(null != model){ + writer = writer.withView(model); + } + if(beautify){ + return writer.withDefaultPrettyPrinter().writeValueAsString(obj); + } + return writer.writeValueAsString(obj); + } catch (JsonProcessingException e) { + logger.info(e.getLocalizedMessage()); + throw new RuntimeException(e); + } + } + return null; + } + +} diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java rename to exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar rename to exchangis-engines/engines/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar diff --git 
a/exchangis-plugins/engine/datax/datax-core/src/main/lib/db2jcc4.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/db2jcc4.jar similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/lib/db2jcc4.jar rename to exchangis-engines/engines/datax/datax-core/src/main/lib/db2jcc4.jar diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/lib/edb-jdbc16.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/edb-jdbc16.jar similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/lib/edb-jdbc16.jar rename to exchangis-engines/engines/datax/datax-core/src/main/lib/edb-jdbc16.jar diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar rename to exchangis-engines/engines/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/resources/.secret.properties b/exchangis-engines/engines/datax/datax-core/src/main/resources/.secret.properties similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/resources/.secret.properties rename to exchangis-engines/engines/datax/datax-core/src/main/resources/.secret.properties diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/resources/core.json b/exchangis-engines/engines/datax/datax-core/src/main/resources/core.json similarity index 86% rename from exchangis-plugins/engine/datax/datax-core/src/main/resources/core.json rename to exchangis-engines/engines/datax/datax-core/src/main/resources/core.json index 8ad9ecdbe..b056dfc5d 100644 --- a/exchangis-plugins/engine/datax/datax-core/src/main/resources/core.json +++ b/exchangis-engines/engines/datax/datax-core/src/main/resources/core.json @@ -15,17 +15,6 @@ } }, "core": { - 
"dataXServer": { - "address": "${server.address}/api/v1/task/process", - "timeout": 10000, - "protocol":"http", - "endpoint": { - "report": "/report", - "reportState": "/report/state" - }, - "reportDataxLog": false, - "reportPerfLog": false - }, "transport": { "type": "record", "channel":{ diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/resources/kerberos.properties b/exchangis-engines/engines/datax/datax-core/src/main/resources/kerberos.properties similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/resources/kerberos.properties rename to exchangis-engines/engines/datax/datax-core/src/main/resources/kerberos.properties diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/resources/ldap.properties b/exchangis-engines/engines/datax/datax-core/src/main/resources/ldap.properties similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/resources/ldap.properties rename to exchangis-engines/engines/datax/datax-core/src/main/resources/ldap.properties diff --git a/exchangis-plugins/engine/datax/datax-core/src/main/resources/log/logback.xml b/exchangis-engines/engines/datax/datax-core/src/main/resources/log/logback.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-core/src/main/resources/log/logback.xml rename to exchangis-engines/engines/datax/datax-core/src/main/resources/log/logback.xml diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/pom.xml b/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml similarity index 93% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/pom.xml rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml index 7d2eebedb..357a9b855 100644 --- a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 jar - ${datax.engine.version} + 3.0.0-Plus-2 datax-elasticsearchwriter @@ -72,10 +72,6 @@ - - com.alibaba - fastjson - diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java similarity index 98% rename from 
exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java index 7866336c7..0417d7958 100644 --- a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java @@ -18,7 +18,7 @@ package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6; import com.alibaba.datax.common.exception.DataXException; -import com.alibaba.fastjson.JSON; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.http.HttpHost; import org.apache.http.HttpRequestInterceptor; import org.apache.http.auth.AuthScope; @@ -200,7 +200,7 @@ private void putMapping(String indexName, String typeName, Map p AcknowledgedResponse acknowledgedResponse = restClient.indices().putMapping(configureTimedRequest(request), optionsBuilder.build()); if(!acknowledgedResponse.isAcknowledged()){ throw DataXException.asDataXException(ElasticWriterErrorCode.PUT_MAPPINGS_ERROR, - "can't put mapping, type:[" + typeName +"], properties:" +JSON.toJSONString(properties)); + "can't put mapping, type:[" + typeName +"], properties:" + Json.toJson(properties, null)); } } @@ -214,7 +214,7 @@ private void createIndex(String indexName, Map settings) throws CreateIndexResponse response = restClient.indices().create(configureTimedRequest(request), COMMON_OPTIONS); if(!response.isAcknowledged()){ throw DataXException.asDataXException(ElasticWriterErrorCode.CREATE_INDEX_ERROR, "can't create index:[" + indexName + - "], 
settings:" + JSON.toJSONString(settings) + ", message:[acknowledged=false]"); + "], settings:" + Json.toJson(settings, null) + ", message:[acknowledged=false]"); } }catch(ElasticsearchException e){ if(e.status().getStatus() diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java similarity index 96% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java index a91d83eff..6c1e5ac58 100644 --- a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java @@ -27,10 +27,8 @@ import com.alibaba.datax.core.statistics.plugin.task.util.DirtyRecord; import com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column.ElasticColumn; import com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column.ElasticFieldDataType; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; -import com.alibaba.fastjson.TypeReference; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.elasticsearch.action.DocWriteRequest; import 
org.elasticsearch.action.bulk.BulkItemResponse; @@ -145,14 +143,14 @@ private Map resolveColumn(ElasticRestClient client, //allow to custom the fields of properties properties = new HashMap<>(rawColumnList.size()); rawColumnList.forEach(columnRaw -> { - String raw = columnRaw.toString(); - ElasticColumn column = JSONObject - .parseObject(raw, ElasticColumn.class); + String raw = Json.toJson(columnRaw, null); + ElasticColumn column = Json + .fromJson(raw, ElasticColumn.class); if (StringUtils.isNotBlank(column.getName()) && StringUtils.isNotBlank(column.getType())) { outputColumn.add(column); if (!column.getName().equals(DEFAULT_ID) && ElasticFieldDataType.valueOf(column.getType().toUpperCase()) != ElasticFieldDataType.ALIAS) { - Map property = JSONObject.parseObject(raw, Map.class); + Map property = Json.fromJson(raw, Map.class); property.remove(ElasticKey.PROPS_COLUMN_NAME); properties.put(column.getName(), property); } @@ -229,8 +227,7 @@ public void init() { columnNameSeparator = this.taskConf.getString(ElasticKey.COLUMN_NAME_SEPARATOR, ElasticColumn.DEFAULT_NAME_SPLIT); int batchSize = this.taskConf.getInt(ElasticKey.BULK_ACTIONS, 1000); int bulkPerTask = this.taskConf.getInt(ElasticKey.BULK_PER_TASK, 1); - columns = JSON.parseObject(this.taskConf.getString(ElasticKey.PROPS_COLUMN), new TypeReference>(){ - }); + columns = Json.fromJson(this.taskConf.getString(ElasticKey.PROPS_COLUMN), List.class, ElasticColumn.class); String userName = this.taskConf.getString(ElasticKey.USERNAME, ""); String password = this.taskConf.getString(ElasticKey.PASSWORD, ""); if(StringUtils.isNotBlank(password)){ @@ -318,7 +315,7 @@ public void afterBulk(long l, BulkRequest bulkRequest, BulkResponse bulkResponse message.add(String.valueOf(itemResponse.getFailure().getStatus().getStatus())); message.add(itemResponse.getId()); message.add(itemResponse.getFailureMessage()); - pluginCollector.collectDirtyRecord(new DirtyRecord(), null, JSON.toJSONString(message)); + 
pluginCollector.collectDirtyRecord(new DirtyRecord(), null, Json.toJson(message, null)); } } } diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java similarity 
index 96% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java index 75b6c80a0..07502081c 100644 --- a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java @@ -23,8 +23,7 @@ import com.alibaba.datax.common.element.Record; import com.alibaba.datax.common.exception.DataXException; import com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.ElasticWriterErrorCode; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.TypeReference; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -156,9 +155,9 @@ public static Map toData(Record record, List colC private static Object parseObject(String rawData){ if(rawData.startsWith(ARRAY_PREFIX) && rawData.endsWith(ARRAY_SUFFIX)){ - return JSON.parseObject(rawData, new TypeReference>(){}); + return Json.fromJson(rawData, Object.class); } - return JSON.parseObject(rawData, Map.class); + return Json.fromJson(rawData, Map.class); } private static String parseDate(ElasticColumn config, Column column){ diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java 
b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java diff --git a/exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-elasticsearchwriter/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/pom.xml b/exchangis-engines/engines/datax/datax-ftpreader/pom.xml similarity index 91% rename from exchangis-plugins/engine/datax/datax-ftpreader/pom.xml rename to exchangis-engines/engines/datax/datax-ftpreader/pom.xml index 3dfd88703..8520f0145 100644 --- a/exchangis-plugins/engine/datax/datax-ftpreader/pom.xml +++ b/exchangis-engines/engines/datax/datax-ftpreader/pom.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 jar - ${datax.engine.version} + 3.0.0-Plus-2 datax-ftpreader @@ -46,11 +46,6 @@ ${commons-codec} provided - - com.alibaba - fastjson - provided - ch.qos.logback logback-classic diff --git 
a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-ftpreader/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java diff --git 
a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java index 4917e3108..a99e683a1 100644 --- a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java +++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java @@ -11,8 +11,7 @@ import com.alibaba.datax.core.util.FrameworkErrorCode; import com.alibaba.datax.plugin.unstructuredstorage.PathMeta; import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; @@ -354,7 +353,7 @@ public void destroy() { public void startRead(RecordSender recordSender) { LOG.info("start read source files..."); for (Object sourceFile : this.sourceFiles) { - PathMeta pathMeta = JSONObject.parseObject(JSON.toJSONString(sourceFile), PathMeta.class); + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); String fileName = pathMeta.getAbsolute(); LOG.info(String.format("reading file : [%s]", fileName)); InputStream inputStream = null; @@ -372,7 +371,7 @@ public void startRead(RecordSender recordSender) { public void startRead(ChannelOutput channelOutput) { LOG.info("start read source files to stream channel..."); for(Object sourceFile : this.sourceFiles){ - PathMeta 
pathMeta = JSONObject.parseObject(JSON.toJSONString(sourceFile), PathMeta.class); + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); String absolutePath = pathMeta.getAbsolute(); String relativePath = pathMeta.getRelative(); LOG.info(String.format("reading file: [%s]", absolutePath)); diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java diff --git 
a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/resources/plugin-template.json b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin-template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/resources/plugin-template.json rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin-template.json diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-ftpreader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpreader/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/pom.xml b/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml similarity index 91% rename from exchangis-plugins/engine/datax/datax-ftpwriter/pom.xml rename to 
exchangis-engines/engines/datax/datax-ftpwriter/pom.xml index dd3add73a..ec0635c29 100644 --- a/exchangis-plugins/engine/datax/datax-ftpwriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 jar - ${datax.engine.version} + 3.0.0-Plus-2 datax-ftpwriter @@ -28,7 +28,6 @@ provided org.slf4j slf4j-api - ch.qos.logback @@ -51,11 +50,6 @@ ${commons-codec} provided - - com.alibaba - fastjson - provided - com.jcraft jsch @@ -66,7 +60,6 @@ commons-net 3.3 - diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java diff --git 
a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java index b6ddbbfe0..7a33554eb 100644 --- a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java @@ -3,11 +3,10 @@ import com.alibaba.datax.common.exception.DataXException; import com.alibaba.datax.plugin.unstructuredstorage.writer.UnstructuredStorageWriterUtil; import com.alibaba.datax.plugin.writer.ftpwriter.FtpWriterErrorCode; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; import com.jcraft.jsch.*; import com.jcraft.jsch.ChannelSftp.LsEntry; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.IOUtils; import 
org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; @@ -251,8 +250,7 @@ public Set getAllFilesInDir(String dir, String prefixFileName, boolean r try { @SuppressWarnings("rawtypes") Vector allFiles = this.channelSftp.ls(dir); - LOG.debug(String.format("ls: %s", JSON.toJSONString(allFiles, - SerializerFeature.UseSingleQuotes))); + LOG.debug(String.format("ls: %s", Json.toJson(allFiles, null))); StringBuilder dirBuilder = new StringBuilder(dir); if(!dirBuilder.toString().endsWith(String.valueOf(IOUtils.DIR_SEPARATOR))){ dirBuilder.append(IOUtils.DIR_SEPARATOR); diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java index f0b149d4b..13bc5a59a 100644 --- a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java @@ -2,9 +2,8 @@ import com.alibaba.datax.common.exception.DataXException; import com.alibaba.datax.plugin.writer.ftpwriter.FtpWriterErrorCode; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.net.ftp.FTPClient; @@ -243,7 
+242,7 @@ public Set getAllFilesInDir(String dir, String prefixFileName, boolean r this.printWorkingDirectory(); FTPFile[] fs = this.ftpClient.listFiles(dir); LOG.debug(String.format("ls: %s", - JSON.toJSONString(fs, SerializerFeature.UseSingleQuotes))); + Json.toJson(fs, null))); StringBuilder dirBuilder = new StringBuilder(dir); if(!dirBuilder.toString().endsWith(String.valueOf(IOUtils.DIR_SEPARATOR))){ dirBuilder.append(IOUtils.DIR_SEPARATOR); diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/pom.xml b/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml similarity index 88% rename from exchangis-plugins/engine/datax/datax-hdfsreader/pom.xml rename to exchangis-engines/engines/datax/datax-hdfsreader/pom.xml index fb7665150..81789b25b 100644 --- a/exchangis-plugins/engine/datax/datax-hdfsreader/pom.xml +++ b/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml @@ -3,23 +3,22 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 
datax-hdfsreader jar - ${datax.engine.version} + 3.0.0-Plus-2 - 1.2.1 - 2.7.2 - 1.2.2 + 3.1.3 + 3.3.4 + 2.5.5 2.9.1 - com.webank.wedatasphere.exchangis datax-core @@ -61,7 +60,7 @@ io.netty netty-all - 4.1.46.Final + 4.1.86.Final org.apache.hadoop @@ -78,10 +77,6 @@ hadoop-mapreduce-client-core ${hadoop.version} - - com.alibaba - fastjson - org.apache.hive hive-exec @@ -95,6 +90,10 @@ zookeeper org.apache.zookeeper + + org.pentaho + pentaho-aggdesigner-algorithm + @@ -108,6 +107,10 @@ hbase-server ${hbase.version} + + org.apache.hadoop + hadoop-hdfs + diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java rename to 
exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java index 49faa5af1..5b3715e16 100644 --- a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java @@ -11,8 +11,7 @@ import com.alibaba.datax.core.util.FrameworkErrorCode; import com.alibaba.datax.plugin.unstructuredstorage.PathMeta; import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -360,7 +359,7 @@ public void startRead(RecordSender recordSender) { LOG.info("Read start"); hdfsReaderUtil.getUgi().doAs((PrivilegedAction) () -> { for (Object sourceFile : sourceFiles) { - PathMeta pathMeta = JSONObject.parseObject(JSON.toJSONString(sourceFile), PathMeta.class); + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); String fileName = pathMeta.getAbsolute(); LOG.info(String.format("Reading file : [%s]", fileName)); @@ -421,7 +420,7 @@ public void startRead(ChannelOutput channelOutput) { LOG.info("start 
read source HDFS files to stream channel..."); hdfsReaderUtil.getUgi().doAs((PrivilegedAction) () ->{ for(Object sourceFile : sourceFiles){ - PathMeta pathMeta = JSONObject.parseObject(JSON.toJSONString(sourceFile), PathMeta.class); + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); String absolutePath = pathMeta.getAbsolute(); String relativePath = pathMeta.getRelative(); LOG.info(String.format("reading file : [%s]", absolutePath)); diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java index a8a815afb..7738c9ce8 100644 --- a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java @@ -13,10 +13,9 @@ import 
com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderErrorCode; import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; import com.alibaba.datax.plugin.utils.HdfsUserGroupInfoLock; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; import com.webank.wedatasphere.exchangis.datax.common.ldap.LdapConnector; +import com.webank.wedatasphere.exchangis.datax.util.Json; import com.webank.wedatasphere.exchangis.datax.util.KerberosUtil; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.*; @@ -94,11 +93,12 @@ UserGroupInformation getUgi(){ hadoopConf = new org.apache.hadoop.conf.Configuration(); //http://blog.csdn.net/yangjl38/article/details/7583374 Configuration hadoopSiteParams = readerConfig.getConfiguration(Key.HADOOP_CONFIG); - JSONObject hadoopSiteParamsAsJsonObject = JSON.parseObject(readerConfig.getString(Key.HADOOP_CONFIG)); + Map hadoopSiteParamsAsJsonObject = Json.fromJson(readerConfig.getString(Key.HADOOP_CONFIG), Map.class); if (null != hadoopSiteParams) { Set paramKeys = hadoopSiteParams.getKeys(); for (String each : paramKeys) { - hadoopConf.set(each, hadoopSiteParamsAsJsonObject.getString(each)); + assert hadoopSiteParamsAsJsonObject != null; + hadoopConf.set(each, String.valueOf(hadoopSiteParamsAsJsonObject.getOrDefault(each, ""))); } } hadoopConf.set(HDFS_DEFAULT_FS_KEY, readerConfig.getString(Key.DEFAULT_FS)); @@ -152,7 +152,7 @@ UserGroupInformation getUgi(){ throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, message); } - LOG.info(String.format("hadoopConfig details:%s", JSON.toJSONString(this.hadoopConf))); + LOG.trace(String.format("hadoopConfig details:%s", Json.toJson(this.hadoopConf, null))); } /** @@ -544,7 +544,7 @@ void orcFileStartRead(String sourceOrcFilePath, Configuration readerSliceConfig, throw 
DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, message); } } else { - String message = String.format("请确认您所读取的列配置正确!columnIndexMax 小于0,column:%s", JSON.toJSONString(column)); + String message = String.format("请确认您所读取的列配置正确!columnIndexMax 小于0,column:%s", Json.toJson(column, null)); throw DataXException.asDataXException(HdfsReaderErrorCode.BAD_CONFIG_VALUE, message); } } @@ -709,7 +709,7 @@ private int getMaxIndex(List columnConfigs) { Integer columnIndex = columnConfig.getIndex(); if (columnIndex != null && columnIndex < 0) { String message = String.format("您column中配置的index不能小于0,请修改为正确的index,column配置:%s", - JSON.toJSONString(columnConfigs)); + Json.toJson(columnConfigs, null)); LOG.error(message); throw DataXException.asDataXException(HdfsReaderErrorCode.CONFIG_INVALID_EXCEPTION, message); } else if (columnIndex != null && columnIndex > maxIndex) { diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java similarity index 94% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java 
index 0fe8d4a69..b46189e7c 100644 --- a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java @@ -47,19 +47,18 @@ public void parse(String inputPath, Configuration parseConf, Action action) { org.apache.hadoop.conf.Configuration configuration = fileSystem.getConf(); LOG.info("Start to parse HFile: [" + inputPath + "] in HBASEV1HFileParser"); try (HFile.Reader reader = HFile.createReader(fileSystem, new Path(inputPath), - new CacheConfig(configuration), configuration)) { - HFileScanner scanner = reader.getScanner(true, true); + new CacheConfig(configuration), false, configuration)) { + HFileScanner scanner = reader.getScanner(configuration, true, true); if(null == parseConf){ parseConf = Configuration.from("{}"); for(String parseColumn : PARSE_COLUMNS){ parseConf.set(parseColumn, true); } } - reader.loadFileInfo(); if(scanner.seekTo()) { do { //Cell entity - Cell cell = scanner.getKeyValue(); + Cell cell = scanner.getCell(); List sourceList = new ArrayList<>(); parseConf.getKeys().forEach(configKey -> { switch(configKey){ diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java 
b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java similarity index 75% rename from exchangis-plugins/engine/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java rename to exchangis-engines/engines/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java index d40a9644c..dae334eab 100644 --- a/exchangis-plugins/engine/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java +++ 
b/exchangis-engines/engines/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java @@ -4,7 +4,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; @@ -15,11 +14,10 @@ public class HFileParser { public void parse(String pathString, Configuration hadoopConf) throws IOException { FileSystem fs = new Path(pathString).getFileSystem(hadoopConf); - HFile.Reader reader = HFile.createReader(fs, new Path(pathString), new CacheConfig(hadoopConf), hadoopConf); - HFileScanner scanner = reader.getScanner(true, true); - reader.loadFileInfo(); + HFile.Reader reader = HFile.createReader(fs, new Path(pathString), new CacheConfig(hadoopConf), false, hadoopConf); + HFileScanner scanner = reader.getScanner(hadoopConf, true, true); scanner.seekTo(); - Cell cell = scanner.getKeyValue(); + Cell cell = scanner.getCell(); scanner.next(); } } diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/pom.xml b/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml similarity index 88% rename from exchangis-plugins/engine/datax/datax-hdfswriter/pom.xml rename to exchangis-engines/engines/datax/datax-hdfswriter/pom.xml index b7442a04f..11c814aec 100644 --- a/exchangis-plugins/engine/datax/datax-hdfswriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml @@ -3,19 +3,19 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 datax-hdfswriter jar - ${datax.engine.version} + 3.0.0-Plus-2 - 1.2.1 - 2.7.2 + 3.1.3 + 3.3.4 2.9.1 @@ -36,10 +36,6 @@ 
${datax.engine.version} - - com.alibaba - fastjson - org.slf4j slf4j-api @@ -50,7 +46,6 @@ logback-classic provided - org.apache.hadoop hadoop-hdfs @@ -70,7 +65,7 @@ io.netty netty-all - 4.1.46.Final + 4.1.86.Final org.apache.hadoop @@ -104,6 +99,10 @@ geronimo-jaspic_1.0_spec org.apache.geronimo.specs + + org.pentaho + pentaho-aggdesigner-algorithm + @@ -115,6 +114,12 @@ org.apache.hive.hcatalog hive-hcatalog-core ${hive.version} + + + org.pentaho + pentaho-aggdesigner-algorithm + + xalan diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java similarity index 99% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java index 519a65476..305e04c7c 100644 --- 
a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java @@ -432,4 +432,8 @@ public void destroy() { } } + + public static void main(String[] args) { + + } } diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java index 7ff33ac68..c5bfc5113 100644 --- a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java @@ -9,11 +9,10 @@ import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; import com.alibaba.datax.core.util.LdapUtil; import com.alibaba.datax.plugin.utils.HdfsUserGroupInfoLock; -import 
com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; import com.google.common.collect.Lists; import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; import com.webank.wedatasphere.exchangis.datax.common.ldap.LdapConnector; +import com.webank.wedatasphere.exchangis.datax.util.Json; import com.webank.wedatasphere.exchangis.datax.util.KerberosUtil; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -78,11 +77,12 @@ void getFileSystem(String defaultFS, Configuration taskConfig) { hadoopConf = new org.apache.hadoop.conf.Configuration(); this.writerConfig = taskConfig; Configuration hadoopSiteParams = taskConfig.getConfiguration(Key.HADOOP_CONFIG); - JSONObject hadoopSiteParamsAsJsonObject = JSON.parseObject(taskConfig.getString(Key.HADOOP_CONFIG)); + Map hadoopSiteParamsAsJsonObject = Json.fromJson(taskConfig.getString(Key.HADOOP_CONFIG), Map.class); if (null != hadoopSiteParams) { Set paramKeys = hadoopSiteParams.getKeys(); for (String each : paramKeys) { - hadoopConf.set(each, hadoopSiteParamsAsJsonObject.getString(each)); + assert hadoopSiteParamsAsJsonObject != null; + hadoopConf.set(each, String.valueOf(hadoopSiteParamsAsJsonObject.get(each))); } } hadoopConf.set(HDFS_DEFAULT_FS_KEY, defaultFS); @@ -465,7 +465,7 @@ void moveToDirectory(List srcPaths, String destPath){ } }catch(Exception e){ String message = String.format("occurred error while move srcPaths : %s to destPath: %s ,please check your network", - JSON.toJSONString(srcPaths), destPath); + Json.toJson(srcPaths, null), destPath); LOG.error(message); throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); } @@ -880,7 +880,7 @@ private void transformOrcStruct( MutablePair, Boolean> transportRes for (int t = 0; t < items.length; t++){ items[t] = StringUtils.join(new String[]{"\"",items[t] ,"\""}, ""); } - List list = JSON.parseArray("[" + StringUtils.join(items, split) + "]", String.class); + List list = Json.fromJson("[" 
+ StringUtils.join(items, split) + "]", String.class); List listText = new ArrayList<>(); list.forEach(value -> listText.add(new Text(value))); result.set(i, listText); @@ -897,7 +897,7 @@ private void transformOrcStruct( MutablePair, Boolean> transportRes (new String[]{"\"", attrs[0],"\":\"", attrs[1], "\""}, ""); } } - Map map1 = JSON.parseObject("{" + StringUtils.join(entries, split) + "}", Map.class); + Map map1 = Json.fromJson("{" + StringUtils.join(entries, split) + "}", Map.class); Map mapText = new HashMap<>(); if(null != map1) { map1.forEach((k, v) -> mapText.put(new Text((String) k), new Text((String) v))); diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin.json similarity index 100% rename from 
exchangis-plugins/engine/datax/datax-hdfswriter/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-mysqlreader/pom.xml b/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml similarity index 94% rename from exchangis-plugins/engine/datax/datax-mysqlreader/pom.xml rename to exchangis-engines/engines/datax/datax-mysqlreader/pom.xml index 4489461c5..12389b823 100644 --- a/exchangis-plugins/engine/datax/datax-mysqlreader/pom.xml +++ b/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml @@ -3,13 +3,13 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 - ${datax.engine.version} + 3.0.0-Plus-2 datax-mysqlreader jar diff --git a/exchangis-plugins/engine/datax/datax-mysqlreader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlreader/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-mysqlreader/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java 
b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java rename to exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java diff --git a/exchangis-plugins/engine/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java rename to exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-mysqlreader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlreader/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-mysqlwriter/pom.xml b/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml similarity index 94% rename from 
exchangis-plugins/engine/datax/datax-mysqlwriter/pom.xml rename to exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml index 0fe6fbbbe..7339dbc27 100644 --- a/exchangis-plugins/engine/datax/datax-mysqlwriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 jar - ${datax.engine.version} + 3.0.0-Plus-2 datax-mysqlwriter diff --git a/exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-mysqlwriter/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java rename to exchangis-engines/engines/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java diff --git a/exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin.json diff --git 
a/exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/pom.xml b/exchangis-engines/engines/datax/datax-oraclereader/pom.xml similarity index 92% rename from exchangis-plugins/engine/datax/datax-oraclereader/pom.xml rename to exchangis-engines/engines/datax/datax-oraclereader/pom.xml index be259d88c..fd6f86f07 100644 --- a/exchangis-plugins/engine/datax/datax-oraclereader/pom.xml +++ b/exchangis-engines/engines/datax/datax-oraclereader/pom.xml @@ -3,13 +3,13 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 - ${datax.engine.version} + 3.0.0-Plus-2 datax-oraclereader jar @@ -49,7 +49,7 @@ ojdbc6 11.2.0.3 system - ${basedir}/src/main/lib/ojdbc6-11.2.0.3.jar + ${pom.basedir}/src/main/lib/ojdbc6-11.2.0.3.jar diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-oraclereader/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java 
b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar b/exchangis-engines/engines/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar diff --git 
a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-oraclereader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclereader/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/pom.xml b/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml similarity index 89% rename from exchangis-plugins/engine/datax/datax-oraclewriter/pom.xml rename to exchangis-engines/engines/datax/datax-oraclewriter/pom.xml index 1808822b7..054951019 100644 --- a/exchangis-plugins/engine/datax/datax-oraclewriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 - ${datax.engine.version} + 3.0.0-Plus-2 datax-oraclewriter jar @@ -46,7 +46,7 @@ ojdbc6 11.2.0.3 system - ${basedir}/src/main/lib/ojdbc6-11.2.0.3.jar + ${pom.basedir}/src/main/lib/ojdbc6-11.2.0.3.jar @@ -70,7 +70,7 @@ false false - ${basedir}/src/main/assembly/package.xml + ${pom.basedir}/src/main/assembly/package.xml plugin diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/src/main/assembly/package.xml 
b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclewriter/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-oraclewriter/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java rename to exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar rename to exchangis-engines/engines/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/src/main/resources/plugin.json 
b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclewriter/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/pom.xml b/exchangis-engines/engines/datax/datax-textfilereader/pom.xml similarity index 90% rename from exchangis-plugins/engine/datax/datax-textfilereader/pom.xml rename to exchangis-engines/engines/datax/datax-textfilereader/pom.xml index ae2270759..6e7c18313 100644 --- a/exchangis-plugins/engine/datax/datax-textfilereader/pom.xml +++ b/exchangis-engines/engines/datax/datax-textfilereader/pom.xml @@ -3,13 +3,13 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 - ${datax.engine.version} + 3.0.0-Plus-2 datax-textfilereader jar @@ -51,11 +51,6 @@ ${commons-codec} provided - - com.alibaba - fastjson - provided - com.google.guava guava diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-textfilereader/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/assembly/package.xml rename to 
exchangis-engines/engines/datax/datax-textfilereader/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java rename to exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java rename to exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java similarity index 98% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java rename to exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java index 5b64e76c2..3106a0442 100644 --- a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java +++ 
b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java @@ -11,9 +11,8 @@ import com.alibaba.datax.plugin.unstructuredstorage.PathMeta; import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderErrorCode; import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; import com.google.common.collect.Sets; +import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.BooleanUtils; @@ -442,7 +441,8 @@ public void destroy() { public void startRead(RecordSender recordSender) { LOG.debug("start read source files..."); for (Object sourceFile : this.sourceFiles) { - PathMeta pathMeta = JSONObject.parseObject(JSON.toJSONString(sourceFile), PathMeta.class); + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); + assert pathMeta != null; String fileName = pathMeta.getAbsolute(); LOG.info(String.format("reading file : [%s]", fileName)); InputStream inputStream; @@ -468,7 +468,8 @@ public void startRead(RecordSender recordSender) { public void startRead(ChannelOutput channelOutput) { LOG.info("start read source files to stream channel..."); for(Object sourceFile: this.sourceFiles){ - PathMeta pathMeta = JSONObject.parseObject(JSON.toJSONString(sourceFile), PathMeta.class); + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); + assert pathMeta != null; String absolutePath = pathMeta.getAbsolute(); String relativePath = pathMeta.getRelative(); LOG.info(String.format("reading file : [%s]", absolutePath)); diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java 
b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java rename to exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-textfilereader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilereader/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/pom.xml b/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml similarity index 90% rename from exchangis-plugins/engine/datax/datax-textfilewriter/pom.xml rename to exchangis-engines/engines/datax/datax-textfilewriter/pom.xml index dff38573f..1d0c9f1f3 100644 --- a/exchangis-plugins/engine/datax/datax-textfilewriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml @@ -3,13 +3,13 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-executor-engine-datax + 
exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 ../pom.xml 4.0.0 - ${datax.engine.version} + 3.0.0-Plus-2 datax-textfilewriter jar @@ -51,11 +51,6 @@ ${commons-codec} provided - - com.alibaba - fastjson - provided - com.google.guava guava diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/assembly/package.xml similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilewriter/src/main/assembly/package.xml rename to exchangis-engines/engines/datax/datax-textfilewriter/src/main/assembly/package.xml diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java rename to exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java rename to exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java 
b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java rename to exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilewriter/src/main/resources/plugin.json rename to exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin.json diff --git a/exchangis-plugins/engine/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json similarity index 100% rename from exchangis-plugins/engine/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json rename to exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json diff --git a/exchangis-plugins/engine/datax/pom.xml b/exchangis-engines/engines/datax/pom.xml similarity index 86% rename from exchangis-plugins/engine/datax/pom.xml rename to exchangis-engines/engines/datax/pom.xml index 311769e66..15d7f5879 100644 --- a/exchangis-plugins/engine/datax/pom.xml +++ b/exchangis-engines/engines/datax/pom.xml @@ -3,19 +3,19 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-plugins com.webank.wedatasphere.exchangis - 1.0.0 + exchangis + 1.1.2 + ../../../pom.xml 4.0.0 - exchangis-executor-engine-datax + exchangis-engine-datax pom 1.7.25 
1.2.3 - 1.2.68 3.1.1 16.0.1 2.8.2 @@ -25,11 +25,12 @@ 1.10 1.2 1.9.4 - 2.7.2 - 1.2.1 + 3.3.4 + 3.1.3 6.7.1 1.11 3.0.0-Plus-2 + 2.11.0 datax-core @@ -58,11 +59,6 @@ commons-pool ${commons-pool} - - com.alibaba - fastjson - ${fastjson-version} - com.google.guava guava @@ -113,6 +109,16 @@ commons-configuration ${commons-configuration-version} + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-common/pom.xml b/exchangis-engines/exchangis-engine-common/pom.xml new file mode 100644 index 000000000..d6cf14efa --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/pom.xml @@ -0,0 +1,30 @@ + + + + exchangis-engines + com.webank.wedatasphere.exchangis + 1.1.2 + + 4.0.0 + + exchangis-engine-common + + + 8 + 8 + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${exchangis.version} + + + org.apache.linkis + linkis-common + ${linkis.version} + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/config/ExchangisEngineConfiguration.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/config/ExchangisEngineConfiguration.java new file mode 100644 index 000000000..728bae3f0 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/config/ExchangisEngineConfiguration.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.engine.config; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * Engine configuration + */ +public class ExchangisEngineConfiguration { + + public static final CommonVars ENGINE_RESOURCE_ROOT_PATH = CommonVars.apply("wds.exchangis.engine.root.path", + System.getProperty("user.dir", "/tmp/exchangis/") + "/engine"); + /** + * If need to store the merged resource into 
local path + */ + public static final CommonVars ENGINE_RESOURCE_MERGE_LOCAL = CommonVars.apply("wds.exchangis.engine.resource.merge.local", true); + + public static final CommonVars ENGINE_RESOURCE_TMP_PATH = CommonVars.apply("wds.exchangis.engine.resource.temp.path", "/tmp/exchangis/engine"); + + /** + * Packet suffix + */ + public static final CommonVars ENGINE_RESOURCE_PACKET_SUFFIX = CommonVars.apply("wds.exchangis.engine.resource.packet.suffix", ".zip"); +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineBmlResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineBmlResource.java new file mode 100644 index 000000000..04806258e --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineBmlResource.java @@ -0,0 +1,105 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Calendar; + +/** + * BML resources + */ +public class EngineBmlResource extends EngineResource { + + private static final Logger LOG = LoggerFactory.getLogger(EngineBmlResource.class); + + public static final String DEFAULT_SCHEME = "bml"; + + /** + * Resource id + */ + private String resourceId; + + /** + * Version + */ + private String version; + + public EngineBmlResource(String engineType, + String path, String name, + String resourceId, String version, String creator){ + this.type = DEFAULT_SCHEME; + this.name = name; + this.engineType = engineType; + this.resourceId = resourceId; + // Use the bml resource id as id + this.id = resourceId; + this.version = version; + this.path = path; + 
this.creator = creator; + Calendar calendar = Calendar.getInstance(); + this.createTime = calendar.getTime(); + this.modifyTime = calendar.getTime(); + } + + /** + * Get bml resource from stored resource + * @param storeResource store resource + */ + public EngineBmlResource(EngineStoreResource storeResource){ + this(storeResource.engineType, storeResource.path, storeResource.name, null, null, + storeResource.creator); + this.createTime = storeResource.createTime; + this.modifyTime = storeResource.modifyTime; + String storeUri = storeResource.getStoreUri(); + if (StringUtils.isNotBlank(storeUri)){ + try { + String storePath = new URI(storeUri).getPath(); + if (storePath.startsWith(IOUtils.DIR_SEPARATOR_UNIX + "")){ + storePath = storePath.substring(1); + } + String[] storeParts = storePath.split(IOUtils.DIR_SEPARATOR_UNIX + ""); + if (storeParts.length >= 2){ + this.resourceId = storeParts[0]; + this.version = storeParts[1]; + } + } catch (URISyntaxException e) { + LOG.warn("Unrecognized bml stored uri: [{}]", storeUri, e); + } + } + } + + @Override + public InputStream getInputStream() throws IOException { + // TODO get input stream from BML + return null; + } + + @Override + public URI getURI() throws URISyntaxException { + return new URI(DEFAULT_SCHEME, "", IOUtils.DIR_SEPARATOR_UNIX + + resourceId + IOUtils.DIR_SEPARATOR_UNIX + version, null, null); + } + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineLocalPathResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineLocalPathResource.java new file mode 100644 index 000000000..5c038ab57 --- 
/dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineLocalPathResource.java @@ -0,0 +1,91 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; + +import java.io.*; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Calendar; +import java.util.UUID; + +/** + * Local engine resource + */ +public class EngineLocalPathResource extends EngineResource{ + + public static final String DEFAULT_SCHEME = "file"; + + /** + * Local file + */ + private final File localFile; + + /** + * Whether the resource is a packet + */ + private boolean isPacket; + + public EngineLocalPathResource(String engineType, URI baseUri, String path){ + this(engineType, baseUri, path, false); + } + + public EngineLocalPathResource(String engineType, URI baseUri, String path, boolean isPacket){ + this.type = DEFAULT_SCHEME; + this.engineType = engineType; + this.path = path; + this.localFile = new File(baseUri.getPath(), path); + this.name = localFile.getName(); + this.isPacket = isPacket; + Calendar calendar = Calendar.getInstance(); + this.createTime = calendar.getTime(); + this.modifyTime = calendar.getTime(); + // Set the creator as jvm user + this.creator = EnvironmentUtils.getJvmUser(); + // Random resource id + this.id = UUID.randomUUID().toString(); + } + @Override + public InputStream getInputStream() throws IOException { + if (localFile.exists() && localFile.isFile()){ + return new FileInputStream(this.localFile); + } + return null; + } + + @Override + public URI getURI() throws URISyntaxException { + return this.localFile.toURI(); + } + + + public String getPath() { + return path; + } + + public File getLocalFile(){ + return this.localFile; + } + + public boolean isPacket() { + return isPacket; + } + + public void setPacket(boolean isPacket){ + this.isPacket = isPacket; + } + + @Override + public int hashCode() { + 
return (getEngineType() + ":" + this.localFile.getPath()).hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof EngineLocalPathResource){ + EngineLocalPathResource other = (EngineLocalPathResource)obj; + return this.engineType.equals(other.getEngineType()) && + this.localFile.getPath().equals(other.localFile.getPath()); + } + return super.equals(obj); + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java new file mode 100644 index 000000000..ba74d2596 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java @@ -0,0 +1,127 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Date; + +/** + * Engine resource + */ +public abstract class EngineResource { + + /** + * Engine type + */ + protected String engineType; + + /** + * Resource id + */ + protected String id; + + /** + * Resource name + */ + protected String name; + + /** + * Resource type + */ + protected String type; + + /** + * Resource path + */ + protected String path; + + /** + * Create time + */ + protected Date createTime; + + /** + * Modify time + */ + protected Date modifyTime; + /** + * Create user + */ + protected String creator; + /** + * Get input stream from resource + * @return input stream + */ + public abstract InputStream getInputStream() throws IOException; + + /** + * URI value + * @return uri + */ + public abstract URI getURI() throws URISyntaxException; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void 
setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineSettings.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineSettings.java new file mode 100644 index 000000000..10eb47a42 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineSettings.java @@ -0,0 +1,179 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.utils.JsonUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * Engine settings + */ +public class EngineSettings { + + private static final Logger LOG = LoggerFactory.getLogger(EngineSettings.class); + /** + * ID + */ + private String id; + + /** + * Engine name: engine_name + */ + private String name; + + /** + * Description: engine_desc + */ + private String description; + + /** + * Settings: engine_settings_value + */ + 
@JsonIgnoreProperties + private String settings; + /** + * Direction: engine_direction => hdfs->local,mysql->hdfs,mysql->hdfs + */ + @JsonIgnoreProperties + private String direction; + + /** + * Resource loader class: res_loader_class + */ + private String resourceLoaderClass; + + /** + * Resource uploader class: res_uploader_class + */ + private String resourceUploaderClass; + + /** + * Direct rules + */ + private final List directionRules = new ArrayList<>(); + + /** + * Setting map + */ + private final Map settingsMap = new HashMap<>(); + + + public List getDirectionRules(){ + if (directionRules.isEmpty() && StringUtils.isNotBlank(direction)){ + synchronized (directionRules) { + if (directionRules.isEmpty()) { + String[] directs = direction.split(","); + for (String direct : directs) { + String[] parts = direct.trim().split("->"); + if (parts.length == 2) { + directionRules.add(new Direction(parts[0].trim(), parts[1].trim())); + } + } + } + } + } + return directionRules; + } + + @SuppressWarnings("unchecked") + public Map getSettingsMap(){ + if (settingsMap.isEmpty() && StringUtils.isNotBlank(settings)){ + synchronized (settingsMap){ + if (settingsMap.isEmpty()){ + try { + settingsMap.putAll(JsonUtils.jackson().reader().readValue(settings, Map.class)); + }catch(Exception e){ + // Ignore + LOG.warn("Fail to load engine settings properties", e); + } + } + } + } + return settingsMap; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getSettings() { + return settings; + } + + public void setSettings(String settings) { + this.settings = settings; + } + + public String getDirection() { + return direction; + } + + public void 
setDirection(String direction) { + this.direction = direction; + } + + public String getResourceLoaderClass() { + return resourceLoaderClass; + } + + public void setResourceLoaderClass(String resourceLoaderClass) { + this.resourceLoaderClass = resourceLoaderClass; + } + + public String getResourceUploaderClass() { + return resourceUploaderClass; + } + + public void setResourceUploaderClass(String resourceUploaderClass) { + this.resourceUploaderClass = resourceUploaderClass; + } + + + public static class Direction{ + /** + * Source type + */ + private final String source; + + /** + * Sink type + */ + private final String sink; + + public Direction(String source, String sink){ + this.source = source; + this.sink = sink; + } + + public String getSource() { + return source; + } + + public String getSink() { + return sink; + } + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineStoreResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineStoreResource.java new file mode 100644 index 000000000..dd220d0ed --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineStoreResource.java @@ -0,0 +1,59 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Calendar; +import java.util.Objects; +import java.util.Optional; + +/** + * Engine store into database + */ +public class EngineStoreResource extends EngineResource{ + + private String storeUri; + + public EngineStoreResource(EngineResource engineResource){ + this.engineType = engineResource.getEngineType(); + this.name = engineResource.getName(); + this.type = engineResource.getType(); + this.path = engineResource.getPath(); + this.createTime = 
Optional.ofNullable(engineResource.getCreateTime()) + .orElse(Calendar.getInstance().getTime()); + this.modifyTime = Optional.ofNullable(engineResource.getModifyTime()) + .orElse(Calendar.getInstance().getTime()); + this.creator = engineResource.getCreator(); + try { + URI uri = engineResource.getURI(); + if (Objects.nonNull(uri)){ + this.storeUri = uri.toString(); + } + } catch (Exception e){ + // Ignore + } + + } + + public EngineStoreResource(){ + + } + @Override + public InputStream getInputStream() throws IOException { + throw new IllegalArgumentException("Unsupported method 'getInputStream()'"); + } + + @Override + public URI getURI() throws URISyntaxException { + throw new IllegalArgumentException("Unsupported method 'getURI()'"); + } + + public String getStoreUri() { + return storeUri; + } + + public void setStoreUri(String storeUri) { + this.storeUri = storeUri; + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineExceptionCode.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineExceptionCode.java new file mode 100644 index 000000000..db1b957f4 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineExceptionCode.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.engine.exception; + +/** + * Exception code fo engine + * 32000 ~ 32999 + */ +public enum ExchangisEngineExceptionCode { + RESOURCE_ERROR(32000), + RESOURCE_LOAD_ERROR(32001), + RESOURCE_UPLOAD_ERROR(32002); + + private int code; + + ExchangisEngineExceptionCode(int code) { + this.code = code; + } + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } +} diff --git 
a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResException.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResException.java new file mode 100644 index 000000000..c195cfd0f --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResException.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.engine.exception; + +import org.apache.linkis.common.exception.ErrorException; + +import static com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineExceptionCode.RESOURCE_ERROR; + +/** + * Engine resource exception + */ +public class ExchangisEngineResException extends ErrorException { + public ExchangisEngineResException(String desc) { + this(desc, null); + } + + public ExchangisEngineResException(String desc, Throwable t){ + super(RESOURCE_ERROR.getCode(), desc); + super.initCause(t); + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResLoadException.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResLoadException.java new file mode 100644 index 000000000..afebd8911 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResLoadException.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.engine.exception; + +import static com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineExceptionCode.RESOURCE_LOAD_ERROR; + +/** + * Engine resource load exception + */ +public class ExchangisEngineResLoadException extends ExchangisEngineResException { + + + public ExchangisEngineResLoadException(String desc) { + super(desc); + } + + public 
ExchangisEngineResLoadException(String desc, Throwable t) { + super(desc, t); + super.setErrCode(RESOURCE_LOAD_ERROR.getCode()); + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResUploadException.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResUploadException.java new file mode 100644 index 000000000..02e415d62 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResUploadException.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.engine.exception; + +import static com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineExceptionCode.RESOURCE_UPLOAD_ERROR; + +/** + * Engine resource upload exception + */ +public class ExchangisEngineResUploadException extends ExchangisEngineResException{ + public ExchangisEngineResUploadException(String desc) { + super(desc); + } + + public ExchangisEngineResUploadException(String desc, Throwable t) { + super(desc, t); + super.setErrCode(RESOURCE_UPLOAD_ERROR.getCode()); + } +} diff --git a/exchangis-engines/exchangis-engine-core/pom.xml b/exchangis-engines/exchangis-engine-core/pom.xml new file mode 100644 index 000000000..5d040b341 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/pom.xml @@ -0,0 +1,37 @@ + + + + exchangis-engines + com.webank.wedatasphere.exchangis + 1.1.2 + + 4.0.0 + + exchangis-engine-core + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-engine-common + ${exchangis.version} + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${exchangis.version} + + + + org.apache.linkis + linkis-bml-client + ${linkis.version} + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/ExchangisEngine.java 
package com.webank.wedatasphere.exchangis.engine;

import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings;
import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer;

import java.util.Objects;

/**
 * Generic implementation of {@link ExchangisEngine}: a plain holder for the
 * engine settings and the resource container, with the engine name derived
 * from the settings.
 * NOTE(review): generic parameters on EngineResourceContainer appear to have
 * been stripped in extraction — raw types kept as in the original; confirm
 * against the repository.
 */
public class GenericExchangisEngine implements ExchangisEngine {

    // Engine settings; also the source of the engine name
    private EngineSettings settings;

    // Resource container attached to this engine
    private EngineResourceContainer resourceContainer;

    @Override
    public String getName() {
        // The name lives on the settings; null when no settings are attached yet.
        EngineSettings current = this.settings;
        if (current == null) {
            return null;
        }
        return current.getName();
    }

    @Override
    public EngineSettings getSettings() {
        return this.settings;
    }

    public void setSettings(EngineSettings settings) {
        this.settings = settings;
    }

    @Override
    @SuppressWarnings("unchecked")
    public EngineResourceContainer getResourceContainer() {
        return (EngineResourceContainer) this.resourceContainer;
    }

    public void setResourceContainer(EngineResourceContainer resourceContainer) {
        this.resourceContainer = resourceContainer;
    }
}
package com.webank.wedatasphere.exchangis.engine.manager;

import com.webank.wedatasphere.exchangis.engine.ExchangisEngine;
import com.webank.wedatasphere.exchangis.engine.GenericExchangisEngine;
import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings;
import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer;

import java.util.HashMap;
import java.util.Map;

/**
 * Default engine manager: dispatches settings/resource-container lookups to
 * the engine registered under the given name.
 * NOTE(review): generics on the map and container were stripped in extraction;
 * Map&lt;String, ExchangisEngine&gt; reconstructed — confirm against the repository.
 */
public class DefaultExchangisEngineManager implements ExchangisEngineManager {

    // Placeholder returned for unknown engines so callers receive null fields
    // instead of a NullPointerException
    private static final ExchangisEngine EMPTY_ENGINE = new GenericExchangisEngine();

    // Engine context: engine name -> engine instance
    protected Map<String, ExchangisEngine> engineContextMap = new HashMap<>();

    @Override
    public EngineSettings getSettings(String engine) {
        return resolve(engine).getSettings();
    }

    @Override
    @SuppressWarnings("unchecked")
    public EngineResourceContainer getResourceContainer(String engine) {
        return (EngineResourceContainer) resolve(engine).getResourceContainer();
    }

    // Look up the engine by name, falling back to the empty placeholder
    // (same semantics as Map.getOrDefault).
    private ExchangisEngine resolve(String engine) {
        return engineContextMap.getOrDefault(engine, EMPTY_ENGINE);
    }
}
package com.webank.wedatasphere.exchangis.engine.resource;

import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration;
import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao;
import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineStoreResource;
import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException;
import com.webank.wedatasphere.exchangis.engine.utils.ResourceUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.net.URI;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;

/**
 * Abstract engine resource container.
 * Maintains a tree of {@link ResourcePathNode}, one node per path segment of a
 * resource path such as "/a/b/c"; each node holds the engine resources registered
 * directly under that path plus the remote (uploaded) resource that mirrors them.
 * Flushing merges a node's local resources (via the subclass hook
 * {@link #mergeNodeEngineResource(ResourcePathNode)}) and uploads the result.
 * NOTE(review): generic parameters were lost in extraction; T (local resource)
 * and U (remote resource) reconstructed from usage — confirm against the repository.
 */
public abstract class AbstractEngineResourceContainer<T extends EngineResource, U extends EngineResource>
        implements EngineResourceContainer<T, U> {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractEngineResourceContainer.class);

    // Resource root uri (file scheme, built from the normalized root path)
    protected final URI rootUri;

    // Engine type served by this container
    private final String engineType;

    // Dao used to persist/update records of uploaded remote resources
    protected final EngineResourceDao engineResourceDao;

    // Resource loader in container
    protected final EngineResourceLoader<T> engineResourceLoader;

    // Resource uploader in container
    protected final EngineResourceUploader<T, U> engineResourceUploader;

    // Root node of the resource path tree (path "/")
    private final ResourcePathNode rootNode = new ResourcePathNode("/");

    /**
     * @param engineType       engine type
     * @param rootPath         local root path; normalized then converted to a file URI
     * @param resourceDao      dao for remote resource records
     * @param resourceLoader   loader for local resources
     * @param resourceUploader uploader to remote storage
     */
    public AbstractEngineResourceContainer(String engineType, String rootPath, EngineResourceDao resourceDao,
                                           EngineResourceLoader<T> resourceLoader,
                                           EngineResourceUploader<T, U> resourceUploader){
        this.engineType = engineType;
        this.rootUri = new File(ResourceUtils.normalizeFilePath(rootPath)).toURI();
        this.engineResourceDao = resourceDao;
        this.engineResourceLoader = resourceLoader;
        this.engineResourceUploader = resourceUploader;
    }

    @Override
    public String getEngineType() {
        return engineType;
    }

    /**
     * Resources registered directly under the path's node.
     *
     * @param resourcePath resource path ("/a/b")
     * @return resource list, or null when the path is null or has no node
     */
    @Override
    public List<T> getResources(String resourcePath) {
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            ResourcePathNode pathNode = searchResPathNode(paths);
            if (Objects.nonNull(pathNode)){
                return pathNode.getSubEngineResources();
            }
        }
        return null;
    }

    /**
     * Add a resource under the path, creating intermediate nodes as needed.
     *
     * @param resourcePath   resource path
     * @param engineResource engine resource
     */
    @Override
    public void addResource(String resourcePath, T engineResource) {
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            operateResPathNode(paths, pathNode -> pathNode.addSubEngineResource(engineResource));
        }
    }

    /**
     * Replace all resources under the path's node; no-op when the node does not exist.
     *
     * @param resourcePath    resource path
     * @param engineResources replacement resources
     */
    @Override
    public void updateResources(String resourcePath, T[] engineResources) {
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            ResourcePathNode pathNode = searchResPathNode(paths);
            if (Objects.nonNull(pathNode)){
                pathNode.updateSubEngineResource(engineResources);
            }
        }
    }

    /**
     * Get one resource by path and id.
     *
     * @param resourcePath resource path
     * @param resourceId   resource id
     * @return resource, or null when path/node/id is unknown
     */
    @Override
    public T getResource(String resourcePath, String resourceId) {
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            ResourcePathNode pathNode = searchResPathNode(paths);
            if (Objects.nonNull(pathNode)){
                return pathNode.getSubEngineResource(resourceId);
            }
        }
        return null;
    }

    /**
     * Merge and upload the resources of the single node at the path.
     *
     * @param resourcePath resource path
     * @throws ExchangisEngineResException on merge failure (upload errors are logged, not thrown)
     */
    @Override
    public void flushResources(String resourcePath) throws ExchangisEngineResException{
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            flushResources(searchResPathNode(paths));
        }
    }

    /**
     * Breadth-first walk of the whole tree, flushing every node that holds resources.
     *
     * @throws ExchangisEngineResException on merge failure
     */
    @Override
    public void flushAllResources() throws ExchangisEngineResException{
        Queue<ResourcePathNode> queue = new LinkedList<>();
        queue.offer(this.rootNode);
        while(!queue.isEmpty()){
            ResourcePathNode currentNode = queue.poll();
            if (currentNode.hasSubEngineResources()){
                flushResources(currentNode);
            }
            currentNode.childNodes.values().forEach(queue::offer);
        }
    }

    /**
     * Remote (uploaded) resource mirrored at the path, or null when unknown.
     *
     * @param resourcePath resource path
     */
    @Override
    public U getRemoteResource(String resourcePath) {
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            ResourcePathNode pathNode = searchResPathNode(paths);
            if (Objects.nonNull(pathNode)){
                return pathNode.getRemoteResource();
            }
        }
        return null;
    }

    /**
     * Remove one resource from the path's node; no-op when path/node is unknown.
     *
     * @param resourcePath resource path
     * @param resourceId   resource id
     */
    @Override
    public void removeResource(String resourcePath, String resourceId) {
        String[] paths = pathSplit(resourcePath);
        if (Objects.nonNull(paths)){
            ResourcePathNode pathNode = searchResPathNode(paths);
            if (Objects.nonNull(pathNode)){
                pathNode.removeSubEngineResource(resourceId);
            }
        }
    }

    @Override
    public EngineResourceLoader<T> getResourceLoader() {
        return engineResourceLoader;
    }

    @Override
    public EngineResourceUploader<T, U> getResourceUploader() {
        return engineResourceUploader;
    }

    // Apply 'operate' to the node at 'paths', creating missing nodes along the way.
    protected void operateResPathNode(String[] paths, Consumer<ResourcePathNode> operate){
        operateResPathNode(null, paths, 0, operate);
    }

    // Find the node at 'paths' without creating anything; null when absent.
    protected ResourcePathNode searchResPathNode(String[] paths){
        return searchResPathNode(null, paths, 0);
    }

    /**
     * Operate resource path node (recursive descent, creating missing children).
     * The node key is the cumulative path prefix joined with "/"; the first
     * segment must be empty (a path starting with "/"), otherwise a warning is
     * logged and the operation is dropped.
     *
     * @param parentNode parent node (null at the root level)
     * @param paths      split path segments
     * @param pos        current depth
     * @param operate    operate function applied to the final node
     */
    private void operateResPathNode(ResourcePathNode parentNode, String[] paths, int pos,
                                    Consumer<ResourcePathNode> operate){
        int upper = Math.min(pos + 1, paths.length);
        String[] subPath = new String[upper];
        System.arraycopy(paths, 0, subPath, 0, upper);
        // Cumulative path prefix for the node at this depth
        String path = subPath.length <= 1 ? "/" : StringUtils.join(subPath, "/");
        ResourcePathNode currentNode;
        if (null == parentNode){
            if (path.equals("/")) {
                currentNode = this.rootNode;
            } else {
                LOG.warn("Path: {} should start with '/'", StringUtils.join(paths, "/"));
                return;
            }
        } else {
            // Create the child node on demand
            currentNode = parentNode.childNodes.computeIfAbsent(path, ResourcePathNode::new);
        }
        if (upper >= paths.length){
            operate.accept(currentNode);
        } else {
            operateResPathNode(currentNode, paths, pos + 1, operate);
        }
    }

    /**
     * Search resource path node (recursive descent, read-only).
     *
     * @param parentNode parent node (null at the root level)
     * @param paths      split path segments
     * @param pos        current depth
     * @return resource path node, or null when any segment is missing
     */
    private ResourcePathNode searchResPathNode(ResourcePathNode parentNode, String[] paths, int pos){
        int upper = Math.min(pos + 1, paths.length);
        String[] subPath = new String[upper];
        System.arraycopy(paths, 0, subPath, 0, upper);
        // Cumulative path prefix for the node at this depth
        String path = subPath.length <= 1 ? "/" : StringUtils.join(subPath, "/");
        ResourcePathNode currentNode;
        if (null == parentNode){
            if (path.equals("/")) {
                currentNode = this.rootNode;
            } else {
                LOG.warn("Path: {} should start with '/'", StringUtils.join(paths, "/"));
                return null;
            }
        } else {
            currentNode = parentNode.childNodes.get(path);
        }
        if (upper >= paths.length || Objects.isNull(currentNode)){
            return currentNode;
        }
        return searchResPathNode(currentNode, paths, pos + 1);
    }

    /**
     * Merge the node's resources and upload the merged result, persisting the
     * remote record (insert on first upload, update afterwards). Upload errors
     * are logged and swallowed so that one node cannot abort a whole flush.
     */
    private void flushResources(ResourcePathNode pathNode) throws ExchangisEngineResException {
        if(Objects.nonNull(pathNode)){
            LOG.info("Flush the {} engine resources in path: [{}]", getEngineType(), pathNode.getPath());
            T nodeEngineRes = mergeNodeEngineResource(pathNode);
            if (Objects.nonNull(nodeEngineRes)){
                // Mark the resource under the path
                nodeEngineRes.setPath(pathNode.path);
                // Try to upload the node engine resource
                try {
                    U uploadedRes = this.engineResourceUploader.upload(nodeEngineRes,
                            pathNode.getRemoteResource());
                    if (Objects.nonNull(uploadedRes)) {
                        // Store the uploaded remote resource information
                        if (Objects.nonNull(pathNode.getRemoteResource())) {
                            this.engineResourceDao.updateResource(new EngineStoreResource(uploadedRes));
                        } else {
                            this.engineResourceDao.insertResource(new EngineStoreResource(uploadedRes));
                        }
                        pathNode.setRemoteResource(uploadedRes);
                    }
                }catch(Exception e){
                    // Not throw
                    // NOTE(review): warn with a null message loses context — consider a descriptive message
                    LOG.warn(null, e);
                }
            }
        }
    }

    // Split a path on "/"; a leading "/" yields an empty first segment, which the
    // descent methods rely on to anchor at the root node. Null-safe.
    protected String[] pathSplit(String path){
        return path == null ? null : path.split("/");
    }

    /**
     * Merge the engine resources in path node.
     *
     * @param pathNode path node
     * @return merged resource, or null when nothing needs uploading
     */
    protected abstract T mergeNodeEngineResource(ResourcePathNode pathNode);

    /**
     * Resource path node (in tree). Holds the resources registered directly at
     * one path, the mirrored remote resource, and the child nodes. Resource map
     * access is guarded by a per-node read/write lock; the child map is a
     * ConcurrentHashMap so traversal needs no lock.
     */
    protected class ResourcePathNode{

        // Resource path (cumulative prefix, "/" for the root)
        protected final String path;

        // Node lock guarding subResources and lastModifyTime
        protected final ReentrantReadWriteLock nodeLock;

        // Max modify time (epoch millis) over the resources in this node
        protected long lastModifyTime = -1;

        // Resources registered at this node, keyed by resource id
        protected final Map<String, T> subResources = new HashMap<>();

        // Remote (uploaded) resource mirroring this node
        protected U remoteResource;

        // Children nodes keyed by their cumulative path
        protected Map<String, ResourcePathNode> childNodes = new ConcurrentHashMap<>();

        public ResourcePathNode(String path){
            this.path = path;
            this.nodeLock = new ReentrantReadWriteLock();
            this.lastModifyTime = 0L;
        }

        /**
         * Replace all resources; recomputes lastModifyTime as the max resource
         * modify time. A null array simply clears the node.
         */
        public void updateSubEngineResource(T[] engineResources){
            nodeLock.writeLock().lock();
            try{
                subResources.clear();
                if (Objects.nonNull(engineResources)){
                    final AtomicLong modifyTime = new AtomicLong(0);
                    Arrays.asList(engineResources).forEach(engineResource -> {
                        Date resourceTime = engineResource.getModifyTime();
                        if (resourceTime.getTime() > modifyTime.get()){
                            modifyTime.set(resourceTime.getTime());
                        }
                        subResources.put(engineResource.getId(), engineResource);
                    });
                    this.lastModifyTime = modifyTime.get();
                }
            } finally {
                nodeLock.writeLock().unlock();
            }
        }

        // Add (or replace by id) one resource, advancing lastModifyTime if newer.
        public void addSubEngineResource(T engineResource){
            nodeLock.writeLock().lock();
            try{
                subResources.put(engineResource.getId(), engineResource);
                Date resourceTime = engineResource.getModifyTime();
                if (resourceTime.getTime() > lastModifyTime){
                    this.lastModifyTime = resourceTime.getTime();
                }
            }finally {
                nodeLock.writeLock().unlock();
            }
        }

        // Snapshot copy of the node's resources.
        public List<T> getSubEngineResources(){
            nodeLock.readLock().lock();
            try{
                List<T> resources = new ArrayList<>();
                subResources.forEach((key, resource) -> resources.add(resource));
                return resources;
            }finally {
                nodeLock.readLock().unlock();
            }
        }

        // Whether any resource is registered at this node.
        public boolean hasSubEngineResources(){
            nodeLock.readLock().lock();
            try{
                return !subResources.isEmpty();
            }finally {
                nodeLock.readLock().unlock();
            }
        }

        // Resource by id, or null.
        public T getSubEngineResource(String resourceId){
            nodeLock.readLock().lock();
            try{
                return subResources.get(resourceId);
            }finally {
                nodeLock.readLock().unlock();
            }
        }

        // Remove a resource by id; returns the removed resource or null.
        public EngineResource removeSubEngineResource(String resourceId){
            nodeLock.writeLock().lock();
            try{
                return subResources.remove(resourceId);
            }finally {
                nodeLock.writeLock().unlock();
            }
        }

        // Remote resource accessors are not guarded by nodeLock;
        // NOTE(review): confirm single-writer assumption for remoteResource.
        public U getRemoteResource(){
            return remoteResource;
        }

        public void setRemoteResource(U engineResource){
            this.remoteResource = engineResource;
        }

        public String getPath() {
            return path;
        }
    }

}
package com.webank.wedatasphere.exchangis.engine.resource;

import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
import com.webank.wedatasphere.exchangis.engine.resource.uri.ResourceURLStreamHandlerFactory;
import org.apache.linkis.common.conf.CommonVars;

import java.net.URL;

/**
 * Abstract resource loader.
 * Installs a URL stream handler factory once per JVM so that resource URIs with
 * custom schemes (bml/hdfs/viewfs by default) can be opened through java.net.URL.
 * NOTE(review): generic parameter stripped in extraction; reconstructed as the
 * local resource type — confirm against the repository.
 *
 * @param <T> engine resource type produced by this loader
 */
public abstract class AbstractEngineResourceLoader<T extends EngineResource> implements EngineResourceLoader<T> {

    /**
     * Support schemes for uri (comma separated), configurable via
     * 'wds.exchangis.engine.resource.schemes'.
     */
    private static final CommonVars<String> SUPPORT_SCHEMES =
            CommonVars.apply("wds.exchangis.engine.resource.schemes", "bml,hdfs,viewfs");

    static {
        // Fix: URL.setURLStreamHandlerFactory may be invoked at most once per JVM and
        // throws java.lang.Error when a factory is already installed (e.g. by Tomcat
        // or another library). Previously that Error escaped the static initializer
        // and any use of a subclass failed with ExceptionInInitializerError.
        try {
            URL.setURLStreamHandlerFactory(new ResourceURLStreamHandlerFactory(
                    SUPPORT_SCHEMES.getValue().split(",")));
        } catch (Error factoryAlreadySet) {
            // A factory was registered elsewhere; custom schemes may not resolve
            // through URL. Swallow so class initialization still succeeds.
        }
    }
}
package com.webank.wedatasphere.exchangis.engine.resource;

import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration;
import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao;
import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineStoreResource;
import com.webank.wedatasphere.exchangis.engine.utils.ResourceUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;

/**
 * Default engine resource container: local resources are merged into a single
 * packet file and mirrored to BML remote storage.
 * NOTE(review): generic arguments were stripped in extraction; reconstructed as
 * &lt;EngineLocalPathResource, EngineBmlResource&gt; from usage — confirm against the repository.
 */
public class DefaultEngineResourceContainer extends AbstractEngineResourceContainer<EngineLocalPathResource, EngineBmlResource> {

    private static final Logger LOG = LoggerFactory.getLogger(DefaultEngineResourceContainer.class);

    public DefaultEngineResourceContainer(String engineType, String rootPath, EngineResourceDao resourceDao,
                                          EngineResourceLoader<EngineLocalPathResource> resourceLoader,
                                          EngineResourceUploader<EngineLocalPathResource, EngineBmlResource> resourceUploader) {
        super(engineType, rootPath, resourceDao, resourceLoader, resourceUploader);
    }

    /**
     * Restore the remote (BML) resource references persisted in the database
     * into the path tree, so the container knows what is already uploaded.
     */
    @Override
    public void init() {
        List<EngineStoreResource> storeResources = this.engineResourceDao.getResources(getEngineType());
        storeResources.forEach(storeResource -> {
            String path = storeResource.getPath();
            if (StringUtils.isNotBlank(path)) {
                operateResPathNode(pathSplit(path), resourcePathNode ->
                        resourcePathNode.setRemoteResource(new EngineBmlResource(storeResource)));
            }
        });
    }

    /**
     * Merge (combine + packet) the node's local resources into one packet file.
     *
     * @param pathNode resource path node
     * @return merged local resource, the single pre-packed resource, or null when
     *         the remote copy is up to date or packing failed
     */
    @Override
    protected EngineLocalPathResource mergeNodeEngineResource(ResourcePathNode pathNode) {
        // Skip when the remote copy is at least as new as the local modifications
        if (Objects.nonNull(pathNode.getRemoteResource()) && pathNode.getRemoteResource()
                .getModifyTime().getTime() >= pathNode.lastModifyTime) {
            return null;
        }
        // Snapshot the node's resources under the read lock
        ReentrantReadWriteLock nodeLock = pathNode.nodeLock;
        List<EngineLocalPathResource> resourcesFiltered;
        nodeLock.readLock().lock();
        try {
            resourcesFiltered = pathNode.subResources.values().stream().filter(Objects::nonNull)
                    .collect(Collectors.toList());
        } finally {
            nodeLock.readLock().unlock();
        }
        // A single already-packed resource needs no merging
        if (resourcesFiltered.size() == 1 && resourcesFiltered.get(0).isPacket()) {
            return resourcesFiltered.get(0);
        }
        // Merged resource is a local packet file; its name mirrors the node path
        String mergedResourcePath;
        boolean temp = false;
        if (ExchangisEngineConfiguration.ENGINE_RESOURCE_MERGE_LOCAL.getValue()) {
            // Store the merged packet under the container root path
            String rootPath = rootUri.getPath();
            mergedResourcePath = (rootPath.endsWith(IOUtils.DIR_SEPARATOR + "") ? rootPath : rootPath + IOUtils.DIR_SEPARATOR)
                    + pathNode.getPath() + ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue();
        } else {
            File temporaryPath = new File(ExchangisEngineConfiguration.ENGINE_RESOURCE_TMP_PATH.getValue());
            // Fix: mkdirs() instead of mkdir() so missing parent directories are created too
            if (temporaryPath.mkdirs()) {
                LOG.info("Auto create the engine temporary directory [{}]", temporaryPath.getAbsolutePath());
            }
            mergedResourcePath = temporaryPath.getAbsolutePath() + IOUtils.DIR_SEPARATOR + UUID.randomUUID();
            temp = true;
        }
        // Serialize packing per (engine, path); the interned string is a JVM-wide lock key
        synchronized ((getEngineType() + ":" + pathNode.getPath()).intern()) {
            // 1. DELETE the existing local packet from a previous merge
            File resourceFile = new File(mergedResourcePath);
            if (resourceFile.exists()) {
                if (resourceFile.delete()) {
                    LOG.info("Success to delete the existed local resource file [{}] before", resourceFile.getPath());
                } else {
                    LOG.warn("Fail to delete the existed local resource file [{}], please examine the file permissions or occupation from the other program!", resourceFile.getPath());
                }
            }
            try {
                if (resourceFile.createNewFile()) {
                    // Fix: the FileOutputStream was never closed (descriptor leak);
                    // try-with-resources guarantees close even when combinePacket throws
                    try (FileOutputStream output = new FileOutputStream(resourceFile)) {
                        ResourceUtils.combinePacket(
                                resourcesFiltered.toArray(new EngineResource[0]), output);
                    }
                    if (temp) {
                        resourceFile.deleteOnExit();
                    }
                    return new EngineLocalPathResource(getEngineType(), rootUri,
                            pathNode.getPath() + ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue());
                }
            } catch (IOException e) {
                LOG.warn("Exception in combing and packet resources in [{}]", pathNode.getPath(), e);
            }
        }
        return null;
    }
}
package com.webank.wedatasphere.exchangis.engine.resource;

import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration;
import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource;
import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException;
import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResLoadException;
import com.webank.wedatasphere.exchangis.engine.utils.ResourceUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FilenameFilter;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Default path scanner.
 * Walks an engine resource root directory (one sub-directory per engine type),
 * delegating to the engine's registered {@link EngineResourceLoader} to turn
 * accepted paths into resources; packet files (matching the configured packet
 * suffix) are unpacked beside themselves.
 * NOTE(review): generics were stripped in extraction; loader element type
 * reconstructed as '? extends EngineLocalPathResource' — confirm against the repository.
 */
public class DefaultEngineResourcePathScanner implements EngineResourcePathScanner {

    private static final Logger LOG = LoggerFactory.getLogger(DefaultEngineResourcePathScanner.class);

    /**
     * Resource loader list, keyed by engine type
     */
    private final Map<String, EngineResourceLoader<? extends EngineLocalPathResource>> resourceLoaders = new ConcurrentHashMap<>();

    /**
     * Register (or replace) the loader for its engine type.
     *
     * @param resourceLoader resource loader
     */
    @Override
    public void registerResourceLoader(EngineResourceLoader<? extends EngineLocalPathResource> resourceLoader) {
        LOG.info("Register the resource loader: '{}'", resourceLoader.getClass().getCanonicalName());
        this.resourceLoaders.put(resourceLoader.engineType(), resourceLoader);
    }

    /**
     * Scan the root path: for each registered engine, descend into the directory
     * named after the (lower-cased) engine type and load its resources.
     *
     * @param rootPath root path (must exist and be a directory)
     * @return de-duplicated set of loaded resources
     * @throws ExchangisEngineResException when the root path is missing or is a plain file
     */
    @Override
    public Set<EngineLocalPathResource> doScan(String rootPath) throws ExchangisEngineResException {
        rootPath = FilenameUtils.normalize(rootPath);
        File rootFile = new File(rootPath);
        List<EngineLocalPathResource> resources = new ArrayList<>();
        if (!rootFile.exists()){
            throw new ExchangisEngineResLoadException("The engine resource root path: [" + rootPath +"] doesn't exist");
        }
        if (rootFile.isFile()){
            throw new ExchangisEngineResLoadException("The engine resource root path: [" + rootPath + "] should be a directory/link, but not a file");
        } else {
            LOG.info("Start to scan the resource root path: [{}]", rootPath);
            resourceLoaders.forEach((engine, resourceLoader) ->{
                File engineFile = new File(rootFile, engine.toLowerCase());
                if (engineFile.exists() && engineFile.isDirectory()){
                    LOG.info("Scan the resource path for engine: [{}] in [{}]", engine.toLowerCase(), engineFile.getPath());
                    // The loader is only applied to paths it accepts; null means "keep descending"
                    resources.addAll(scanPathAndLoadResource(rootFile.toURI(),
                            IOUtils.DIR_SEPARATOR + engineFile.getName(), (baseUri, path) -> resourceLoader.accept(baseUri, path)?
                                    resourceLoader : null));
                } else {
                    LOG.warn("Cannot find the resource path for engine: [{}] in [{}], ignore it.", engine.toLowerCase(), engineFile.getPath());
                }
            });
        }
        return new HashSet<>(resources);
    }

    /**
     * Recursive scan of one directory level.
     * Directories accepted by the loader become resources wholesale (and their
     * pre-built packet file, if any, is skipped); accepted plain files become
     * resources, with packet files unpacked next to themselves; remaining
     * directories are descended into. Loader failures are logged per entry and
     * do not abort the scan.
     *
     * @param baseUri      scan root as a file URI
     * @param path         path relative to baseUri (leading '/')
     * @param getResLoader returns the loader when it accepts (baseUri, path), else null
     * @return resources loaded beneath this path
     */
    private List<EngineLocalPathResource> scanPathAndLoadResource(URI baseUri, String path,
            BiFunction<URI, String, EngineResourceLoader<? extends EngineLocalPathResource>> getResLoader) {
        List<EngineLocalPathResource> resources = new ArrayList<>();
        File rootFile = new File(baseUri.getPath(), path);
        if (rootFile.isDirectory()) {
            File[] childFiles = rootFile.listFiles((dir, name) -> {
                // skip the hidden file
                return !name.startsWith(".");
            });
            if (Objects.nonNull(childFiles)) {
                List<File> scanDirs = new ArrayList<>();
                List<String> skipNames = new ArrayList<>();
                List<File> directories = Arrays.stream(childFiles)
                        .filter(File::isDirectory).collect(Collectors.toList());
                directories.forEach(dir -> {
                    try {
                        String dirPath = path + IOUtils.DIR_SEPARATOR + dir.getName();
                        EngineResourceLoader<? extends EngineLocalPathResource> resourceLoader
                                = getResLoader.apply(baseUri, dirPath);
                        if (Objects.nonNull(resourceLoader)) {
                            resources.addAll(Arrays.asList(resourceLoader.loadResource(baseUri, dirPath)));
                            // Remember the directory name so its packet file is not loaded twice
                            skipNames.add(dir.getName());
                        } else {
                            scanDirs.add(dir);
                        }
                    } catch (Exception e) {
                        LOG.warn("Exception in loading engine directory resource: [" + dir.getPath() + "]", e);
                    }
                });
                List<File> rawFiles = Arrays.stream(childFiles).filter(file ->
                        file.isFile() && skipNames.stream().noneMatch(skipName ->
                                file.getName().equals(skipName + ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue())))
                        .collect(Collectors.toList());
                rawFiles.forEach(rawFile -> {
                    try {
                        String rawFilePath = path + IOUtils.DIR_SEPARATOR + rawFile.getName();
                        EngineResourceLoader<? extends EngineLocalPathResource> resourceLoader =
                                getResLoader.apply(baseUri, rawFilePath);
                        if (Objects.nonNull(resourceLoader)){
                            EngineLocalPathResource[] resArray = resourceLoader.loadResource(baseUri, rawFilePath);
                            if (resArray.length == 1 && rawFile.getName()
                                    .endsWith(ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue())) {
                                LOG.info("Mark the engine resource: [{}] as a packet({}) resource", rawFile.getPath(),
                                        ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue());
                                resArray[0].setPacket(true);
                                Path source = rawFile.toPath();
                                // NOTE(review): dest uses substringBefore (first '.') while the
                                // resource path below uses substringBeforeLast (last '.'); these
                                // disagree for names containing multiple dots — confirm intended.
                                Path dest = source.resolveSibling(StringUtils.substringBefore(rawFile.getName(), "."));
                                if (!Files.isDirectory(dest)) {
                                    Files.createDirectory(dest);
                                }
                                LOG.info("Un packet the engine resource: [{}] to [{}]", source, dest);
                                ResourceUtils.unPacket(source, dest);
                                // Update the path value
                                resArray[0].setPath(StringUtils.substringBeforeLast(rawFilePath, "."));
                            }
                            resources.addAll(Arrays.asList(resArray));
                        }
                    } catch (Exception e){
                        LOG.warn("Exception in loading engine file resource: [" + rawFile.getPath() + "]", e);
                    }
                });
                // Descend into directories no loader claimed
                for(File scanDir : scanDirs) {
                    resources.addAll(scanPathAndLoadResource(baseUri, path + IOUtils.DIR_SEPARATOR + scanDir.getName(), getResLoader));
                }
            }
        }
        return resources;
    }
}
resourcePath resource path + * @return engine resources + */ + List getResources(String resourcePath); + + /** + * Add resource to path in container + * @param resourcePath resource path + * @param engineResource engine resource + */ + void addResource(String resourcePath, T engineResource); + + /** + * Update resource + * @param resourcePath resource path + * @param engineResource engine resource + * @return resource list + */ + void updateResources(String resourcePath, T[] engineResource); + + /** + * Get resource by path and id + * @param resourcePath resource path + * @param resourceId resource id + * @return engine + */ + T getResource(String resourcePath, String resourceId); + + /** + * Flush(upload) resources in path + * @param resourcePath resource path + * @return (merged)resource + */ + void flushResources(String resourcePath) throws ExchangisEngineResException; + + /** + * Flush(upload) all the resources in container + */ + void flushAllResources() throws ExchangisEngineResException; + + /** + * Get remote(upload) resource in path + * @param resourcePath resource path + * @return (merged)resource + */ + U getRemoteResource(String resourcePath); + /** + * Remove resource + * @param resourcePath resource path + * @param resourceId resource id + */ + void removeResource(String resourcePath, String resourceId); + + /** + * Engine resource loader + * @return scanner + */ + EngineResourceLoader getResourceLoader(); + + /** + * Engine resource uploader + * @return resource uploader + */ + EngineResourceUploader getResourceUploader(); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceLoader.java new file mode 100644 index 000000000..dd5a74241 --- /dev/null +++ 
b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceLoader.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResLoadException; + +import java.net.URI; + +/** + * Engine resource loader + * @param + */ +public interface EngineResourceLoader { + /** + * Engine type + * @return engine + */ + String engineType(); + /** + * Accept uri + * @param baseUri uri + * @return boolean + */ + boolean accept(URI baseUri, String path); + + /** + * Load resources from uri + * @param baseUri uri + * @return resource array + */ + T[] loadResource(URI baseUri, String path) throws ExchangisEngineResLoadException; +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourcePathScanner.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourcePathScanner.java new file mode 100644 index 000000000..1621abb7b --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourcePathScanner.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException; + +import java.util.Set; + +/** + * Engine resource path scanner + */ +public interface EngineResourcePathScanner { + + /** + * Register resource loader + * @param resourceLoader resource loader + */ + void registerResourceLoader(EngineResourceLoader resourceLoader); + /** + * Scan entrance + * @param rootPath root path + */ + Set doScan(String rootPath) throws ExchangisEngineResException; +} diff --git 
a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceUploader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceUploader.java new file mode 100644 index 000000000..3e059e937 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceUploader.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResUploadException; + +/** + * Engine resource uploader + * @param + * @param + */ +public interface EngineResourceUploader { + + /** + * upload method + * @param needUploadResource resource need to be uploaded + * @return uploaded resource + */ + R upload(T needUploadResource) throws ExchangisEngineResUploadException; + + R upload(T needUploadResource, R relatedResource) throws ExchangisEngineResUploadException; +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlClients.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlClients.java new file mode 100644 index 000000000..bb41f626e --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlClients.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.engine.resource.bml; + +import org.apache.linkis.bml.client.BmlClient; +import org.apache.linkis.bml.client.BmlClientFactory; + +/** + * BML client + */ +public class BmlClients { + + private static final BmlClient DEFAULT_CLIENT; + static{ + //TODO use the common client configuration + DEFAULT_CLIENT = BmlClientFactory.createBmlClient(); + } + + public static 
BmlClient getInstance(){ + return DEFAULT_CLIENT; + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlEngineResourceUploader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlEngineResourceUploader.java new file mode 100644 index 000000000..a65a3cd0d --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlEngineResourceUploader.java @@ -0,0 +1,53 @@ +package com.webank.wedatasphere.exchangis.engine.resource.bml; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResUploadException; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceUploader; +import org.apache.linkis.bml.protocol.BmlUpdateResponse; +import org.apache.linkis.bml.protocol.BmlUploadResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Objects; + +/** + * Bml engine resource uploader + */ +public class BmlEngineResourceUploader implements EngineResourceUploader { + + private static final Logger LOG = LoggerFactory.getLogger(BmlEngineResourceUploader.class); + + @Override + public EngineBmlResource upload(EngineLocalPathResource res) throws ExchangisEngineResUploadException { + try { + BmlUploadResponse uploadResponse = BmlClients.getInstance() + .uploadResource(res.getCreator(), res.getName(), res.getInputStream()); + return new EngineBmlResource(res.getEngineType(), res.getPath(), + res.getName(), uploadResponse.resourceId(), uploadResponse.version(), res.getCreator()); + } catch (Exception e){ + throw new ExchangisEngineResUploadException( + "Fail to upload resource: [name: " + res.getName() + ", path: " + res.getPath() + + ", type: "+ res.getType() 
+ ", creator: "+ res.getCreator() + "]", e); + } + } + + @Override + public EngineBmlResource upload(EngineLocalPathResource res, EngineBmlResource relatedResource) throws ExchangisEngineResUploadException { + if (Objects.isNull(relatedResource)){ + return upload(res); + } + try { + BmlUpdateResponse response = BmlClients.getInstance() + .updateResource(res.getCreator(), relatedResource.getResourceId(), + res.getName(), res.getInputStream()); + return new EngineBmlResource(relatedResource.getEngineType(), res.getPath(), + res.getName(), response.resourceId(), response.version(), res.getCreator()); + } catch (Exception e){ + throw new ExchangisEngineResUploadException( + "Fail to upload resource: [name: " + res.getName() + ", path: " + res.getPath() + + ", type: "+ res.getType() + ", resourceId: " + relatedResource.getResourceId() + + ",creator: "+ res.getCreator() + "]", e); + } + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/AbstractEngineLocalPathResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/AbstractEngineLocalPathResourceLoader.java new file mode 100644 index 000000000..34705b1e7 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/AbstractEngineLocalPathResourceLoader.java @@ -0,0 +1,129 @@ +package com.webank.wedatasphere.exchangis.engine.resource.loader; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResLoadException; +import com.webank.wedatasphere.exchangis.engine.resource.AbstractEngineResourceLoader; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.net.URI; +import 
java.util.Arrays; +import java.util.Date; +import java.util.Objects; +import java.util.regex.Pattern; + +/** + * Load the engine resources by local path + */ +public abstract class AbstractEngineLocalPathResourceLoader extends AbstractEngineResourceLoader { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractEngineLocalPathResourceLoader.class); + private static final String DEFAULT_SUPPORT_SCHEMA = "file"; + + /** + * Pattern object + */ + private Pattern[] patterns = new Pattern[0]; + + public AbstractEngineLocalPathResourceLoader(){ + String[] pathPatterns = pathPatterns(); + if (Objects.nonNull(pathPatterns)){ + patterns = new Pattern[pathPatterns.length]; + for(int i = 0; i < pathPatterns.length; i++){ + Pattern pattern = Pattern.compile(pathPatterns[i]); + patterns[i] = pattern; + } + } + } + + @Override + public boolean accept(URI baseUri, String path) { + if (StringUtils.isBlank(baseUri.getScheme()) || DEFAULT_SUPPORT_SCHEMA.equals(baseUri.getScheme())){ + return Arrays.stream(patterns) + .anyMatch(pattern -> pattern.matcher(path).matches()); + } + return false; + } + + @Override + public EngineLocalPathResource[] loadResource(URI baseUri, String path) throws ExchangisEngineResLoadException { + LOG.info("Load local engine resource, path: {}", path); + String scheme = baseUri.getScheme(); + if (StringUtils.isBlank(baseUri.getScheme()) || DEFAULT_SUPPORT_SCHEMA.equals(scheme)){ + return loadLocalResource(baseUri, path); + } else { + throw new ExchangisEngineResLoadException("Unsupported scheme: [" + scheme + "] in basic uri: [" + baseUri + "] for local resource loader."); + } + } + + /** + * Path pattern list + * @return pattern string array + */ + protected abstract String[] pathPatterns(); + /** + * Load local resource + * @param path path + * @return resource array + */ + private EngineLocalPathResource[] loadLocalResource(URI baseUri, String path) throws ExchangisEngineResLoadException { + File localFile = new File(baseUri.getPath(), 
path); + EngineLocalPathResource[] resources = new EngineLocalPathResource[0]; + if (localFile.isDirectory()) { + File[] resourceFiles = localFile.listFiles(); + if (Objects.nonNull(resourceFiles)) { + resources = new EngineLocalPathResource[resourceFiles.length]; + for (int i = 0; i < resources.length; i++) { + resources[i] = createLocalResource(resourceFiles[i], baseUri, path); + } + } + } else if (localFile.isFile()) { + resources = new EngineLocalPathResource[]{createLocalResource(localFile, baseUri, path)}; + } + // Important: make all the resources have the same value in 'path' + for(EngineLocalPathResource resource : resources){ + resource.setPath(path); + } + return resources; + } + + /** + * Create local resource + * @param localFile local file + * @param baseUri base uri + * @param path path + * @return local resource + */ + private EngineLocalPathResource createLocalResource(File localFile, URI baseUri, String path){ + EngineLocalPathResource localResource = new EngineLocalPathResource(engineType(), baseUri, + path + IOUtils.DIR_SEPARATOR + localFile.getName()); + long lastModifyTime = traverseExtractTime(localFile, 0L); + localResource.setCreateTime(new Date(lastModifyTime)); + localResource.setModifyTime(new Date(lastModifyTime)); + return localResource; + } + /** + * Traverse the extract last time + * @param localFile local file + * @param timestamp timestamp + * @return + */ + private long traverseExtractTime(File localFile, long timestamp){ + long lastTime = timestamp; + if (localFile.lastModified() > lastTime){ + lastTime = localFile.lastModified(); + } + if (localFile.isDirectory()){ + File[] subFiles = localFile.listFiles(); + if (Objects.nonNull(subFiles)) { + for (File subFile : subFiles) { + lastTime = traverseExtractTime(subFile, lastTime); + } + } + } + return lastTime; + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceConf.java 
b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceConf.java new file mode 100644 index 000000000..20dc2f841 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceConf.java @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.engine.resource.loader.datax; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * Resource config for datax + */ +public class DataxEngineResourceConf { + + /** + * Resource path prefix + */ + public static final CommonVars RESOURCE_PATH_PREFIX = CommonVars.apply("wds.exchangis.engine.datax.resource.path-prefix", "/datax/plugin"); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceLoader.java new file mode 100644 index 000000000..cdec9aeae --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceLoader.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.engine.resource.loader.datax; + +import com.webank.wedatasphere.exchangis.engine.resource.loader.AbstractEngineLocalPathResourceLoader; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.CommonVars; + +import java.util.regex.Pattern; + +/** + * Datax engine resource loader + */ +public class DataxEngineResourceLoader extends AbstractEngineLocalPathResourceLoader { + + private static final CommonVars ENGINE_DATAX_LOADER_PATH_PATTERN = CommonVars.apply("engine.datax.resource.loader.path-pattern", + StringUtils.join(new String[]{ + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + "/reader/.*[/]?", 
+ DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + "/writer/.*[/]?" + }, ",")); + @Override + protected String[] pathPatterns() { + return ENGINE_DATAX_LOADER_PATH_PATTERN.getValue().split(","); + } + + + @Override + public String engineType() { + return "datax"; + } + +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandler.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandler.java new file mode 100644 index 000000000..93fe1c9a7 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandler.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.engine.resource.uri; + +import java.io.IOException; +import java.net.URL; +import java.net.URLConnection; +import java.net.URLStreamHandler; + +/** + * URL stream handler for linkis client (cannot open connection) + */ +public class ResourceURLStreamHandler extends URLStreamHandler { + @Override + protected URLConnection openConnection(URL url) throws IOException { + throw new IllegalArgumentException("Cannot open connection for url [" + url + "]"); + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandlerFactory.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandlerFactory.java new file mode 100644 index 000000000..bcf6c10e5 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandlerFactory.java @@ -0,0 +1,51 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.engine.resource.uri; + +import java.net.URLStreamHandler; +import java.net.URLStreamHandlerFactory; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Engine resource stream handler factory (support specific schemas) + */ +public class ResourceURLStreamHandlerFactory implements URLStreamHandlerFactory { + + /** + * Support schemas + */ + private final List supportSchemas = new ArrayList<>(); + + /** + * Stream handler + */ + private final URLStreamHandler defaultStreamHandler; + + public ResourceURLStreamHandlerFactory(String... schemas){ + supportSchemas.addAll(Arrays.asList(schemas)); + this.defaultStreamHandler = new ResourceURLStreamHandler(); + } + + @Override + public URLStreamHandler createURLStreamHandler(String protocol) { + if (supportSchemas.stream().anyMatch( schema -> schema.equals(protocol))){ + return this.defaultStreamHandler; + } + return null; + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/utils/ResourceUtils.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/utils/ResourceUtils.java new file mode 100644 index 000000000..f63422313 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/utils/ResourceUtils.java @@ -0,0 +1,134 @@ +package com.webank.wedatasphere.exchangis.engine.utils; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.LinkOption; +import 
java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; +import java.util.zip.ZipOutputStream; + +/** + * Resource utils + */ +public class ResourceUtils { + + private static final Logger LOG = LoggerFactory.getLogger(ResourceUtils.class); + + private static final Integer BUFFER_SIZE = 2 * 1024; + + public static String normalizeFilePath(String path){ + return FilenameUtils.normalize(path); + } + + /** + * Combine the resources and packet + * @param resources resources + * @param outputStream output stream + * @throws IOException + */ + public static void combinePacket(EngineResource[] resources, OutputStream outputStream) throws IOException { + LOG.info("Start to combine the resources to packet file..."); + long startTime = System.currentTimeMillis(); + try(ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) { + for (EngineResource resource : resources) { + if (resource instanceof EngineLocalPathResource) { + packet(resource.getName(), ((EngineLocalPathResource) resource).getLocalFile().toPath(), + zipOutputStream); + } else { + packet(resource.getName(), resource.getInputStream(), zipOutputStream); + } + } + } + LOG.info("Success to combine the resources to packet file, taken: {}", System.currentTimeMillis() - startTime); + } + + public static void packet(Path source, Path target, boolean includeBaseDir) throws IOException { + + } + + public static void unPacket(Path source, Path target) throws IOException{ + if (Files.isRegularFile(source, LinkOption.NOFOLLOW_LINKS)){ + ZipFile zipFile = new ZipFile(source.toFile()); + InputStream inputStream = Files.newInputStream(source); + try(ZipInputStream zipInputStream = new ZipInputStream(inputStream)) { + ZipEntry zipEntry = null; + while (null != (zipEntry = zipInputStream.getNextEntry())) { + Path entryPath = 
target.resolve(zipEntry.getName()); + if (zipEntry.isDirectory()) { + if (!Files.isDirectory(entryPath)) { + Files.createDirectories(entryPath); + } + } else { + try (InputStream entryStream = zipFile.getInputStream(zipEntry)) { + try (OutputStream outputStream = Files.newOutputStream(entryPath, StandardOpenOption.CREATE_NEW)) { + byte[] buffer = new byte[BUFFER_SIZE]; + int pos = -1; + while ((pos = entryStream.read(buffer)) != -1) { + outputStream.write(buffer, 0, pos); + } + outputStream.flush(); + } + } + } + } + } + } + } + + /** + * Packet path source + * @param name name + * @param source source path + * @param outputStream stream + * @throws IOException + */ + private static void packet(String name, Path source, ZipOutputStream outputStream) throws IOException { + if (Files.isDirectory(source, LinkOption.NOFOLLOW_LINKS)){ + name = name + IOUtils.DIR_SEPARATOR_UNIX; + // Accept empty directory + ZipEntry zipEntry = new ZipEntry(name); + outputStream.putNextEntry(zipEntry); + outputStream.closeEntry(); + for(Path child : Files.list(source).collect(Collectors.toList())) { + packet(name + child.toFile().getName(), child, outputStream); + } + } else if (Files.isRegularFile(source, LinkOption.NOFOLLOW_LINKS)){ + packet(name, Files.newInputStream(source), outputStream); + } + } + + /** + * Packet input stream + * @param name name + * @param inputStream input stream + * @param outputStream output stream + * @throws IOException + */ + private static void packet(String name, InputStream inputStream, ZipOutputStream outputStream) throws IOException{ + if (Objects.nonNull(inputStream)) { + ZipEntry zipEntry = new ZipEntry(name); + outputStream.putNextEntry(zipEntry); + byte[] buffer = new byte[BUFFER_SIZE]; + int pos = -1; + while ((pos = inputStream.read(buffer)) != -1) { + outputStream.write(buffer, 0, pos); + } + outputStream.closeEntry(); + } + } +} diff --git a/exchangis-engines/exchangis-engine-server/pom.xml b/exchangis-engines/exchangis-engine-server/pom.xml 
new file mode 100644 index 000000000..5873fe538 --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/pom.xml @@ -0,0 +1,56 @@ + + + + exchangis-engines + com.webank.wedatasphere.exchangis + 1.1.2 + + 4.0.0 + + exchangis-engine-server + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${exchangis.version} + + + com.webank.wedatasphere.exchangis + exchangis-engine-core + ${exchangis.version} + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/ExchangisEngineAutoConfiguration.java b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/ExchangisEngineAutoConfiguration.java new file mode 100644 index 000000000..20ad739fb --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/ExchangisEngineAutoConfiguration.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.engine.server; + +import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration; +import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao; +import com.webank.wedatasphere.exchangis.engine.dao.EngineSettingsDao; +import com.webank.wedatasphere.exchangis.engine.manager.ExchangisEngineManager; +import com.webank.wedatasphere.exchangis.engine.resource.DefaultEngineResourcePathScanner; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourcePathScanner; +import com.webank.wedatasphere.exchangis.engine.server.manager.SpringExchangisEngineManager; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import 
org.springframework.context.annotation.Configuration; + +/** + * Auto configure the beans in engine + */ +@Configuration +public class ExchangisEngineAutoConfiguration { + + @Bean + @ConditionalOnMissingBean(EngineResourcePathScanner.class) + public EngineResourcePathScanner resourcePathScanner(){ + return new DefaultEngineResourcePathScanner(); + } + + @Bean(initMethod = "init") + @ConditionalOnMissingBean(ExchangisEngineManager.class) + public ExchangisEngineManager engineManager(EngineResourceDao resourceDao, + EngineSettingsDao settingsDao, EngineResourcePathScanner scanner){ + return new SpringExchangisEngineManager(ExchangisEngineConfiguration.ENGINE_RESOURCE_ROOT_PATH.getValue(), + resourceDao, settingsDao, scanner); + } +} diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/manager/SpringExchangisEngineManager.java b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/manager/SpringExchangisEngineManager.java new file mode 100644 index 000000000..8fa6b88e4 --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/manager/SpringExchangisEngineManager.java @@ -0,0 +1,127 @@ +package com.webank.wedatasphere.exchangis.engine.server.manager; + +import com.webank.wedatasphere.exchangis.engine.ExchangisEngine; +import com.webank.wedatasphere.exchangis.engine.GenericExchangisEngine; +import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao; +import com.webank.wedatasphere.exchangis.engine.dao.EngineSettingsDao; +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException; +import 
com.webank.wedatasphere.exchangis.engine.manager.DefaultExchangisEngineManager; +import com.webank.wedatasphere.exchangis.engine.resource.DefaultEngineResourceContainer; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourcePathScanner; +import com.webank.wedatasphere.exchangis.engine.resource.bml.BmlEngineResourceUploader; +import com.webank.wedatasphere.exchangis.engine.resource.loader.AbstractEngineLocalPathResourceLoader; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.utils.JsonUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +/** + * Engine manager of spring context + */ +public class SpringExchangisEngineManager extends DefaultExchangisEngineManager { + + private static final Logger LOG = LoggerFactory.getLogger(SpringExchangisEngineManager.class); + + /** + * Resource dao + */ + private final EngineResourceDao resourceDao; + + /** + * Settings dao + */ + private final EngineSettingsDao settingsDao; + + /** + * Resource root path + */ + private final String rootPath; + + private final EngineResourcePathScanner scanner; + + public SpringExchangisEngineManager(String rootPath, EngineResourceDao resourceDao, + EngineSettingsDao settingsDao, EngineResourcePathScanner scanner){ + this.rootPath = rootPath; + this.resourceDao = resourceDao; + this.settingsDao = settingsDao; + this.scanner = scanner; + } + + public void init(){ + List settingsList = this.settingsDao.getSettings(); + try { + String settingsJson = JsonUtils.jackson().writer().writeValueAsString(settingsList); + LOG.info("Engine settings: {}", settingsJson); + }catch(Exception e){ + //Ignore + } + settingsList.forEach(settings -> { + GenericExchangisEngine engine = new GenericExchangisEngine(); + engine.setSettings(settings); + 
AbstractEngineLocalPathResourceLoader loader = null; + BmlEngineResourceUploader uploader = null; + String loaderClassName = settings.getResourceLoaderClass(); + if (StringUtils.isNotBlank(loaderClassName)){ + try { + Class loaderClass = Class.forName(loaderClassName); + if (AbstractEngineLocalPathResourceLoader.class.isAssignableFrom(loaderClass)){ + loader = (AbstractEngineLocalPathResourceLoader) loaderClass.newInstance(); + this.scanner.registerResourceLoader(loader); + } else { + LOG.warn("Not allow the loader class: '{}' which does not implement '{}'", loaderClass, AbstractEngineLocalPathResourceLoader.class.getName()); + } + } catch (ClassNotFoundException e) { + LOG.warn("Cannot find the loader class: '{}' for engine [{}]", loaderClassName, engine.getName()); + } catch (InstantiationException | IllegalAccessException e) { + LOG.warn("Fail to instantiate the loader class: '{}'", loaderClassName, e); + } + } + String uploaderClassName = Optional.ofNullable(settings.getResourceUploaderClass()) + .orElse(BmlEngineResourceUploader.class.getCanonicalName()); + try { + Class uploaderClass = Class.forName(uploaderClassName); + if (BmlEngineResourceUploader.class.isAssignableFrom(uploaderClass)){ + uploader = (BmlEngineResourceUploader) uploaderClass.newInstance(); + } else { + LOG.warn("Not allow the uploader class: '{}' which does not implement '{}'", uploaderClass, + BmlEngineResourceUploader.class.getName()); + } + } catch (ClassNotFoundException e) { + LOG.warn("Cannot find the uploader class: '{}' for engine [{}]", uploaderClassName, engine.getName()); + } catch (InstantiationException | IllegalAccessException e) { + LOG.warn("Fail to instantiate the uploader class: '{}'", uploaderClassName, e); + } + EngineResourceContainer + resourceContainer = new DefaultEngineResourceContainer(engine.getName(), rootPath, resourceDao, loader, uploader); + LOG.info("Init engine resource container for engine: [{}]", engine.getName()); + resourceContainer.init(); + 
engine.setResourceContainer(resourceContainer); + engineContextMap.put(engine.getName(), engine); + }); + try { + // Start to scan and load local resources + Set localResources = this.scanner.doScan(this.rootPath); + localResources.forEach(resource -> Optional.ofNullable(engineContextMap.get(resource.getEngineType())) + .ifPresent(engine -> engine.getResourceContainer().addResource(resource.getPath(), resource))); + }catch (ExchangisEngineResException e){ + LOG.warn("Exception happened when scanning root path: [{}]", this.rootPath, e); + } + LOG.info("Flush all the resources in engine resource containers"); + for(Map.Entry entry : engineContextMap.entrySet()){ + try { + entry.getValue().getResourceContainer().flushAllResources(); + } catch (ExchangisEngineResException e) { + LOG.warn("Unable to flush the resources in container for engine: [{}]", entry.getValue().getName(), e); + } + } + } +} diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/EngineSettingsMapper.xml b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/EngineSettingsMapper.xml new file mode 100644 index 000000000..f8d7aac95 --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/EngineSettingsMapper.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/ProjectExportService.xml b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/ProjectExportService.xml new file mode 100644 index 000000000..71fee099e --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/ProjectExportService.xml @@ -0,0 +1,41 @@ + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/exchangis-engines/pom.xml b/exchangis-engines/pom.xml new file mode 100644 index 000000000..7dfac3c51 --- /dev/null +++ b/exchangis-engines/pom.xml @@ -0,0 +1,31 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + 1.1.2 + + 4.0.0 + + exchangis-engines + pom + 1.1.2 + + + exchangis-engine-common + exchangis-engine-core + exchangis-engine-server + + engines/datax + + engineconn-plugins/sqoop + engineconn-plugins/datax + + + 8 + 8 + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-builder/pom.xml b/exchangis-job/exchangis-job-builder/pom.xml index 9b971105b..acfbb460a 100644 --- a/exchangis-job/exchangis-job-builder/pom.xml +++ b/exchangis-job/exchangis-job-builder/pom.xml @@ -5,7 +5,7 @@ exchangis-job com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -15,7 +15,7 @@ com.webank.wedatasphere.exchangis exchangis-job-common - 1.0.0 + 1.1.2 com.google.code.gson @@ -25,7 +25,7 @@ com.webank.wedatasphere.exchangis exchangis-datasource-service - 1.0.0 + 1.1.2 compile diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java index 9f8fa61d8..d2800991c 100644 --- a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java @@ -61,7 +61,7 @@ public E build(T inputJob, E expectOut, ExchangisJobBuilderContext ctx) throws E * Get current job builder context * @return */ - protected static ExchangisJobBuilderContext getCurrentBuilderContext(){ + public static ExchangisJobBuilderContext getCurrentBuilderContext(){ return 
contextThreadLocal.get(); } } diff --git a/exchangis-job/exchangis-job-common/pom.xml b/exchangis-job/exchangis-job-common/pom.xml index 8abdf342c..1bae2fa82 100644 --- a/exchangis-job/exchangis-job-common/pom.xml +++ b/exchangis-job/exchangis-job-common/pom.xml @@ -5,7 +5,7 @@ exchangis-job com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -15,13 +15,18 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 + 1.1.2 org.apache.linkis linkis-label-common ${linkis.version} + + com.webank.wedatasphere.exchangis + exchangis-engine-common + 1.1.2 + org.apache.linkis linkis-protocol diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java index 2354018ce..72d979504 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java @@ -1,17 +1,30 @@ package com.webank.wedatasphere.exchangis.job.domain; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; import com.webank.wedatasphere.exchangis.job.utils.MemUtils; -import java.util.HashMap; -import java.util.Map; +import java.util.*; /** * EngineJob */ public class ExchangisEngineJob extends GenericExchangisJob { + public ExchangisEngineJob(){ + } + + public ExchangisEngineJob(ExchangisEngineJob engineJob){ + if (Objects.nonNull(engineJob)) { + setName(engineJob.getName()); + setEngineType(engineJob.getEngineType()); + getJobContent().putAll(engineJob.getJobContent()); + getRuntimeParams().putAll(engineJob.getRuntimeParams()); + setMemoryUsed(engineJob.getMemoryUsed()); + getResources().addAll(engineJob.getResources()); + } + } /** * Job content */ @@ -29,11 +42,20 @@ public class ExchangisEngineJob extends 
GenericExchangisJob { private String memoryUnit = MemUtils.StoreUnit.MB.name(); + /** + * If lock the unit of memory + */ + private boolean memoryUnitLock = false; /** * Cpu used in engine job */ private Long cpuUsed; + /** + * Engine resources + */ + private List resources = new ArrayList<>(); + public Map getJobContent() { return jobContent; } @@ -50,6 +72,7 @@ public void setRuntimeParams(Map runtimeParams) { this.runtimeParams = runtimeParams; } + public Long getMemoryUsed() { return memoryUsed; } @@ -73,4 +96,20 @@ public String getMemoryUnit() { public void setMemoryUnit(String memoryUnit) { this.memoryUnit = memoryUnit; } + + public List getResources() { + return resources; + } + + public void setResources(List resources) { + this.resources = resources; + } + + public boolean isMemoryUnitLock() { + return memoryUnitLock; + } + + public void setMemoryUnitLock(boolean memoryUnitLock) { + this.memoryUnitLock = memoryUnitLock; + } } diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/OperationType.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/OperationType.java new file mode 100644 index 000000000..d8c97943c --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/OperationType.java @@ -0,0 +1,44 @@ +package com.webank.wedatasphere.exchangis.job.domain; + +/** + * @author jefftlin + * @create 2022-09-15 + **/ +public enum OperationType { + + /** + * job operation: + * query project + */ + JOB_QUERY("JOB_QUERY"), + + /** + * job operation: + * create jpb + * update job info + * update job config + * update job content + * delete job + */ + JOB_ALTER("JOB_ALTER"), + + /** + * job operation: + * job execute + * job kill + * sub job delete + */ + JOB_EXECUTE("JOB_EXECUTE"), + + /** + * job operation: + * job release + */ + JOB_RELEASE("JOB_RELEASE"); + + private String type; + + OperationType(String 
type) { + this.type = type; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java index be89f6e07..1679e38ff 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java @@ -3,6 +3,8 @@ import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; @@ -26,12 +28,27 @@ public class SubExchangisJob extends GenericExchangisJob { public static final String REALM_JOB_CONTENT_SOURCE = "job.realm.content.source"; - public static final String REALM_JOB_COLUMN_MAPPING = "job.realm.column-mappings"; +// public static final String REALM_JOB_COLUMN_MAPPING = "job.realm.column-mappings"; /** * Realm params set */ - private Map realmParamSet = new ConcurrentHashMap<>(); + private final Map realmParamSet = new ConcurrentHashMap<>(); + + /** + * Source columns + */ + private final List sourceColumns = new ArrayList<>(); + + /** + * Sink columns + */ + private final List sinkColumns = new ArrayList<>(); + + /** + * Functions + */ + private final List columnFunctions = new ArrayList<>(); public String getSourceType() { return sourceType; @@ -49,7 +66,6 @@ public void setSinkType(String sinkType) { this.sinkType = sinkType; } - /** * Add * @param realm realm info @@ -93,5 +109,109 @@ public Map getParamsToMap(boolean isTemp){ .collect(Collectors.toMap(JobParam::getStrKey, JobParam::getValue, (left, right) -> right)); } + public List getSourceColumns() { + return 
sourceColumns; + } + + public List getSinkColumns() { + return sinkColumns; + } + + public List getColumnFunctions() { + return columnFunctions; + } + /** + * Column definition + */ + public static class ColumnDefine{ + + /** + * Column name + */ + private String name; + + /** + * Column type + */ + private String type; + /** + * Column index + */ + private Integer index; + + public ColumnDefine(){ + + } + + public ColumnDefine(String name, String type, Integer index){ + this.name = name; + this.type = type; + this.index = index; + } + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + } + + /** + * Column function + */ + public static class ColumnFunction{ + + private Integer index; + /** + * Function name + */ + private String name; + + /** + * Function params + */ + private List params = new ArrayList<>(); + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List getParams() { + return params; + } + + public void setParams(List params) { + this.params = params; + } + + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + } } diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java index 055cbc8c5..341b4bf2e 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java +++ 
b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java @@ -3,6 +3,7 @@ import java.util.Objects; import java.util.function.BiFunction; +import java.util.function.Function; /** * Definition of job params @@ -21,6 +22,16 @@ public class JobParamDefine{ this.key = key; this.valueLoader = (BiFunction)valueLoader; } + JobParamDefine(String key, Function valueLoader){ + this.key = key; + this.valueLoader = (s, paramSet) -> valueLoader.apply((JobParamSet) paramSet); + } + + @SuppressWarnings("unchecked") + JobParamDefine(String key, Function valueLoader, Class clazz){ + this.key = key; + this.valueLoader = (s, paramSet) -> valueLoader.apply((U) paramSet); + } public String getKey() { return key; diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java index e81470598..6d7480251 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java @@ -32,6 +32,13 @@ public JobParamSet addNonNull(JobParamDefine jobParamDefine){ } return null; } + + public JobParamSet addNonNull(JobParam jobParam){ + if (Objects.nonNull(jobParam.getValue())) { + jobParamStore.put(jobParam.getStrKey(), jobParam); + } + return this; + } /** * Append * @param key custom key diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java index 5f8047092..a19256c7c 100644 --- 
a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java @@ -1,10 +1,10 @@ package com.webank.wedatasphere.exchangis.job.domain.params; -import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.Supplier; /** @@ -24,6 +24,14 @@ public static JobParamDefine define(String key, BiFunction(key, valueLoader); } + public static JobParamDefine define(String key, Function valueLoader){ + return new JobParamDefine<>(key, valueLoader); + } + + public static JobParamDefine define(String key, Function valueLoader, Class type){ + return new JobParamDefine<>(key, valueLoader, type); + } + @SuppressWarnings({"unchecked", "rawtypes"}) public static JobParamDefine define(String key){ return new JobParamDefine<>(key, (paramKey, source) -> { @@ -39,50 +47,41 @@ public static JobParamDefine define(String key){ }); } - @SuppressWarnings({"unchecked", "rawtypes"}) - public static JobParamDefine define(String key, Class valueType){ - return new JobParamDefine<>(key,(paramKey, source) -> { - if(Objects.nonNull(source)){ - if(source instanceof JobParamSet) { - JobParam result = ((JobParamSet)source).get(key); - return Objects.nonNull(result)? (T)result.getValue() : null; - }else if (source instanceof Map){ - return (T) ((Map)source).get(key); - } - } - return null; - }); - } + /** * Use default value loader: (string, JobParamSet) -> ? 
* @param key key * @param mappingKey mapping key * @return */ - @SuppressWarnings({"unchecked", "rawtypes"}) - public static JobParamDefine define(String key, String mappingKey){ - return new JobParamDefine<>(key, (paramKey, source) -> { - if(Objects.nonNull(source)){ - if(source instanceof JobParamSet) { - JobParam result = ((JobParamSet)source).remove(mappingKey); - return Objects.nonNull(result)? (U)result.getValue() : null; - }else if (source instanceof Map){ - return (U) ((Map)source).remove(mappingKey); - } - } - return null; - }); + public static JobParamDefine define(String key, String mappingKey){ + return define(key, new String[]{mappingKey}, result-> result, (Class)null); + } + + public static JobParamDefine define(String key, String mappingKey, Function transform, Class inputType){ + return define(key, new String[]{mappingKey}, transform, inputType); } @SuppressWarnings({"unchecked", "rawtypes"}) - public static JobParamDefine define(String key, String mappingKey, Class valueType){ - return new JobParamDefine(key, (paramKey, source) -> { - if(Objects.nonNull(source)){ - if(source instanceof JobParamSet) { - JobParam result = ((JobParamSet)source).remove(mappingKey); - return Objects.nonNull(result)? 
(T)result.getValue() : null; - }else if (source instanceof Map){ - return (T) ((Map)source).remove(mappingKey); + private static JobParamDefine define(String key, String[] mappingKeys, Function transform, Class inputType){ + return new JobParamDefine<>(key, (paramKey, source) -> { + if (Objects.nonNull(source)) { + if (source instanceof JobParamSet) { + for (String mappingKey : mappingKeys) { + JobParam result = ((JobParamSet) source).remove(mappingKey); + if (Objects.nonNull(result)) { + return transform.apply((U)result.getValue()); + } + } + return null; + } else if (source instanceof Map) { + for (String mappingKey : mappingKeys) { + Object result = ((Map) source).remove(mappingKey); + if (Objects.nonNull(result)) { + return transform.apply((U)result); + } + } + return null; } } return null; @@ -96,8 +95,25 @@ public static JobParamDefine define(String key, String mappingKey, Class * @return */ + @SuppressWarnings({"unchecked", "rawtypes"}) public static JobParamDefine define(String key, Supplier operator){ - return new JobParamDefine<>(key, (paramKey, source) -> operator.get()); + return new JobParamDefine<>(key, (paramKey, source) -> { + T finalValue = null; + if (Objects.nonNull(source)) { + if (source instanceof JobParamSet) { + JobParam result = ((JobParamSet) source).get(key); + if (Objects.nonNull(result)){ + finalValue = (T)result.getValue(); + } + } else if (source instanceof Map) { + Object result = ((Map) source).get(key); + if (Objects.nonNull(result)){ + return (T)result; + } + } + } + return Objects.nonNull(finalValue) ? 
finalValue : operator.get(); + }); } public static JobParam newOne(String key, T value){ diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java index 89902f901..a6ccf9f51 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java @@ -1,5 +1,5 @@ package com.webank.wedatasphere.exchangis.job.enums; public enum EngineTypeEnum { - DATAX, SQOOP, DICTCOPY; + DATAX, SQOOP, DICTCOPY, NONE } diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java index 9e08768be..fd1609ccd 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java @@ -4,6 +4,7 @@ * Exception code, range:(31000 ~ 31999), the same as "ExchangisDataSourceExceptionCode" */ public enum ExchangisJobExceptionCode { + RENDER_TRANSFORM_ERROR(31885), METRICS_OP_ERROR(31886), TASK_LAUNCH_NOT_EXIST(31887), TASK_LAUNCH_ERROR(31888), @@ -13,12 +14,13 @@ public enum ExchangisJobExceptionCode { TASK_OBSERVER_ERROR(31992), ON_EVENT_ERROR(31993), SCHEDULER_ERROR(31994), - TASK_BUILDER_ERROR(31995), + JOB_BUILDER_ERROR(31995), UNSUPPORTED_TASK_LAUNCH_ENGINE(31996), TASK_GENERATE_ERROR(31997), JOB_EXCEPTION_CODE(31999), - ENGINE_JOB_ERROR(31998), - TRANSFORM_JOB_ERROR(31998); + BUILDER_ENGINE_ERROR(31998), + 
BUILDER_TRANSFORM_ERROR(31998), + UNSUPPORTED_OPERATION(31999); private int code; ExchangisJobExceptionCode(int code) { diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java index d0ba5d4f9..3806e8c44 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java @@ -4,8 +4,15 @@ public class ExchangisJobQueryVo extends ExchangisJobPageQuery { + private static final Integer defaultCurrentPage = 1; + + private static final Integer defaultPageSize = 10; + public ExchangisJobQueryVo(){ + } + public ExchangisJobQueryVo(Long projectId, String jobType, String name) { + this(projectId, jobType, name, defaultCurrentPage, defaultPageSize); } public ExchangisJobQueryVo(Long projectId, String jobType, diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java index b97f77ad3..6bc16b717 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java @@ -3,7 +3,7 @@ import com.webank.wedatasphere.exchangis.common.validator.groups.InsertGroup; -import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; import org.hibernate.validator.constraints.NotBlank; import javax.validation.constraints.NotNull; @@ -115,20 +115,21 @@ public ExchangisJobVo(){ } - public 
ExchangisJobVo(ExchangisJobInfo jobInfo){ - if (Objects.nonNull(jobInfo)) { - this.id = jobInfo.getId(); - this.engineType = jobInfo.getEngineType(); - this.jobDesc = jobInfo.getJobDesc(); - this.jobLabels = jobInfo.getJobLabel(); - this.jobName = jobInfo.getName(); - this.jobType = jobInfo.getJobType(); - this.createTime = jobInfo.getCreateTime(); - this.createUser = jobInfo.getCreateUser(); - this.modifyTime = jobInfo.getLastUpdateTime(); - this.jobParams = jobInfo.getJobParams(); - this.executeUser = jobInfo.getExecuteUser(); - this.proxyUser = jobInfo.getExecuteUser(); + public ExchangisJobVo(ExchangisJobEntity jobEntity){ + if (Objects.nonNull(jobEntity)) { + this.id = jobEntity.getId(); + this.projectId = jobEntity.getProjectId(); + this.engineType = jobEntity.getEngineType(); + this.jobDesc = jobEntity.getJobDesc(); + this.jobLabels = jobEntity.getJobLabel(); + this.jobName = jobEntity.getName(); + this.jobType = jobEntity.getJobType(); + this.createTime = jobEntity.getCreateTime(); + this.createUser = jobEntity.getCreateUser(); + this.modifyTime = jobEntity.getLastUpdateTime(); + this.jobParams = jobEntity.getJobParams(); + this.executeUser = jobEntity.getExecuteUser(); + this.proxyUser = jobEntity.getExecuteUser(); } } diff --git a/exchangis-job/exchangis-job-launcher/pom.xml b/exchangis-job/exchangis-job-launcher/pom.xml index 41bb083b9..2375cdea3 100644 --- a/exchangis-job/exchangis-job-launcher/pom.xml +++ b/exchangis-job/exchangis-job-launcher/pom.xml @@ -5,7 +5,7 @@ exchangis-job com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -15,12 +15,12 @@ com.webank.wedatasphere.exchangis exchangis-job-common - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis exchangis-job-builder - 1.0.0 + 1.1.2 org.apache.linkis diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java 
b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java index 3ba907a48..4f3c714b6 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java @@ -14,6 +14,12 @@ public class ExchangisLauncherConfiguration { public static final String LAUNCHER_LINKIS_REQUEST_MEMORY = "wds.linkis.engineconn.java.driver.memory"; + public static final String LAUNCHER_LINKIS_RESOURCES = "wds.linkis.engineconn.${engine}.bml.resources"; + + public static final String LAUNCHER_LINKIS_EXEC_ID = "wds.linkis.engineconn.${engine}.execution.id"; + + public static final String LAUNCHER_LINKIS_CUSTOM_PARAM_PREFIX = "_${engine}_."; + public static final CommonVars LAUNCHER_LINKIS_CREATOR = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.creator", "exchangis"); public static final CommonVars LAUNCHER_LINKIS_ENGINE_CONN_MODE = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.engineConn.mode", "once"); @@ -22,7 +28,6 @@ public class ExchangisLauncherConfiguration { public static final CommonVars LAUNCHER_LINKIS_MAX_ERROR = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.max.error", 3); - public static final CommonVars LINKIS_SERVER_URL = CommonVars.apply("wds.exchangis.client.linkis.server-url", "http://127.0.0.1:9001"); + public static final CommonVars LIMIT_INTERFACE = CommonVars.apply("wds.exchangis.limit.interface.value", true); - public static final CommonVars LINKIS_TOKEN_VALUE = CommonVars.apply("wds.exchangis.client.linkis.token.value", "EXCHANGIS-TOKEN"); } diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java 
b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java index 8b6fda944..82386557d 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java @@ -1,16 +1,22 @@ package com.webank.wedatasphere.exchangis.job.launcher.builder; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder; import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; import com.webank.wedatasphere.exchangis.job.utils.MemUtils; +import org.apache.linkis.datasourcemanager.common.exception.JsonErrorException; +import org.apache.linkis.datasourcemanager.common.util.PatternInjectUtils; +import org.apache.linkis.datasourcemanager.common.util.json.Json; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; +import java.util.*; +import java.util.stream.Collectors; +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_EXECUTE_ERROR; import static com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration.*; /** @@ -21,8 +27,11 @@ public class LinkisExchangisLauncherJobBuilder extends AbstractExchangisJobBuild private static final String LAUNCHER_NAME = "Linkis"; + private static final Logger LOG = LoggerFactory.getLogger(LinkisExchangisLauncherJobBuilder.class); + @Override 
public LaunchableExchangisTask buildJob(ExchangisEngineJob inputJob, LaunchableExchangisTask expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException { + String engine = inputJob.getEngineType().toLowerCase(Locale.ROOT); LaunchableExchangisTask launchableTask = new LaunchableExchangisTask(); launchableTask.setName(inputJob.getName()); launchableTask.setJobId(inputJob.getId()); @@ -31,11 +40,29 @@ public LaunchableExchangisTask buildJob(ExchangisEngineJob inputJob, LaunchableE launchableTask.setLinkisContentMap(inputJob.getJobContent()); Map linkisParams = new HashMap<>(); Map startUpParams = new HashMap<>(); - linkisParams.put(LAUNCHER_LINKIS_RUNTIME_PARAM_NAME, inputJob.getRuntimeParams()); linkisParams.put(LAUNCHER_LINKIS_STARTUP_PARAM_NAME, startUpParams); - long memoryUsed = Objects.nonNull(inputJob.getMemoryUsed())? MemUtils.convertToGB(inputJob.getMemoryUsed(), - inputJob.getMemoryUnit()) : 0; - startUpParams.put(LAUNCHER_LINKIS_REQUEST_MEMORY, String.valueOf(memoryUsed <= 0 ? 1 : memoryUsed)); + try { + String customParamPrefix = PatternInjectUtils.inject(LAUNCHER_LINKIS_CUSTOM_PARAM_PREFIX, new String[]{engine}); + // Add the runtime params to startup params for once job + startUpParams.putAll(appendPrefixToParams(customParamPrefix, inputJob.getRuntimeParams())); + } catch (JsonErrorException e) { + throw new ExchangisJobException(TASK_EXECUTE_ERROR.getCode(), "Fail to convert custom params for launching", e); + } + long memoryUsed = Optional.ofNullable(inputJob.getMemoryUsed()).orElse(0L); + if (!inputJob.isMemoryUnitLock() && memoryUsed > 0){ + memoryUsed = MemUtils.convertToGB(inputJob.getMemoryUsed(), inputJob.getMemoryUnit()); + inputJob.setMemoryUnit("G"); + } + startUpParams.put(LAUNCHER_LINKIS_REQUEST_MEMORY, (memoryUsed <= 0 ? 
1 : memoryUsed) + inputJob.getMemoryUnit()); + List resources = inputJob.getResources(); + if (!resources.isEmpty()){ + try { + LOG.info("Use the engine resources: {} for job/task: [{}]", Json.toJson(resources, null), inputJob.getName()); + startUpParams.put(PatternInjectUtils.inject(LAUNCHER_LINKIS_RESOURCES, new String[]{engine}), Json.toJson(resources, null)); + } catch (JsonErrorException e) { + throw new ExchangisJobException(TASK_EXECUTE_ERROR.getCode(), "Fail to use engine resources", e); + } + } launchableTask.setLinkisParamsMap(linkisParams); launchableTask.setEngineType(inputJob.getEngineType()); launchableTask.setLabels(inputJob.getJobLabel()); @@ -44,4 +71,15 @@ public LaunchableExchangisTask buildJob(ExchangisEngineJob inputJob, LaunchableE launchableTask.setLinkisJobName(LAUNCHER_NAME); return launchableTask; } + + /** + * Append prefix to params + * @param prefix prefix + * @param customParams custom params + * @return params + */ + private Map appendPrefixToParams(String prefix, Map customParams){ + return customParams.entrySet().stream().collect(Collectors.toMap(entry -> prefix + entry.getKey(), + Map.Entry::getValue)); + } } diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java index d719b8c67..d5ef7e8e0 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java @@ -1,19 +1,29 @@ package com.webank.wedatasphere.exchangis.job.launcher.linkis; -import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.common.linkis.ClientConfiguration; 
import com.webank.wedatasphere.exchangis.job.enums.EngineTypeEnum; import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; -import com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration; import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLauncher; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; import org.apache.commons.lang.StringUtils; -import org.apache.linkis.computation.client.LinkisJobBuilder; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.conf.Configuration$; +import org.apache.linkis.common.exception.LinkisRetryException; +import org.apache.linkis.common.utils.DefaultRetryHandler; +import org.apache.linkis.common.utils.RetryHandler; import org.apache.linkis.computation.client.LinkisJobClient; +import org.apache.linkis.computation.client.LinkisJobClient$; +import org.apache.linkis.httpclient.config.ClientConfig; +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy; +import org.apache.linkis.httpclient.dws.config.DWSClientConfig; +import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder; +import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder$; import java.util.*; +import java.util.concurrent.TimeUnit; /** * Linkis task launcher @@ -35,8 +45,25 @@ public String name() { public void init(ExchangisTaskLaunchManager jobLaunchManager) { this.engineVersions.put(EngineTypeEnum.SQOOP.name().toLowerCase(), "1.4.6"); this.engineVersions.put(EngineTypeEnum.DATAX.name().toLowerCase(), "3.0.0"); - LinkisJobClient.config().setDefaultAuthToken(ExchangisLauncherConfiguration.LINKIS_TOKEN_VALUE.getValue()); - 
LinkisJobClient.config().setDefaultServerUrl(ExchangisLauncherConfiguration.LINKIS_SERVER_URL.getValue()); + RetryHandler retryHandler = new DefaultRetryHandler(){}; + retryHandler.addRetryException(LinkisRetryException.class); + ClientConfig clientConfig = DWSClientConfigBuilder$.MODULE$ + .newBuilder() + .setDWSVersion(Configuration.LINKIS_WEB_VERSION().getValue()) + .addServerUrl(ClientConfiguration.LINKIS_SERVER_URL.getValue()) + .connectionTimeout(45000) + .discoveryEnabled(false) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(false) + .maxConnectionSize(ClientConfiguration.LINKIS_DEFAULT_MAX_CONNECTIONS.getValue()) + .retryEnabled(true) + .setRetryHandler(retryHandler) + .readTimeout(90000) // We think 90s is enough, if SocketTimeoutException is throw, just set a new clientConfig to modify it. + .setAuthenticationStrategy(new TokenAuthenticationStrategy()) + .setAuthTokenKey(TokenAuthenticationStrategy.TOKEN_KEY()) + .setAuthTokenValue(ClientConfiguration.LINKIS_TOKEN_VALUE.getValue()) + .build(); + LinkisJobClient$.MODULE$.config().setDefaultClientConfig((DWSClientConfig) clientConfig); } @Override diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java index 19c0e3a5f..408fdc43b 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java @@ -17,6 +17,8 @@ import org.apache.linkis.computation.client.once.simple.SubmittableSimpleOnceJob; import org.apache.linkis.computation.client.operator.impl.*; import org.apache.linkis.computation.client.utils.LabelKeyUtils; +import 
org.apache.linkis.datasourcemanager.common.exception.JsonErrorException; +import org.apache.linkis.datasourcemanager.common.util.PatternInjectUtils; import org.apache.linkis.protocol.engine.JobProgressInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -122,15 +124,22 @@ public TaskStatus getStatus() throws ExchangisTaskLaunchException { String linkisJobStatus = this.onceJob.getStatus(this.jobInfo); if ("success".equalsIgnoreCase(linkisJobStatus)) { this.status = TaskStatus.Success; - } else if ("failed".equalsIgnoreCase(linkisJobStatus)) { + } else if ("failed".equalsIgnoreCase(linkisJobStatus)){ this.status = TaskStatus.Failed; + } else if ("shuttingdown".equalsIgnoreCase(linkisJobStatus)) { + LOG.warn("Will retry on linkis job status: [{}]", linkisJobStatus); + // Retry on shutting down status + this.status = TaskStatus.WaitForRetry; } else { this.status = TaskStatus.Running; } + // Init the error count + this.reqError.set(0); } catch (Exception e){ try { dealException(e); } catch (ExchangisTaskNotExistException ne){ + LOG.warn("Not find the launcher task in exchangis", e); this.status = TaskStatus.Failed; } } @@ -149,7 +158,10 @@ public Map getMetricsInfo() throws ExchangisTaskLaunchException try{ // Invoke getStatus() to get real time status if(!TaskStatus.isCompleted(getStatus())){ - return (Map)this.metricsOperator.apply(); + Map metrics = (Map)this.metricsOperator.apply(); + // Init the error count + this.reqError.set(0); + return metrics; } }catch(Exception e){ dealException(e); @@ -182,6 +194,8 @@ public TaskProgressInfo getProgressInfo() throws ExchangisTaskLaunchException { } this.progressInfo.setProgress(1.0f); } + // Init the error count + this.reqError.set(0); } catch(Exception e){ dealException(e); } @@ -195,6 +209,8 @@ public void kill() throws ExchangisTaskLaunchException { try{ this.onceJob.kill(); this.status = TaskStatus.Cancelled; + // Init the error count + this.reqError.set(0); }catch(Exception e){ dealException(e); } @@ -221,6 
+237,8 @@ public LogResult queryLogs(LogQuery query) throws ExchangisTaskLaunchException { if (isEnd){ isEnd = TaskStatus.isCompleted(getStatus()); } + // Init the error count + this.reqError.set(0); return new LogResult(logs.endLine(), isEnd, logs.logs()); } catch (Exception e){ dealException(e); @@ -237,6 +255,10 @@ public synchronized void submit() throws ExchangisTaskLaunchException { } try { ((SubmittableOnceJob) this.onceJob).submit(); + TaskStatus status = getStatus(); + if (status == TaskStatus.Undefined || status == TaskStatus.WaitForRetry){ + throw new ExchangisTaskLaunchException("Fail to submit to linkis server with unexpected final status: [" + status + "]", null); + } // New the operators for job prepareOperators(this.onceJob); Map jobInfo = getJobInfo(false); @@ -272,6 +294,12 @@ private SimpleOnceJob toSubmittableJob(LaunchableExchangisTask task){ jobBuilder.setStartupParams((Map) startupParams); } }); + try { + jobBuilder.addStartupParam(PatternInjectUtils.inject(LAUNCHER_LINKIS_EXEC_ID, + new String[]{task.getEngineType().toLowerCase(Locale.ROOT)}), task.getId()); + } catch (JsonErrorException e) { + //Ignore + } return jobBuilder.build(); } diff --git a/exchangis-job/exchangis-job-metrics/pom.xml b/exchangis-job/exchangis-job-metrics/pom.xml index 5b1c569fb..9dcdd12fb 100644 --- a/exchangis-job/exchangis-job-metrics/pom.xml +++ b/exchangis-job/exchangis-job-metrics/pom.xml @@ -5,7 +5,7 @@ exchangis-job com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 diff --git a/exchangis-job/exchangis-job-server/pom.xml b/exchangis-job/exchangis-job-server/pom.xml index 6751bc6c1..60c64165e 100644 --- a/exchangis-job/exchangis-job-server/pom.xml +++ b/exchangis-job/exchangis-job-server/pom.xml @@ -5,7 +5,7 @@ exchangis-job com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -17,22 +17,25 @@ - com.webank.wedatasphere.exchangis - exchangis-job-common - 1.0.0 + exchangis-project-provider + 1.1.2 + + + com.webank.wedatasphere.exchangis + 
exchangis-job-launcher + 1.1.2 - com.webank.wedatasphere.exchangis exchangis-datasource-service - 1.0.0 + 1.1.2 com.webank.wedatasphere.exchangis - exchangis-job-launcher - 1.0.0 + exchangis-engine-core + ${exchangis.version} @@ -46,12 +49,6 @@ mysql-connector-java 5.1.49 - - com.webank.wedatasphere.exchangis - exchangis-project-server - 1.0.0 - compile - diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java index 6faa97ceb..0c3391d94 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java @@ -1,6 +1,5 @@ package com.webank.wedatasphere.exchangis.job.server; -import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; import com.webank.wedatasphere.exchangis.job.builder.manager.ExchangisJobBuilderManager; import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; @@ -22,13 +21,14 @@ import com.webank.wedatasphere.exchangis.job.server.log.DefaultRpcJobLogger; import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; import com.webank.wedatasphere.exchangis.job.server.log.service.LocalSimpleJobLogService; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; import org.apache.linkis.scheduler.Scheduler; import org.apache.linkis.scheduler.executer.ExecutorManager; import org.apache.linkis.scheduler.queue.ConsumerManager; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import 
org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.DependsOn; import java.util.List; import java.util.Objects; @@ -38,6 +38,7 @@ * Auto configure the beans in job execution */ @Configuration +@DependsOn("springContextHolder") public class ExchangisJobExecuteAutoConfiguration { @Bean @@ -64,9 +65,8 @@ public ExchangisJobBuilderManager jobBuilderManager(){ @Bean @ConditionalOnMissingBean(TaskGeneratorContext.class) - public TaskGeneratorContext taskGeneratorContext(JobLogListener jobLogListener, - MetadataInfoService metadataInfoService){ - return new DefaultTaskGeneratorContext(jobLogListener, metadataInfoService); + public TaskGeneratorContext taskGeneratorContext(JobLogListener jobLogListener){ + return new SpringTaskGeneratorContext(jobLogListener, SpringContextHolder.getApplicationContext()); } /** diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobRenderAutoConfiguration.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobRenderAutoConfiguration.java new file mode 100644 index 000000000..43af16c15 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobRenderAutoConfiguration.java @@ -0,0 +1,83 @@ +package com.webank.wedatasphere.exchangis.job.server; + +import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformRuleDao; +import com.webank.wedatasphere.exchangis.job.server.render.transform.*; +import com.webank.wedatasphere.exchangis.job.server.render.transform.def.DefaultTransformDefineRulesFusion; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.DefaultFieldMappingRulesFusion; +import 
com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.FieldMappingRulesFusion; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.FieldMappingTransformer; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.*; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.ProcessorTransformer; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.DependsOn; + +import java.util.List; +import java.util.Optional; + +@Configuration +@DependsOn("springContextHolder") +public class ExchangisJobRenderAutoConfiguration { + + /** + * Field match strategy factory + * @return factory + */ + @Bean + @ConditionalOnMissingBean(FieldMatchNamedStrategyFactory.class) + public FieldMatchStrategyFactory matchStrategyFactory(){ + FieldMatchNamedStrategyFactory namedStrategyFactory = new FieldMatchNamedStrategyFactory(); + namedStrategyFactory.registerStrategy(FieldAllMatchStrategy.ALL_MATCH, new FieldAllMatchStrategy()); + namedStrategyFactory.registerStrategy(FieldAllMatchIgnoreCaseStrategy.ALL_MATCH_IGNORE_CASE, new FieldAllMatchIgnoreCaseStrategy()); + namedStrategyFactory.registerStrategy(FieldCamelCaseMatchStrategy.CAMEL_CASE_MATCH, new FieldCamelCaseMatchStrategy()); + return namedStrategyFactory; + } + /** + * Field mapping rule fusion + * @param strategyFactory match strategy factory + * @return rule fusion + */ + @Bean + @ConditionalOnMissingBean(FieldMappingRulesFusion.class) + public FieldMappingRulesFusion fieldMappingRulesFusion(FieldMatchStrategyFactory strategyFactory){ + return new DefaultFieldMappingRulesFusion(strategyFactory); + } + /** + * Transform definition rule fusion + * @return fusion + */ + @Bean + public TransformRulesFusion defineRuleFusion(){ + return new 
DefaultTransformDefineRulesFusion(); + } + /** + * Field mapping transformer + * @param rulesFusion rule fusion + * @param transformRuleDao transform rule dao + * @return transformer + */ + @Bean + public FieldMappingTransformer fieldMappingTransformer(FieldMappingRulesFusion rulesFusion, JobTransformRuleDao transformRuleDao){ + return new FieldMappingTransformer(rulesFusion, transformRuleDao); + } + /** + * Processor transformer + * @return transformer + */ + @Bean + public ProcessorTransformer processorTransformer(){ + return new ProcessorTransformer(); + } + + @Bean + @ConditionalOnMissingBean(TransformerContainer.class) + public TransformerContainer transformerContainer(List transformers){ + TransformerContainer container = new DefaultTransformContainer(); + Optional.ofNullable(transformers).ifPresent(elements -> { + elements.forEach(element -> container.registerTransformer(element.name(), element)); + }); + return container; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractLoggingExchangisJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/AbstractLoggingExchangisJobBuilder.java similarity index 53% rename from exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractLoggingExchangisJobBuilder.java rename to exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/AbstractLoggingExchangisJobBuilder.java index 26b1bc672..d2dbba1da 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractLoggingExchangisJobBuilder.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/AbstractLoggingExchangisJobBuilder.java @@ -1,29 +1,30 @@ -package 
com.webank.wedatasphere.exchangis.job.server.builder.engine; +package com.webank.wedatasphere.exchangis.job.server.builder; import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder; import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase; import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; -import com.webank.wedatasphere.exchangis.job.server.builder.ServiceInExchangisJobBuilderContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstract implement for engine job builder */ public abstract class AbstractLoggingExchangisJobBuilder extends AbstractExchangisJobBuilder { - + private static final Logger LOG = LoggerFactory.getLogger(AbstractLoggingExchangisJobBuilder.class); /** * Get builder context * @return context * @throws ExchangisJobException.Runtime exception */ - protected static ServiceInExchangisJobBuilderContext getServiceInBuilderContext() throws ExchangisJobException.Runtime{ + protected static SpringExchangisJobBuilderContext getSpringBuilderContext() throws ExchangisJobException.Runtime{ ExchangisJobBuilderContext context = getCurrentBuilderContext(); - if (!(context instanceof ServiceInExchangisJobBuilderContext)) { - throw new ExchangisJobException.Runtime(-1, "The job builder context cannot not be casted to " + ServiceInExchangisJobBuilderContext.class.getCanonicalName(), null); + if (!(context instanceof SpringExchangisJobBuilderContext)) { + throw new ExchangisJobException.Runtime(-1, "The job builder context cannot not be casted to " + SpringExchangisJobBuilderContext.class.getCanonicalName(), null); } - return (ServiceInExchangisJobBuilderContext)context; + return (SpringExchangisJobBuilderContext)context; } /** @@ -31,11 +32,11 @@ protected static ServiceInExchangisJobBuilderContext getServiceInBuilderContext( * @param 
message message */ public static void warn(String message, Object... args){ - getServiceInBuilderContext().getLogging().warn(null, message, args); + getSpringBuilderContext().getLogging().warn(null, message, args); } public static void warn(String message, Throwable t){ - getServiceInBuilderContext().getLogging().warn(null, message, t); + getSpringBuilderContext().getLogging().warn(null, message, t); } /** @@ -43,10 +44,20 @@ public static void warn(String message, Throwable t){ * @param message message */ public static void info(String message, Object... args){ - getServiceInBuilderContext().getLogging().info(null, message, args); + getSpringBuilderContext().getLogging().info(null, message, args); } public static void info(String message, Throwable t){ - getServiceInBuilderContext().getLogging().info(null, message, t); + getSpringBuilderContext().getLogging().info(null, message, t); + } + + /** + * Get bean in spring context + * @param beanClass bean class + * @param + * @return + */ + public static T getBean(Class beanClass){ + return getSpringBuilderContext().getBean(beanClass); } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java index b5a65807d..dd79b28c2 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java @@ -5,6 +5,10 @@ */ public class JobParamConstraints { + public static final String ENCODING = "encoding"; + + public static final String NULL_FORMAT = "nullFormat"; + public static final String DATA_SOURCE_ID = "data_source_id"; public static final String USERNAME = "username"; @@ -21,6 +25,8 @@ public class 
JobParamConstraints { public static final String PORT = "port"; + public static final String SERVICE_NAME = "instance"; + public static final String WHERE = "where"; public static final String WRITE_MODE = "writeMode"; diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/ServiceInExchangisJobBuilderContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderContext.java similarity index 77% rename from exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/ServiceInExchangisJobBuilderContext.java rename to exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderContext.java index 2f73f7ca0..ad5811a7b 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/ServiceInExchangisJobBuilderContext.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderContext.java @@ -9,18 +9,15 @@ import com.webank.wedatasphere.exchangis.job.server.log.JobServerLogging; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContext; import java.util.Objects; /** * Service in job builder context */ -public class ServiceInExchangisJobBuilderContext extends ExchangisJobBuilderContext { +public class SpringExchangisJobBuilderContext extends ExchangisJobBuilderContext { - /** - * Meta info service - */ - private MetadataInfoService metadataInfoService; /** * Job execution id @@ -32,9 +29,12 @@ public class ServiceInExchangisJobBuilderContext extends ExchangisJobBuilderCont */ private JobServerLogging> logging; - public ServiceInExchangisJobBuilderContext(ExchangisJobInfo originalJob, - JobLogListener jobLogListener) { + private ApplicationContext 
applicationContext; + + public SpringExchangisJobBuilderContext(ExchangisJobInfo originalJob, + ApplicationContext applicationContext, JobLogListener jobLogListener) { super(originalJob); + this.applicationContext = applicationContext; this.logging = new JobServerLogging>() { @Override public Logger getLogger() { @@ -62,12 +62,8 @@ public void setJobExecutionId(String jobExecutionId) { this.jobExecutionId = jobExecutionId; } - public MetadataInfoService getMetadataInfoService() { - return metadataInfoService; - } - - public void setMetadataInfoService(MetadataInfoService metadataInfoService) { - this.metadataInfoService = metadataInfoService; + public T getBean(Class requiredType) { + return applicationContext.getBean(requiredType); } public JobServerLogging> getLogging() { diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractResourceEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractResourceEngineJobBuilder.java new file mode 100644 index 000000000..57edfeade --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractResourceEngineJobBuilder.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.engine; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.manager.ExchangisEngineManager; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import 
com.webank.wedatasphere.exchangis.job.server.builder.AbstractLoggingExchangisJobBuilder; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.conf.CommonVars; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Engine job builder with engine resources + */ +public abstract class AbstractResourceEngineJobBuilder extends AbstractLoggingExchangisJobBuilder { + + /** + * Get resources + * @return path + */ + protected List getResources(String engine, String[] paths){ + EngineResourceContainer resourceContainer = + getBean(ExchangisEngineManager.class).getResourceContainer(engine); + List resources = new ArrayList<>(); + if (Objects.nonNull(resourceContainer)){ + for(String path : paths){ + if (StringUtils.isNotBlank(path)) { + Optional.ofNullable(resourceContainer.getRemoteResource(path)) + .ifPresent(resources::add); + } + } + } + return resources; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java index 89934dd2e..6741cc07f 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java @@ -2,19 +2,28 @@ import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import java.util.Objects; + /** * Datax engine job */ public class DataxExchangisEngineJob extends ExchangisEngineJob { - private static final String CODE_NAME = "code"; - + private static final String CODE_NAME = "job"; - public String getCode() { - return String.valueOf(super.getJobContent().get(CODE_NAME)); + public 
DataxExchangisEngineJob(ExchangisEngineJob engineJob){ + super(engineJob); + } + @SuppressWarnings({"unchecked"}) + public T getCode(Class type) { + Object code = super.getJobContent().get(CODE_NAME); + if (Objects.nonNull(code) && type.isAssignableFrom(code.getClass())){ + return (T)code; + } + return null; } - public void setCode(String code) { + public void setCode(Object code) { super.getJobContent().put(CODE_NAME, code); } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java index 97026d83e..9132100cf 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java @@ -2,130 +2,101 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceConf; import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; -import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder; import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; -import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; -import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; import 
com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; import com.webank.wedatasphere.exchangis.job.server.utils.JsonEntity; -import org.apache.commons.lang.StringUtils; -import org.apache.linkis.manager.label.utils.LabelUtils; +import com.webank.wedatasphere.exchangis.job.utils.MemUtils; +import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.net.URI; -import java.net.URISyntaxException; import java.util.*; -import java.util.function.BiFunction; -import static com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob.*; - -public class DataxExchangisEngineJobBuilder extends AbstractExchangisJobBuilder { +/** + * Datax engine job builder + */ +public class DataxExchangisEngineJobBuilder extends AbstractResourceEngineJobBuilder { private static final Logger LOG = LoggerFactory.getLogger(DataxExchangisEngineJob.class); + private static final String BYTE_SPEED_SETTING_PARAM = "setting.speed.byte"; + + private static final String PROCESSOR_SWITCH = "setting.useProcessor"; + + private static final String PROCESSOR_BASE_PATH = "core.processor.loader.plugin.sourcePath"; + + private static final Map PLUGIN_NAME_MAPPER = new HashMap<>(); + + static{ + //hive use hdfs plugin resource + PLUGIN_NAME_MAPPER.put("hive", "hdfs"); + } + /** - * Mapping params + * Column mappings define */ - private static final JobParamDefine>> TRANSFORM_MAPPING = JobParams.define("mapping"); + private static final JobParamDefine COLUMN_MAPPINGS = JobParams.define("column.mappings", job -> { + DataxMappingContext mappingContext = new DataxMappingContext(); + job.getSourceColumns().forEach(columnDefine -> mappingContext.getSourceColumns().add( + new 
DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), columnDefine.getIndex() + "") + )); + job.getSinkColumns().forEach(columnDefine -> mappingContext.getSinkColumns().add( + new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), columnDefine.getIndex() + "") + )); + job.getColumnFunctions().forEach(function -> { + DataxMappingContext.Transformer.Parameter parameter = new DataxMappingContext.Transformer.Parameter(); + parameter.setColumnIndex(function.getIndex() + ""); + parameter.setParas(function.getParams()); + mappingContext.getTransformers() + .add(new DataxMappingContext.Transformer(function.getName(), parameter)); + }); + return mappingContext; + }, SubExchangisJob.class); /** - * Source params for column + * Source content */ - private static final JobParamDefine SOURCE_FIELD_NAME = JobParams.define("name", "source_field_name", String.class); - private static final JobParamDefine SOURCE_FIELD_TYPE = JobParams.define("type", "source_field_type", String.class); - private static final JobParamDefine SOURCE_FIELD_INDEX = JobParams.define("index", "source_field_index", Integer.class); + private static final JobParamDefine PLUGIN_SOURCE_NAME = JobParams.define("content[0].reader.name", job -> + getPluginName(job.getSourceType(), "reader"), SubExchangisJob.class); + + private static final JobParamDefine> PLUGIN_SOURCE_PARAM = JobParams.define("content[0].reader.parameter", job -> + job.getParamsToMap(SubExchangisJob.REALM_JOB_CONTENT_SOURCE, false), SubExchangisJob.class); /** - * Sink params for column + * Sink content */ - private static final JobParamDefine SINK_FIELD_NAME = JobParams.define("name", "sink_field_name", String.class); - private static final JobParamDefine SINK_FIELD_TYPE = JobParams.define("type", "sink_field_type", String.class); - private static final JobParamDefine SINK_FIELD_INDEX = JobParams.define("index", "sink_field_index", Integer.class); - - private static final JobParamDefine>> 
SOURCE_COLUMN = JobParams.define("column", (BiFunction>>) (key, paramSet) -> { + private static final JobParamDefine PLUGIN_SINK_NAME = JobParams.define("content[0]].writer.name", job -> + getPluginName(job.getSinkType(), "writer"), SubExchangisJob.class); - List> columns = new ArrayList<>(); - List> mappings = TRANSFORM_MAPPING.newParam(paramSet).getValue(); - if (Objects.nonNull(mappings)) { - mappings.forEach(mapping -> { - Map _mapping = new HashMap<>(mapping); - Map column = new HashMap<>(); - columns.add(column); - column.put(SOURCE_FIELD_NAME.getKey(), SOURCE_FIELD_NAME.newParam(_mapping).getValue()); - column.put(SOURCE_FIELD_TYPE.getKey(), SOURCE_FIELD_TYPE.newParam(_mapping).getValue()); - column.put(SOURCE_FIELD_INDEX.getKey(), SOURCE_FIELD_INDEX.newParam(_mapping).getValue()); - }); - } - return columns; - }); + private static final JobParamDefine> PLUGIN_SINK_PARAM = JobParams.define("content[0].writer.parameter", job -> + job.getParamsToMap(SubExchangisJob.REALM_JOB_CONTENT_SINK, false), SubExchangisJob.class); - private static final JobParamDefine>> SINK_COLUMN = JobParams.define("column", (BiFunction>>) (key, paramSet) -> { - List> columns = new ArrayList<>(); - List> mappings = TRANSFORM_MAPPING.newParam(paramSet).getValue(); - if (Objects.nonNull(mappings)) { - mappings.forEach(mapping -> { - Map _mapping = new HashMap<>(mapping); - Map column = new HashMap<>(); - columns.add(column); - column.put(SINK_FIELD_NAME.getKey(), SINK_FIELD_NAME.newParam(_mapping).getValue()); - column.put(SINK_FIELD_TYPE.getKey(), SINK_FIELD_TYPE.newParam(_mapping).getValue()); - column.put(SINK_FIELD_INDEX.getKey(), SINK_FIELD_INDEX.newParam(_mapping).getValue()); - }); - } - return columns; - }); + /** + * Source columns + */ + private static final JobParamDefine> SOURCE_COLUMNS = JobParams.define("content[0].reader.parameter.column", + DataxMappingContext::getSourceColumns,DataxMappingContext.class); - private static final JobParamDefine> TRANSFORMER = 
JobParams.define("column", (BiFunction>) (key, paramSet) -> { - List transformers = new ArrayList<>(); - List> mappings = TRANSFORM_MAPPING.newParam(paramSet).getValue(); - if (Objects.nonNull(mappings)) { - mappings.forEach(mapping -> { - Map _mapping = new HashMap<>(mapping); - int fieldIndex = SOURCE_FIELD_INDEX.newParam(_mapping).getValue(); - Object validator = mapping.get("validator"); - if (null != validator) { - List params = (List) validator; - if (params.size() > 0) { - Transformer transformer = new Transformer(); - transformer.setName("dx_filter"); - TransformerParameter parameter = new TransformerParameter(); - parameter.setColumnIndex(fieldIndex); - parameter.setParas(params.toArray(new String[0])); - transformer.setParameter(parameter); - transformers.add(transformer); - } - } + /** + * Sink columns + */ + private static final JobParamDefine> SINK_COLUMNS = JobParams.define("content[0].writer.parameter.column", + DataxMappingContext::getSinkColumns,DataxMappingContext.class); - Object transfomer = mapping.get("transformer"); - if (null != transfomer) { - Map define = (Map) transfomer; - if (null != define.get("name") && !StringUtils.isBlank(define.get("name").toString())) { - Transformer transformer = new Transformer(); - transformer.setName(define.get("name").toString()); - TransformerParameter parameter = new TransformerParameter(); - parameter.setColumnIndex(fieldIndex); - parameter.setParas(new String[0]); - Object params = define.get("params"); - if (null != params) { - List paramsDefine = (List) params; - if (null != paramsDefine && paramsDefine.size() > 0) { - parameter.setParas(paramsDefine.toArray(new String[0])); - } - } - transformer.setParameter(parameter); - transformers.add(transformer); - } - } - }); - } - return transformers; - }); + /** + * Transform list + */ + private static final JobParamDefine> TRANSFORM_LIST = JobParams.define("content[0].transformer", + DataxMappingContext::getTransformers, DataxMappingContext.class); @Override 
public int priority() { @@ -141,580 +112,109 @@ public boolean canBuild(SubExchangisJob inputJob) { public DataxExchangisEngineJob buildJob(SubExchangisJob inputJob, ExchangisEngineJob expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException { try { - DataxExchangisEngineJob engineJob = new DataxExchangisEngineJob(); + DataxExchangisEngineJob engineJob = new DataxExchangisEngineJob(expectOut); engineJob.setId(inputJob.getId()); - - DataxCode dataxCode = buildDataxCode(inputJob, ctx); - - engineJob.setCode(Json.toJson(dataxCode, null)); - try { - LOG.info("Datax-code built complete, output: " + Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(dataxCode)); - } catch (JsonProcessingException e) { - //Ignore + Map codeMap = buildDataxCode(inputJob, ctx); + if (Objects.nonNull(codeMap)){ + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Datax-code built complete, output: " + Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(codeMap)); + } + info("Datax-code built complete, output: " + Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(codeMap)); + } catch (JsonProcessingException e) { + //Ignore + } + engineJob.setCode(codeMap); } - if (Objects.nonNull(expectOut)) { - engineJob.setName(expectOut.getName()); - engineJob.setEngineType(expectOut.getEngineType()); + // engine resources + engineJob.getResources().addAll( + getResources(inputJob.getEngineType().toLowerCase(Locale.ROOT), getResourcesPaths(inputJob))); + if (inputJob instanceof TransformExchangisJob.TransformSubExchangisJob){ + TransformExchangisJob.TransformSubExchangisJob transformJob = ((TransformExchangisJob.TransformSubExchangisJob) inputJob); + TransformTypes type = transformJob.getTransformType(); + if (type == TransformTypes.PROCESSOR){ + settingProcessorInfo(transformJob, engineJob); + } } - - engineJob.setRuntimeParams(inputJob.getParamsToMap(SubExchangisJob.REALM_JOB_SETTINGS, false)); engineJob.setName(inputJob.getName()); - if 
(Objects.nonNull(expectOut)) { - engineJob.setName(expectOut.getName()); - engineJob.setEngineType(expectOut.getEngineType()); - } + //Unit MB + Optional.ofNullable(engineJob.getRuntimeParams().get(BYTE_SPEED_SETTING_PARAM)).ifPresent(byteLimit -> { + long limit = Long.parseLong(String.valueOf(byteLimit)); + // Convert to bytes + engineJob.getRuntimeParams().put(BYTE_SPEED_SETTING_PARAM, + MemUtils.convertToByte(limit, MemUtils.StoreUnit.MB.name())); + }); + engineJob.setCreateUser(inputJob.getCreateUser()); + // Lock the memory unit + engineJob.setMemoryUnitLock(true); return engineJob; } catch (Exception e) { - throw new ExchangisJobException(ExchangisJobExceptionCode.ENGINE_JOB_ERROR.getCode(), + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_ENGINE_ERROR.getCode(), "Fail to build datax engine job, message:[" + e.getMessage() + "]", e); } } /** - * Datax code - * + * Build datax code content * @param inputJob input job - * @return code + * @param ctx ctx + * @return code map */ - private DataxCode buildDataxCode(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx) { - - DataxCode code = new DataxCode(); - String sourceType = inputJob.getSourceType(); - String sinkType = inputJob.getSinkType(); - Content content = new Content(); - if (sourceType.equalsIgnoreCase("mysql")) { - content.getReader().putAll(this.buildMySQLReader(inputJob, ctx)); - } - if (sourceType.equalsIgnoreCase("hive")) { - content.getReader().putAll(this.buildHiveReader(inputJob, ctx)); - } - if (sinkType.equalsIgnoreCase("mysql")) { - content.getWriter().putAll(this.buildMySQLWriter(inputJob, ctx)); - } - if (sinkType.equalsIgnoreCase("hive")) { - content.getWriter().putAll(this.buildHiveWriter(inputJob, ctx)); - } - - content.getTransformer().addAll(this.buildTransformer(inputJob, ctx)); - - code.getContent().add(content); - code.getSetting().putAll(this.buildSettings(inputJob, ctx)); - - //To construct settings -// JobParamSet paramSet = 
inputJob.getRealmParams(SubExchangisJob.REALM_JOB_SETTINGS); -// if (Objects.nonNull(paramSet)) { -// JsonConfiguration setting = JsonConfiguration.from(code.getSetting()); -// paramSet.toList().forEach(param -> setting.set(param.getStrKey(), param.getValue())); -// code.setSetting(setting.toMap()); -// } - -// //To construct content -// Content content1 = new Content(); -// JobParamSet transformJobParamSet = inputJob.getRealmParams(SubExchangisJob.REALM_JOB_COLUMN_MAPPING); -// paramSet = inputJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); -// JsonConfiguration reader = JsonConfiguration.from(content1.getReader()); -// if(Objects.nonNull(paramSet)){ -// if (StringUtils.isNotBlank(inputJob.getSourceType())){ -// reader.set("name", inputJob.getSourceType().toLowerCase() + "reader"); -// reader.set("parameter", buildContentParam(paramSet, transformJobParamSet, SOURCE_COLUMN)); -// content1.setReader(reader.toMap()); -// } -// } -// paramSet = inputJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); -// JsonConfiguration writer = JsonConfiguration.from(content1.getReader()); -// if(Objects.nonNull(paramSet)){ -// if (StringUtils.isNotBlank(inputJob.getSinkType())){ -// writer.set("name", inputJob.getSinkType().toLowerCase() + "writer"); -// writer.set("parameter", buildContentParam(paramSet, transformJobParamSet, SINK_COLUMN)); -// content1.setWriter(writer.toMap()); -// } -// } - return code; - } - - private Map buildContentParam(JobParamSet paramSet, JobParamSet transformJobParamSet, - JobParamDefine>> columnJobParamDefine) { - JsonEntity item = JsonEntity.from("{}"); - //Ignore temp params - paramSet.toList(false).forEach(param -> item.set(param.getStrKey(), param.getValue())); - if (Objects.nonNull(transformJobParamSet)) { - item.set(columnJobParamDefine.getKey(), columnJobParamDefine.newParam(transformJobParamSet).getValue()); - } - return item.toMap(); - } - - private Map buildMySQLReader(SubExchangisJob inputJob, 
ExchangisJobBuilderContext ctx) { - Map reader = new HashMap<>(); - reader.put("name", "mysqlreader"); - - JobParamSet sourceSettings = inputJob.getRealmParams(REALM_JOB_CONTENT_SOURCE); - - Map parameter = new HashMap<>(); - parameter.put("connParams", new HashMap()); - parameter.put("haveKerberos", false); - parameter.put("datasource", Integer.parseInt(sourceSettings.get("datasource").getValue().toString())); - - parameter.put("username", ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("username")); - String password = ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("password").toString(); - parameter.put("password", password); - List> columns = SOURCE_COLUMN.newParam(inputJob.getRealmParams(REALM_JOB_COLUMN_MAPPING)).getValue(); - parameter.put("column_i", columns); - parameter.put("alias", "[\"A\"]"); - - List> connections = new ArrayList<>(1); - Map connection = new HashMap<>(); - Object host = ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("host"); - Object port = ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("port"); - Object database = sourceSettings.get("database").getValue(); - List> jdbcUrls = new ArrayList<>(); - Map jdbcUrl = new HashMap<>(); - jdbcUrl.put("host", host); - jdbcUrl.put("port", port); - jdbcUrl.put("database", database); - jdbcUrls.add(jdbcUrl); - connection.put("jdbcUrl", jdbcUrls); - - parameter.put("authType", "DEFAULT"); - - List tables = new ArrayList<>(); - tables.add(sourceSettings.get("table").getValue().toString()); - // connection.put("table", tables); - parameter.put("table", LabelUtils.Jackson.toJson(tables, String.class)); - - StringBuilder sql = new StringBuilder("SELECT "); - for (Iterator> iterator = columns.iterator(); iterator.hasNext(); ) { - Map field = iterator.next(); - sql.append("A.").append(field.get("name")); - if (iterator.hasNext()) { - sql.append(", "); - } else { - 
sql.append(" "); - } - } - - sql.append("FROM ").append(sourceSettings.get("table").getValue().toString()).append(" A"); - - if (null != sourceSettings.get("exchangis.job.ds.params.datax.mysql.r.where_condition") - && null != sourceSettings.get("exchangis.job.ds.params.datax.mysql.r.where_condition").getValue() - && StringUtils.isNotBlank(sourceSettings.get("exchangis.job.ds.params.datax.mysql.r.where_condition").getValue().toString())) { - sql.append(" WHERE ").append(sourceSettings.get("exchangis.job.ds.params.datax.mysql.r.where_condition").getValue().toString()); - } - - List querySql = new ArrayList<>(); - querySql.add(sql.toString()); - connection.put("querySql", querySql); - - connections.add(connection); - parameter.put("connection", connections); - - reader.put("parameter", parameter); - return reader; - } - - private Map buildMySQLWriter(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx) { - Map writer = new HashMap<>(); - writer.put("name", "mysqlwriter"); - Map parameter = new HashMap<>(); - parameter.put("haveKerberos", false); - parameter.put("connParams", new HashMap()); - - JobParamSet sinkSettings = inputJob.getRealmParams(REALM_JOB_CONTENT_SINK); - parameter.put("datasource", Integer.parseInt(sinkSettings.get("datasource").getValue().toString())); - parameter.put("authType", "DEFAULT"); - parameter.put("username", ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("username")); - String password = ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("password").toString(); - // parameter.put("password", Base64.getEncoder().encodeToString(password.getBytes(StandardCharsets.UTF_8))); - parameter.put("password", password); - - List> columns = SINK_COLUMN.newParam(inputJob.getRealmParams(REALM_JOB_COLUMN_MAPPING)).getValue(); - parameter.put("column_i", columns); - parameter.put("column", columns.stream().map(map -> map.get("name")).toArray()); - - List> connections = new ArrayList<>(1); 
- Map connection = new HashMap<>(); - String connectProtocol = "jdbc:mysql://"; - Object host = ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("host"); - Object port = ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("port"); - Object database = sinkSettings.get("database").getValue(); - Map jdbcUrl = new HashMap<>(); - jdbcUrl.put("host", host); - jdbcUrl.put("port", port); - jdbcUrl.put("database", database); - connection.put("jdbcUrl", jdbcUrl); - List tables = new ArrayList<>(); - tables.add(sinkSettings.get("table").getValue().toString()); - connection.put("table", tables); - connections.add(connection); - parameter.put("connection", connections); - - parameter.put("writeMode", sinkSettings.get("exchangis.job.ds.params.datax.mysql.w.write_type").getValue().toString().toLowerCase(Locale.ROOT)); - - writer.put("parameter", parameter); - return writer; - } - - private Map buildHiveReader(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx) { - Map reader = new HashMap<>(); - reader.put("name", "hdfsreader"); - - Map parameter = new HashMap<>(); - parameter.put("nullFormat", "\\\\N"); - parameter.put("haveKerberos", false); - parameter.put("hadoopConfig", new HashMap()); - parameter.put("authType", "NONE"); - - JobParamSet sourceSettings = inputJob.getRealmParams(REALM_JOB_CONTENT_SOURCE); - parameter.put("hiveTable", sourceSettings.get("table").getValue().toString()); - parameter.put("hiveDatabase", sourceSettings.get("database").getValue()); - parameter.put("encoding", "UTF-8"); - parameter.put("datasource", Integer.parseInt(sourceSettings.get("datasource").getValue().toString())); - try { - String location = ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("location").toString(); - URI uri = new URI(location); - if (uri.getPort() != -1) { - parameter.put("defaultFS", String.format("%s://%s:%d", uri.getScheme(), uri.getHost(), uri.getPort())); - } else { - 
parameter.put("defaultFS", String.format("%s://%s", uri.getScheme(), uri.getHost())); - } - parameter.put("path", uri.getPath()); - - } catch (URISyntaxException e) { - e.printStackTrace(); - } - - List> columns = SOURCE_COLUMN.newParam(inputJob.getRealmParams(REALM_JOB_COLUMN_MAPPING)).getValue(); - - // column -// List> columns = new ArrayList<>(); -// String[] columnsType = ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("columns-types").toString().split(":"); -// for (int i = 0; i < columnsType.length; i++) { -// Map column = new HashMap<>(); -// column.put("index", i); -// column.put("type", columnsType[i]); -// columns.add(column); -// } - parameter.put("column", columns); - - - String inputFormat = ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("file-inputformat").toString().toLowerCase(Locale.ROOT); - // org.apache.hadoop.mapred.TextInputFormat - if (inputFormat.contains("text")) { - parameter.put("fileType", "TEXT"); - parameter.put("fieldDelimiter", ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("column-name-delimiter").toString()); - } - if (inputFormat.contains("orc")) { - parameter.put("fileType", "ORC"); - } - if (inputFormat.contains("parquet")) { - parameter.put("fileType", "PARQUET"); - } - if (inputFormat.contains("rcfile")) { - parameter.put("fileType", "RCFILE"); - } - if (inputFormat.contains("sequencefile")) { - parameter.put("fileType", "SEQ"); - parameter.put("fieldDelimiter", ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("column-name-delimiter").toString()); - } - - parameter.put("hiveMetastoreUris", ctx.getDatasourceParam(sourceSettings.get("datasource").getValue().toString()).get("uris").toString()); - - reader.put("parameter", parameter); - return reader; - } - - private Map buildHiveWriter(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx) { - Map writer = new HashMap<>(); - 
writer.put("name", "hdfswriter"); - Map parameter = new HashMap<>(); - - JobParamSet sinkSettings = inputJob.getRealmParams(REALM_JOB_CONTENT_SINK); - parameter.put("nullFormat", "\\\\N"); - parameter.put("encoding", "UTF-8"); - parameter.put("hiveTable", sinkSettings.get("table").getValue().toString()); - parameter.put("hiveDatabase", sinkSettings.get("database").getValue().toString()); - parameter.put("authType", "NONE"); - parameter.put("hadoopConfig", new HashMap()); - parameter.put("haveKerberos", false); - parameter.put("fileName", "exchangis_hive_w"); - parameter.put("datasource", Integer.parseInt(sinkSettings.get("datasource").getValue().toString())); - try { - String location = ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("location").toString(); - URI uri = new URI(location); - if (uri.getPort() != -1) { - parameter.put("defaultFS", String.format("%s://%s:%d", uri.getScheme(), uri.getHost(), uri.getPort())); - } else { - parameter.put("defaultFS", String.format("%s://%s", uri.getScheme(), uri.getHost())); - } - parameter.put("path", uri.getPath()); - } catch (URISyntaxException e) { - e.printStackTrace(); - } - - parameter.put("compress", "GZIP"); - - List> columns = SINK_COLUMN.newParam(inputJob.getRealmParams(REALM_JOB_COLUMN_MAPPING)).getValue(); - - // column -// List> columns = new ArrayList<>(); -// String[] columnsType = ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("columns-types").toString().split(":"); -// for (int i = 0; i < columnsType.length; i++) { -// Map column = new HashMap<>(); -// column.put("index", i); -// column.put("type", columnsType[i]); -// columns.add(column); -// } - parameter.put("column", columns); - - String inputFormat = ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("file-inputformat").toString().toLowerCase(Locale.ROOT); - // org.apache.hadoop.mapred.TextInputFormat - if (inputFormat.contains("text")) { - 
parameter.put("fileType", "text"); - parameter.put("fieldDelimiter", ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("column-name-delimiter").toString()); - } - if (inputFormat.contains("orc")) { - parameter.put("fileType", "orc"); - } - if (inputFormat.contains("parquet")) { - parameter.put("fileType", "parquet"); - } - if (inputFormat.contains("rcfile")) { - parameter.put("fileType", "rcfile"); - } - if (inputFormat.contains("sequencefile")) { - parameter.put("fileType", "seq"); - parameter.put("fieldDelimiter", ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("column-name-delimiter").toString()); - } - - parameter.put("fileName", sinkSettings.get("table").getValue().toString()); - - String writeMdoe = sinkSettings.get("exchangis.job.ds.params.datax.hive.w.write_type").getValue().toString(); - if (writeMdoe.equals("清空目录")) { - parameter.put("writeMode", "truncate"); - } else { - parameter.put("writeMode", "append"); - } - - parameter.put("hiveMetastoreUris", ctx.getDatasourceParam(sinkSettings.get("datasource").getValue().toString()).get("uris").toString()); - - writer.put("parameter", parameter); - return writer; - } - - private List buildTransformer(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx) { - List transformers = TRANSFORMER.newParam(inputJob.getRealmParams(REALM_JOB_COLUMN_MAPPING)).getValue(); - return transformers; - } - - private Map buildSettings(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx) { - Map settings = new HashMap<>(); - settings.put("useProcessor", "false"); - - JobParamSet sourceSettings = inputJob.getRealmParams(REALM_JOB_CONTENT_SOURCE); - JobParamSet sinkSettings = inputJob.getRealmParams(REALM_JOB_CONTENT_SINK); - if (inputJob.getSinkType().equalsIgnoreCase("hive")) { - if (null != sinkSettings.get("exchangis.job.ds.params.datax.hive.w.sync_meta") - && null != sinkSettings.get("exchangis.job.ds.params.datax.hive.w.sync_meta").getValue() - && 
"是".equals(sinkSettings.get("exchangis.job.ds.params.datax.hive.w.sync_meta").getValue().toString())) { - settings.put("syncMeta", "true"); - } else { - settings.put("syncMeta", "false"); - } - } - - if (inputJob.getSourceType().equalsIgnoreCase("hive")) { - Map transport = new HashMap<>(); - if (null != sinkSettings.get("exchangis.job.ds.params.datax.hive.r.trans_proto") - && null != sinkSettings.get("exchangis.job.ds.params.datax.hive.r.trans_proto").getValue() - && "二进制".equals(sinkSettings.get("exchangis.job.ds.params.datax.hive.r.trans_proto").getValue().toString())) { - transport.put("type", "stream"); - } else { - transport.put("type", "record"); - } - settings.put("transport", transport); - } - - JobParamSet jobSettings = inputJob.getRealmParams(SubExchangisJob.REALM_JOB_SETTINGS); - if (Objects.nonNull(jobSettings)) { - Map errorLimit = new HashMap<>(); - errorLimit.put("record", "0"); - if (null != jobSettings.get("exchangis.datax.setting.errorlimit.record")) { - JobParam _errorLimit = jobSettings.get("exchangis.datax.setting.errorlimit.record"); - if (null != _errorLimit && null != _errorLimit.getValue()) { - String value = _errorLimit.getValue().toString(); - if (StringUtils.isNotBlank(value)) { - errorLimit.put("record", value); - } - } - } - settings.put("errorLimit", errorLimit); - - Map advance = new HashMap<>(); - advance.put("mMemory", "1g"); - if (null != jobSettings.get("exchangis.datax.setting.max.memory")) { - JobParam _maxMemory = jobSettings.get("exchangis.datax.setting.max.memory"); - if (null != _maxMemory && null != _maxMemory.getValue()) { - String value = _maxMemory.getValue().toString(); - if (StringUtils.isNotBlank(value)) { - advance.put("mMemory", value + "m"); - } - } - } - - settings.put("advance", advance); - - Map speed = new HashMap<>(); - speed.put("byte", "10485760"); - speed.put("record", "0"); - speed.put("channel", "0"); - - if (null != jobSettings.get("exchangis.datax.setting.speed.bytes")) { - JobParam _byteSpeed = 
jobSettings.get("exchangis.datax.setting.speed.bytes"); - if (null != _byteSpeed && null != _byteSpeed.getValue()) { - String value = _byteSpeed.getValue().toString(); - if (StringUtils.isNotBlank(value)) { - speed.put("byte", String.valueOf(Integer.parseInt(value) * 1024)); - } - } - } - - if (null != jobSettings.get("exchangis.datax.setting.speed.records")) { - JobParam _recordSpeed = jobSettings.get("exchangis.datax.setting.speed.records"); - if (null != _recordSpeed && null != _recordSpeed.getValue()) { - String value = _recordSpeed.getValue().toString(); - if (StringUtils.isNotBlank(value)) { - speed.put("record", value); - } - } - } - - if (null != jobSettings.get("exchangis.datax.setting.max.parallelism")) { - JobParam _channel = jobSettings.get("exchangis.datax.setting.max.parallelism"); - if (null != _channel && null != _channel.getValue()) { - String value = _channel.getValue().toString(); - if (StringUtils.isNotBlank(value)) { - speed.put("channel", value); - } - } - } - - settings.put("speed", speed); - - } - - return settings; - } - - public static class DataxCode { - /** - * Setting - */ - private Map setting = new HashMap<>(); - - /** - * Content list - */ - private List content = new ArrayList<>(); - - public Map getSetting() { - return setting; - } - - public void setSetting(Map setting) { - this.setting = setting; - } - - public List getContent() { - return content; - } - - public void setContent(List content) { - this.content = content; - } + private Map buildDataxCode(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx){ + JsonEntity dataxJob = JsonEntity.from("{}"); + dataxJob.set(PLUGIN_SOURCE_NAME.getKey(), PLUGIN_SOURCE_NAME.getValue(inputJob)); + Optional.ofNullable(PLUGIN_SOURCE_PARAM.getValue(inputJob)).ifPresent(source -> source.forEach((key, value) ->{ + dataxJob.set(PLUGIN_SOURCE_PARAM.getKey() + "." 
+ key, value); + })); + dataxJob.set(PLUGIN_SINK_NAME.getKey(), PLUGIN_SINK_NAME.getValue(inputJob)); + Optional.ofNullable(PLUGIN_SINK_PARAM.getValue(inputJob)).ifPresent(sink -> sink.forEach((key, value) -> { + dataxJob.set(PLUGIN_SINK_PARAM.getKey() + "." + key, value); + })); + DataxMappingContext mappingContext = COLUMN_MAPPINGS.getValue(inputJob); + if (Objects.isNull(dataxJob.get(SOURCE_COLUMNS.getKey()))) { + dataxJob.set(SOURCE_COLUMNS.getKey(), SOURCE_COLUMNS.getValue(mappingContext)); + } + if (Objects.isNull(dataxJob.get(SINK_COLUMNS.getKey()))){ + dataxJob.set(SINK_COLUMNS.getKey(), SINK_COLUMNS.getValue(mappingContext)); + } + dataxJob.set(TRANSFORM_LIST.getKey(), TRANSFORM_LIST.getValue(mappingContext)); + return dataxJob.toMap(); } - public static class Content { - /** - * Reader - */ - private Map reader = new HashMap<>(); - /** - * Writer - */ - private Map writer = new HashMap<>(); - - private List transformer = new ArrayList<>(); - - public Map getReader() { - return reader; - } - - public void setReader(Map reader) { - this.reader = reader; - } - - public Map getWriter() { - return writer; - } - - public void setWriter(Map writer) { - this.writer = writer; - } - - - public List getTransformer() { - return transformer; - } - - public void setTransformer(List transformer) { - this.transformer = transformer; - } + /** + * Setting processor info into engine job + * @param transformJob transform job + * @param engineJob engine job + */ + private void settingProcessorInfo(TransformExchangisJob.TransformSubExchangisJob transformJob, ExchangisEngineJob engineJob){ + Optional.ofNullable(transformJob.getCodeResource()).ifPresent(codeResource ->{ + engineJob.getRuntimeParams().put(PROCESSOR_SWITCH, true); + Object basePath = engineJob.getRuntimeParams().computeIfAbsent(PROCESSOR_BASE_PATH, key -> "proc/src"); + engineJob.getResources().add(new EngineBmlResource(engineJob.getEngineType(), ".", + String.valueOf(basePath) + IOUtils.DIR_SEPARATOR_UNIX + 
"code_" + transformJob.getId(), + codeResource.getResourceId(), codeResource.getVersion(), transformJob.getCreateUser())); + }); } - - public static class Transformer { - - private TransformerParameter parameter; - private String name; - - public TransformerParameter getParameter() { - return parameter; - } - - public void setParameter(TransformerParameter parameter) { - this.parameter = parameter; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - + private String[] getResourcesPaths(SubExchangisJob inputJob){ + return new String[]{ + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + IOUtils.DIR_SEPARATOR_UNIX + "reader" + IOUtils.DIR_SEPARATOR_UNIX + + PLUGIN_SOURCE_NAME.getValue(inputJob), + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + IOUtils.DIR_SEPARATOR_UNIX + "writer" + IOUtils.DIR_SEPARATOR_UNIX + + PLUGIN_SINK_NAME.getValue(inputJob) + }; } - - public static class TransformerParameter { - - private int columnIndex; - private String[] paras; - - public int getColumnIndex() { - return columnIndex; - } - - public void setColumnIndex(int columnIndex) { - this.columnIndex = columnIndex; - } - - public String[] getParas() { - return paras; - } - - public void setParas(String[] paras) { - this.paras = paras; - } + // core.processor.loader.plugin.sourcePath + /** + * Plugin name + * @param typeName type name + * @param suffix suffix + * @return plugin name + */ + private static String getPluginName(String typeName, String suffix){ + return Objects.nonNull(typeName) ? 
PLUGIN_NAME_MAPPER.getOrDefault(typeName.toLowerCase(Locale.ROOT), + typeName.toLowerCase(Locale.ROOT)) + + suffix : null; } - } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java new file mode 100644 index 000000000..5bc868162 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java @@ -0,0 +1,181 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.engine; + +import java.util.ArrayList; +import java.util.List; + +/** + * Datax mapping context + */ +public class DataxMappingContext { + + /** + * Source columns + */ + private List sourceColumns = new ArrayList<>(); + + /** + * Sink columns + */ + private List sinkColumns = new ArrayList<>(); + + /** + * Transform + */ + private List transformers = new ArrayList<>(); + + public List getSourceColumns() { + return sourceColumns; + } + + public void setSourceColumns(List sourceColumns) { + this.sourceColumns = sourceColumns; + } + + public List getSinkColumns() { + return sinkColumns; + } + + public void setSinkColumns(List sinkColumns) { + this.sinkColumns = sinkColumns; + } + + public List getTransformers() { + return transformers; + } + + public void setTransformers(List transformers) { + this.transformers = transformers; + } + + /** + * Column entity + */ + public static class Column{ + /** + * Colum name + */ + private String name; + + /** + * Column type + */ + private String type; + + /** + * Index name + */ + private String index; + + public Column(){ + + } + + public Column(String name, String type, String index){ + this.name = name; + this.type = type; + this.index = index; + } + public String getName() { + return name; + } + + public void setName(String name) { + this.name 
= name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + } + + /** + * Transformer + */ + public static class Transformer { + + /** + * Parameter context + */ + private Parameter parameter = new Parameter(); + /** + * Name + */ + private String name; + + public Transformer(){ + + } + + public Transformer(String name, Parameter parameter){ + this.name = name; + this.parameter = parameter; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Parameter getParameter() { + return parameter; + } + + public void setParameter(Parameter parameter) { + this.parameter = parameter; + } + + /** + * Parameter + */ + public static class Parameter { + /** + * Index + */ + private String columnIndex; + + /** + * Params + */ + private List paras = new ArrayList<>(); + + public Parameter(){ + + } + + public Parameter(List paras){ + this.paras = paras; + } + + public String getColumnIndex() { + return columnIndex; + } + + public void setColumnIndex(String columnIndex) { + this.columnIndex = columnIndex; + } + + public List getParas() { + return paras; + } + + public void setParas(List paras) { + this.paras = paras; + } + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java index 1ab5241d9..73d4f0a91 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java @@ -10,17 +10,9 @@ public class SqoopExchangisEngineJob extends ExchangisEngineJob { //Empty - public SqoopExchangisEngineJob(){ - - } - public SqoopExchangisEngineJob(ExchangisEngineJob engineJob){ - if (Objects.nonNull(engineJob)) { - setName(engineJob.getName()); - setEngineType(engineJob.getEngineType()); - getJobContent().putAll(engineJob.getJobContent()); - getRuntimeParams().putAll(engineJob.getRuntimeParams()); - setMemoryUsed(engineJob.getMemoryUsed()); - } + super(engineJob); } + + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java index 7ab5e2fd7..5db542a42 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java @@ -2,6 +2,7 @@ import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; @@ -13,8 +14,9 @@ import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import 
com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.AbstractLoggingExchangisJobBuilder; import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; -import com.webank.wedatasphere.exchangis.job.server.builder.ServiceInExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -23,7 +25,6 @@ import java.net.URLEncoder; import java.util.*; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; import java.util.stream.Collectors; import static com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob.REALM_JOB_CONTENT_SINK; @@ -58,34 +59,34 @@ public enum MODE_TYPE { IMPORT, EXPORT} /** * Verbose, default null (means not open verbose) */ - private static final JobParamDefine VERBOSE = JobParams.define("sqoop.args.verbose", (BiFunction) (k, job) -> null); + private static final JobParamDefine VERBOSE = JobParams.define("sqoop.args.verbose", job -> null, SubExchangisJob.class); /** * Sqoop mode */ - private static final JobParamDefine MODE = JobParams.define("sqoop.mode", (BiFunction) (k, job) -> SUPPORT_BIG_DATA_TYPES.contains(job.getSourceType().toUpperCase())? "export": "import"); + private static final JobParamDefine MODE = JobParams.define("sqoop.mode", job -> SUPPORT_BIG_DATA_TYPES.contains(job.getSourceType().toUpperCase())? "export": "import", SubExchangisJob.class); - private static final JobParamDefine MODE_ENUM = JobParams.define("sqoop.mode.enum", (BiFunction) (k, job) -> SUPPORT_BIG_DATA_TYPES.contains(job.getSourceType().toUpperCase())? EXPORT: IMPORT); + private static final JobParamDefine MODE_ENUM = JobParams.define("sqoop.mode.enum", job -> SUPPORT_BIG_DATA_TYPES.contains(job.getSourceType().toUpperCase())? 
EXPORT: IMPORT, SubExchangisJob.class); /** * Sqoop RDBMS mode params */ - private static final JobParamDefine MODE_RDBMS_PARAMS = JobParams.define("sqoop.mode.rdbms.params", (BiFunction) (k, job) -> { + private static final JobParamDefine MODE_RDBMS_PARAMS = JobParams.define("sqoop.mode.rdbms.params", job -> { MODE_TYPE modeParam = MODE_ENUM.getValue(job); return modeParam.equals(IMPORT)? job.getRealmParams(REALM_JOB_CONTENT_SOURCE) : job.getRealmParams(REALM_JOB_CONTENT_SINK); - }); + }, SubExchangisJob.class); /** * Sqoop hadoop mode params */ - private static final JobParamDefine MODE_HADOOP_PARAMS = JobParams.define("sqoop.mode.hadoop.params", (BiFunction) (k, job) -> { + private static final JobParamDefine MODE_HADOOP_PARAMS = JobParams.define("sqoop.mode.hadoop.params", job -> { MODE_TYPE modeParam = MODE_ENUM.getValue(job); return modeParam.equals(IMPORT)? job.getRealmParams(REALM_JOB_CONTENT_SINK) : job.getRealmParams(REALM_JOB_CONTENT_SOURCE); - }); + }, SubExchangisJob.class); /** * Hive-partition-map */ @SuppressWarnings("unchecked") - private static final JobParamDefine> PARTITION_MAP = JobParams.define("sqoop.partition.map", (BiFunction>) (k, job) -> { + private static final JobParamDefine> PARTITION_MAP = JobParams.define("sqoop.partition.map", job -> { if ("hive".equalsIgnoreCase(job.getSinkType()) || "hive".equalsIgnoreCase(job.getSourceType())){ JobParam partitionParam = MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.PARTITION); if (Objects.nonNull(partitionParam)) { @@ -109,18 +110,18 @@ public enum MODE_TYPE { IMPORT, EXPORT} } } return null; - }); + }, SubExchangisJob.class); /** * Meta columns */ - private static final JobParamDefine> META_COLUMNS = JobParams.define("sqoop.meta.table.columns", (BiFunction>) (k, paramSet) -> { - ServiceInExchangisJobBuilderContext context = getServiceInBuilderContext(); + private static final JobParamDefine> META_COLUMNS = JobParams.define("sqoop.meta.table.columns", paramSet -> { + 
SpringExchangisJobBuilderContext context = getSpringBuilderContext(); JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); JobParam database = paramSet.get(JobParamConstraints.DATABASE, String.class); JobParam table = paramSet.get(JobParamConstraints.TABLE, String.class); try { - return context.getMetadataInfoService().getColumns(context.getOriginalJob().getCreateUser(), + return getBean(MetadataInfoService.class).getColumns(context.getOriginalJob().getCreateUser(), Long.valueOf(dataSourceId.getValue()), database.getValue(), table.getValue()); } catch (ExchangisDataSourceException e) { throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); @@ -130,17 +131,17 @@ public enum MODE_TYPE { IMPORT, EXPORT} /** * Meta hadoop columns */ - private static final JobParamDefine> META_HADOOP_COLUMNS = JobParams.define("sqoop.meta.hadoop.table.columns", (BiFunction>) (k, job) -> META_COLUMNS.newValue(MODE_HADOOP_PARAMS.getValue(job))); + private static final JobParamDefine> META_HADOOP_COLUMNS = JobParams.define("sqoop.meta.hadoop.table.columns", job -> META_COLUMNS.newValue(MODE_HADOOP_PARAMS.getValue(job)), SubExchangisJob.class); /** * Meta rdbms columns */ - private static final JobParamDefine> META_RDBMS_COLUMNS = JobParams.define("sqoop.meta.rdbms.table.columns", (BiFunction>) (k, job) -> META_COLUMNS.newValue(MODE_RDBMS_PARAMS.getValue(job))); + private static final JobParamDefine> META_RDBMS_COLUMNS = JobParams.define("sqoop.meta.rdbms.table.columns", job -> META_COLUMNS.newValue(MODE_RDBMS_PARAMS.getValue(job)), SubExchangisJob.class); /** * Meta table/partition props */ - private static final JobParamDefine> META_HADOOP_TABLE_PROPS = JobParams.define("sqoop.meta.hadoop.table.props", (BiFunction>) (k, job) ->{ - ServiceInExchangisJobBuilderContext context = getServiceInBuilderContext(); + private static final JobParamDefine> META_HADOOP_TABLE_PROPS = JobParams.define("sqoop.meta.hadoop.table.props", job ->{ + 
SpringExchangisJobBuilderContext context = getSpringBuilderContext(); ExchangisJobInfo jobInfo = context.getOriginalJob(); // Use the creator as userName String userName = jobInfo.getCreateUser(); @@ -150,37 +151,39 @@ public enum MODE_TYPE { IMPORT, EXPORT} JobParam table = hadoopParamSet.get(JobParamConstraints.TABLE, String.class); Map partition = PARTITION_MAP.getValue(job); try { - if (Objects.isNull(partition)) { - return context.getMetadataInfoService().getTableProps(userName, Long.valueOf(dataSourceId.getValue()), - database.getValue(), table.getValue()); - } else { - return context.getMetadataInfoService().getPartitionProps(userName, Long.valueOf(dataSourceId.getValue()), + if (Objects.nonNull(partition)) { + Map props = getBean(MetadataInfoService.class).getPartitionProps(userName, Long.valueOf(dataSourceId.getValue()), database.getValue(), table.getValue(), URLEncoder.encode(partition.entrySet().stream().map(entry -> entry.getKey() + "=" + entry.getValue() ).collect(Collectors.joining(",")), "UTF-8")); + if (!props.isEmpty()){ + return props; + } } + return getBean(MetadataInfoService.class).getTableProps(userName, Long.valueOf(dataSourceId.getValue()), + database.getValue(), table.getValue()); } catch (ExchangisDataSourceException e) { throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); } catch (UnsupportedEncodingException e) { throw new ExchangisJobException.Runtime(-1, e.getMessage(), e); } - }); + }, SubExchangisJob.class); - private static final JobParamDefine IS_TEXT_FILE_TYPE = JobParams.define("sqoop.file.is.text", (BiFunction)(k, job) -> { + private static final JobParamDefine IS_TEXT_FILE_TYPE = JobParams.define("sqoop.file.is.text", job -> { Map tableProps = META_HADOOP_TABLE_PROPS.getValue(job); return HADOOP_TEXT_INPUT_FORMAT.contains(tableProps.getOrDefault(META_INPUT_FORMAT, "")) || HADOOP_TEXT_OUTPUT_FORMAT.contains(tableProps.getOrDefault(META_OUTPUT_FORMAT, "")); - }); + }, SubExchangisJob.class); /** 
* * Whether hcatalog */ - private static final JobParamDefine IS_USE_HCATALOG = JobParams.define("sqoop.use.hcatalog", (BiFunction)(k, job) -> MODE_ENUM.getValue(job) == EXPORT || !IS_TEXT_FILE_TYPE.getValue(job)); + private static final JobParamDefine IS_USE_HCATALOG = JobParams.define("sqoop.use.hcatalog", job -> MODE_ENUM.getValue(job) == EXPORT || !IS_TEXT_FILE_TYPE.getValue(job), SubExchangisJob.class); /** * Driver default 'com.mysql.jdbc.Driver' */ - private static final JobParamDefine CONNECT_DRIVER = JobParams.define("sqoop.args.driver", (BiFunction)(k, job) -> "com.mysql.jdbc.Driver"); + private static final JobParamDefine CONNECT_DRIVER = JobParams.define("sqoop.args.driver", job -> "com.mysql.jdbc.Driver", SubExchangisJob.class); /** * Protocol @@ -190,7 +193,7 @@ public enum MODE_TYPE { IMPORT, EXPORT} /** * Number of mapper */ - private static final JobParamDefine NUM_MAPPERS = JobParams.define("sqoop.args.num.mappers", (BiFunction) (k, job) -> { + private static final JobParamDefine NUM_MAPPERS = JobParams.define("sqoop.args.num.mappers", job -> { int numMappers = 1; JobParamSet settings = job.getRealmParams(REALM_JOB_SETTINGS); JobParam parallel = settings.get(JobParamConstraints.SETTINGS_MAX_PARALLEL); @@ -203,12 +206,12 @@ public enum MODE_TYPE { IMPORT, EXPORT} } } return numMappers; - }); + }, SubExchangisJob.class); /** * Connect string */ - private static final JobParamDefine CONNECT_STRING = JobParams.define("sqoop.args.connect", (BiFunction) (k, job) -> { + private static final JobParamDefine CONNECT_STRING = JobParams.define("sqoop.args.connect", job -> { JobParamSet paramSet = MODE_RDBMS_PARAMS.getValue(job); String host = paramSet.get(JobParamConstraints.HOST, String.class).getValue(); String database = paramSet.get(JobParamConstraints.DATABASE, String.class).getValue(); @@ -229,25 +232,25 @@ public enum MODE_TYPE { IMPORT, EXPORT} }).filter(StringUtils::isNotBlank).collect(Collectors.joining("&")); } return connectStr; - }); + }, 
SubExchangisJob.class); /** * Username */ - private static final JobParamDefine USERNAME = JobParams.define("sqoop.args.username", (BiFunction) (k, job) -> - MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.USERNAME, String.class).getValue()); + private static final JobParamDefine USERNAME = JobParams.define("sqoop.args.username", job -> + MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.USERNAME, String.class).getValue(), SubExchangisJob.class); /** * Password */ - private static final JobParamDefine PASSWORD = JobParams.define("sqoop.args.password", (BiFunction) (k, job) -> - MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.PASSWORD, String.class).getValue()); + private static final JobParamDefine PASSWORD = JobParams.define("sqoop.args.password", job -> + MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.PASSWORD, String.class).getValue(), SubExchangisJob.class); /** * Table */ - private static final JobParamDefine TABLE = JobParams.define("sqoop.args.table", (BiFunction) (k, job) -> - MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue()); + private static final JobParamDefine TABLE = JobParams.define("sqoop.args.table", job -> + MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue(), SubExchangisJob.class); /** * Import: Query string in params, //TODO where to use query @@ -257,7 +260,7 @@ public enum MODE_TYPE { IMPORT, EXPORT} /** * Import: Where */ - private static final JobParamDefine WHERE_CLAUSE = JobParams.define("sqoop.args.where", (BiFunction) (k, job) -> { + private static final JobParamDefine WHERE_CLAUSE = JobParams.define("sqoop.args.where", job -> { if (MODE_ENUM.getValue(job) == MODE_TYPE.IMPORT){ JobParam where = MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.WHERE); if (Objects.nonNull(where) && StringUtils.isNotBlank(where.getValue())){ @@ -265,33 +268,33 @@ public enum MODE_TYPE { IMPORT, EXPORT} } } return null; - }); + }, 
SubExchangisJob.class); /** * Import: Hive-import */ - private static final JobParamDefine HIVE_IMPORT = JobParams.define("sqoop.args.hive.import", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_IMPORT = JobParams.define("sqoop.args.hive.import", job -> { if (MODE_ENUM.getValue(job) == IMPORT && job.getSinkType().equalsIgnoreCase("hive") && !IS_USE_HCATALOG.getValue(job)){ return ""; } return null; - }); + }, SubExchangisJob.class); /** * Export: Hive-export */ - private static final JobParamDefine HIVE_EXPORT = JobParams.define("sqoop.hive.export", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_EXPORT = JobParams.define("sqoop.hive.export", job -> { if (MODE_ENUM.getValue(job) == EXPORT && job.getSourceType().equalsIgnoreCase("hive") && !IS_USE_HCATALOG.getValue(job)){ return ""; } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-overwrite */ - private static final JobParamDefine HIVE_OVERWRITE = JobParams.define("sqoop.args.hive.overwrite", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_OVERWRITE = JobParams.define("sqoop.args.hive.overwrite", job -> { if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ JobParam writeMode = MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE); if (Objects.nonNull(writeMode) && "overwrite".equalsIgnoreCase(writeMode.getValue())){ @@ -299,55 +302,55 @@ public enum MODE_TYPE { IMPORT, EXPORT} } } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-database */ - private static final JobParamDefine HIVE_DATABASE = JobParams.define("sqoop.args.hive.database", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_DATABASE = JobParams.define("sqoop.args.hive.database", job -> { if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.DATABASE, String.class).getValue(); } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-table */ - 
private static final JobParamDefine HIVE_TABLE = JobParams.define("sqoop.args.hive.table", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_TABLE = JobParams.define("sqoop.args.hive.table", job -> { if (Objects.nonNull(HIVE_DATABASE.getValue(job))) { return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue(); } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-partition-key */ - private static final JobParamDefine HIVE_PARTITION_KEY = JobParams.define("sqoop.args.hive.partition.key", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_PARTITION_KEY = JobParams.define("sqoop.args.hive.partition.key", job -> { AtomicReference keys = new AtomicReference<>(null); if (Objects.nonNull(HIVE_TABLE.getValue(job))){ Optional.ofNullable(PARTITION_MAP.getValue(job)).ifPresent(partitionMap -> keys.set(StringUtils.join(partitionMap.keySet(), ","))); } return keys.get(); - }); + }, SubExchangisJob.class); /** * Import: Hive-partition-values */ - private static final JobParamDefine HIVE_PARTITION_VALUE = JobParams.define("sqoop.args.hive.partition.value", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_PARTITION_VALUE = JobParams.define("sqoop.args.hive.partition.value", job -> { if (Objects.nonNull(HIVE_PARTITION_KEY.getValue(job))){ return StringUtils.join(PARTITION_MAP.getValue(job).values(), ","); } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-append */ - private static final JobParamDefine HIVE_APPEND = JobParams.define("sqoop.args.append", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_APPEND = JobParams.define("sqoop.args.append", job -> { // if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ // JobParam writeMode = MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE); // if (Objects.nonNull(writeMode) && "append".equalsIgnoreCase(writeMode.getValue())){ @@ -355,74 +358,74 @@ public enum MODE_TYPE { 
IMPORT, EXPORT} // } // } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-target-dir\] */ - private static final JobParamDefine HIVE_TARGET_DIR = JobParams.define("sqoop.args.target.dir", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_TARGET_DIR = JobParams.define("sqoop.args.target.dir", job -> { if (Objects.nonNull(HIVE_IMPORT.getValue(job)) && Objects.nonNull(QUERY_STRING.getValue(job))){ return "/user/linkis/exchangis/sqoop/" + HIVE_TABLE.getValue(job) + "/"; } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-delete-target-dir */ - private static final JobParamDefine HIVE_DELETE_TARGET = JobParams.define("sqoop.args.delete.target.dir", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_DELETE_TARGET = JobParams.define("sqoop.args.delete.target.dir", job -> { if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ return ""; } return null; - }); + }, SubExchangisJob.class); /** * Import: Hive-fields-terminated-by */ - private static final JobParamDefine HIVE_FIELDS_TERMINATED_BY = JobParams.define("sqoop.args.fields.terminated.by", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_FIELDS_TERMINATED_BY = JobParams.define("sqoop.args.fields.terminated.by", job -> { if (MODE_ENUM.getValue(job) == IMPORT && "hive".equalsIgnoreCase(job.getSinkType())){ return META_HADOOP_TABLE_PROPS.getValue(job).getOrDefault(META_FIELD_DELIMITER, "\u0001"); } return null; - }); + }, SubExchangisJob.class); /** * TODO get the properties from hive * Import: Hive-null-string */ - private static final JobParamDefine HIVE_NULL_STRING = JobParams.define("sqoop.args.null.string", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_NULL_STRING = JobParams.define("sqoop.args.null.string", job -> { if (MODE_ENUM.getValue(job) == IMPORT && "hive".equalsIgnoreCase(job.getSinkType())){ return "\\\\N"; } return null; - }); + }, SubExchangisJob.class); /** * TODO get the properties from 
hive * Import: Hive-null-non-string */ - private static final JobParamDefine HIVE_NULL_NON_STRING = JobParams.define("sqoop.args.null.non.string", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_NULL_NON_STRING = JobParams.define("sqoop.args.null.non.string", job -> { if (MODE_ENUM.getValue(job) == IMPORT && "hive".equalsIgnoreCase(job.getSinkType())){ return "\\\\N"; } return null; - }); + }, SubExchangisJob.class); /** * TODO get the properties from hive to build the export directory * Export: Export-dir */ - private static final JobParamDefine EXPORT_DIR = JobParams.define("sqoop.args.export.dir", (BiFunction) (k, job) -> { + private static final JobParamDefine EXPORT_DIR = JobParams.define("sqoop.args.export.dir", job -> { if (Objects.nonNull(HIVE_EXPORT.getValue(job))){ } return null; - }); + }, SubExchangisJob.class); /** * Export: Update-key */ - private static final JobParamDefine UPDATE_KEY = JobParams.define("sqoop.args.update.key", (BiFunction) (k, job) -> { + private static final JobParamDefine UPDATE_KEY = JobParams.define("sqoop.args.update.key", job -> { if (MODE_ENUM.getValue(job) == EXPORT ){ JobParam writeMode = MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE, String.class); if (Objects.nonNull(writeMode) && StringUtils.isNotBlank(writeMode.getValue()) && !"insert".equalsIgnoreCase(writeMode.getValue())){ @@ -431,127 +434,111 @@ public enum MODE_TYPE { IMPORT, EXPORT} } } return null; - }); + }, SubExchangisJob.class); /** * Export: Update mode */ - private static final JobParamDefine UPDATE_MODE = JobParams.define("sqoop.args.update.mode", (BiFunction) (k, job) -> { + private static final JobParamDefine UPDATE_MODE = JobParams.define("sqoop.args.update.mode", job -> { if (StringUtils.isNotBlank(UPDATE_KEY.getValue(job))){ JobParam writeMode = MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE, String.class); return "update".equals(writeMode.getValue())? 
"allowinsert" : "updateonly"; } return null; - }); + }, SubExchangisJob.class); /** * Export: Hcatalog-database */ - private static final JobParamDefine HCATALOG_DATABASE = JobParams.define("sqoop.args.hcatalog.database", (BiFunction) (k, job) ->{ + private static final JobParamDefine HCATALOG_DATABASE = JobParams.define("sqoop.args.hcatalog.database", job ->{ if (IS_USE_HCATALOG.getValue(job)){ return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.DATABASE, String.class).getValue(); } return null; - }); + }, SubExchangisJob.class); /** * Export: Hcatalog-table */ - private static final JobParamDefine HCATALOG_TABLE = JobParams.define("sqoop.args.hcatalog.table", (BiFunction) (k, job) ->{ + private static final JobParamDefine HCATALOG_TABLE = JobParams.define("sqoop.args.hcatalog.table", job ->{ if (Objects.nonNull(HCATALOG_DATABASE.getValue(job))){ return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue(); } return null; - }); + }, SubExchangisJob.class); /** * Export: Hcatalog-partition-key */ - private static final JobParamDefine HCATALOG_PARTITION_KEY = JobParams.define("sqoop.args.hcatalog.partition.keys", (BiFunction) (k, job) -> { + private static final JobParamDefine HCATALOG_PARTITION_KEY = JobParams.define("sqoop.args.hcatalog.partition.keys", job -> { AtomicReference keys = new AtomicReference<>(null); if (Objects.nonNull(HCATALOG_TABLE.getValue(job))){ Optional.ofNullable(PARTITION_MAP.getValue(job)).ifPresent(partitionMap -> keys.set(StringUtils.join(partitionMap.keySet(), ","))); } return keys.get(); - }); + }, SubExchangisJob.class); /** * Export: Hcatalog-partition-values */ - private static final JobParamDefine HCATALOG_PARTITION_VALUE = JobParams.define("sqoop.args.hcatalog.partition.values", (BiFunction) (k, job) -> { + private static final JobParamDefine HCATALOG_PARTITION_VALUE = JobParams.define("sqoop.args.hcatalog.partition.values", job -> { if 
(Objects.nonNull(HCATALOG_PARTITION_KEY.getValue(job))){ return StringUtils.join(PARTITION_MAP.getValue(job).values(), ","); } return null; - }); + }, SubExchangisJob.class); /** * TODO get the properties from hive * Export: Hive-input-fields-terminated-by */ - private static final JobParamDefine HIVE_INPUT_FIELDS_TERMINATED_KEY = JobParams.define("sqoop.args.input.fields.terminated.by", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_INPUT_FIELDS_TERMINATED_KEY = JobParams.define("sqoop.args.input.fields.terminated.by", job -> { if (MODE_ENUM.getValue(job) == EXPORT && "hive".equalsIgnoreCase(job.getSourceType())){ return META_HADOOP_TABLE_PROPS.getValue(job).getOrDefault(META_FIELD_DELIMITER, "\u0001"); } return null; - }); + }, SubExchangisJob.class); /** * TODO get the properties from hive * Export: Hive-input-null-string */ - private static final JobParamDefine HIVE_INPUT_NULL_STRING = JobParams.define("sqoop.args.input.null.string", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_INPUT_NULL_STRING = JobParams.define("sqoop.args.input.null.string", job -> { if (MODE_ENUM.getValue(job) == EXPORT && "hive".equalsIgnoreCase(job.getSourceType())){ return "\\\\N"; } return null; - }); + }, SubExchangisJob.class); /** * TODO get the properties from hive * Export: Hive-input-null-non-string */ - private static final JobParamDefine HIVE_INPUT_NULL_NON_STRING = JobParams.define("sqoop.args.input.null.non.string", (BiFunction) (k, job) -> { + private static final JobParamDefine HIVE_INPUT_NULL_NON_STRING = JobParams.define("sqoop.args.input.null.non.string", job -> { if (MODE_ENUM.getValue(job) == EXPORT && "hive".equalsIgnoreCase(job.getSourceType())){ return "\\\\N"; } return null; - }); + }, SubExchangisJob.class); - /** - * Mapping params (ExchangisJobContent -> transforms -> mapping) - */ - private static final JobParamDefine>> TRANSFORM_MAPPING = JobParams.define("sqoop.transform.mapping", "mapping"); - /** - * Source 
field name in mapping - */ - private static final JobParamDefine SOURCE_FIELD_NAME = JobParams.define("sqoop.source.name", "source_field_name", String.class); - /** - * Sink field name in mapping - */ - private static final JobParamDefine SINK_FIELD_NAME = JobParams.define("sqoop.sink.name", "sink_field_name", String.class); - /** * Column serializer */ - private static final JobParamDefine COLUMN_SERIAL = JobParams.define("sqoop.args.columns", (BiFunction) (key, job) -> { - List> mappings = TRANSFORM_MAPPING.getValue(job.getRealmParams(SubExchangisJob.REALM_JOB_COLUMN_MAPPING)); + private static final JobParamDefine COLUMN_SERIAL = JobParams.define("sqoop.args.columns", job -> { List columnSerial = new ArrayList<>(); - if (Objects.nonNull(mappings)) { - if (SUPPORT_RDBMS_TYPES.contains(job.getSourceType().toUpperCase())) { - mappings.forEach(mapping -> Optional.ofNullable(SOURCE_FIELD_NAME.newParam(mapping).getValue()).ifPresent(columnSerial::add)); - } else if (SUPPORT_RDBMS_TYPES.contains(job.getSinkType().toUpperCase())) { - mappings.forEach(mapping -> Optional.ofNullable(SINK_FIELD_NAME.newParam(mapping).getValue()).ifPresent(columnSerial::add)); - } + if (SUPPORT_RDBMS_TYPES.contains(job.getSourceType().toUpperCase())) { + job.getSourceColumns().forEach(columnDefine -> columnSerial.add(columnDefine.getName())); + } else if (SUPPORT_RDBMS_TYPES.contains(job.getSinkType().toUpperCase())) { + job.getSinkColumns().forEach(columnDefine -> columnSerial.add(columnDefine.getName())); } return StringUtils.join(columnSerial, ","); - }); + }, SubExchangisJob.class); /** * Inspection of the definitions above */ - private static final JobParamDefine DEFINE_INSPECTION = JobParams.define("", (BiFunction) (key, job) -> { + private static final JobParamDefine DEFINE_INSPECTION = JobParams.define("", job -> { List rdbmsColumns = new ArrayList<>(Arrays.asList(COLUMN_SERIAL.getValue(job).split(","))); List hadoopColumns = 
META_HADOOP_COLUMNS.getValue(job).stream().map(MetaColumn::getName) .collect(Collectors.toList()); @@ -566,7 +553,7 @@ public enum MODE_TYPE { IMPORT, EXPORT} job.getName(), job.getId()); } return null; - }); + }, SubExchangisJob.class); @Override public int priority() { return 1; @@ -589,7 +576,7 @@ public ExchangisEngineJob buildJob(SubExchangisJob inputJob, ExchangisEngineJob engineJob.setCreateUser(inputJob.getCreateUser()); return engineJob; } catch (Exception e) { - throw new ExchangisJobException(ExchangisJobExceptionCode.ENGINE_JOB_ERROR.getCode(), + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_ENGINE_ERROR.getCode(), "Fail to build sqoop engine job, message:[" + e.getMessage() + "]", e); } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java index b9f66b5c5..fe06a540c 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java @@ -1,14 +1,19 @@ package com.webank.wedatasphere.exchangis.job.server.builder.transform; +import com.webank.wedatasphere.exchangis.common.linkis.bml.BmlResource; import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent; import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; -import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder; import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; import 
com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.AbstractLoggingExchangisJobBuilder; import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.SubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformProcessorDao; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; import org.apache.commons.lang.StringUtils; import org.apache.linkis.common.exception.ErrorException; import org.apache.linkis.common.utils.ClassUtils; @@ -23,7 +28,7 @@ /** * TransformJob builder */ -public class GenericExchangisTransformJobBuilder extends AbstractExchangisJobBuilder { +public class GenericExchangisTransformJobBuilder extends AbstractLoggingExchangisJobBuilder { private static final Logger LOG = LoggerFactory.getLogger(GenericExchangisTransformJobBuilder.class); @@ -32,6 +37,11 @@ public class GenericExchangisTransformJobBuilder extends AbstractExchangisJobBui */ private static final Map handlerHolders = new ConcurrentHashMap<>(); + /** + * Transform dao + */ + private JobTransformProcessorDao transformProcessorDao; + public synchronized void initHandlers() { //Should define wds.linkis.reflect.scan.package in properties Set> jobHandlerSet = ClassUtils.reflections().getSubTypesOf(SubExchangisJobHandler.class); @@ -78,10 +88,11 @@ public TransformExchangisJob buildJob(ExchangisJobInfo inputJob, TransformExchan inputJob.getId(), inputJob.getName(), contents.size()); //Second to new SubExchangisJob instances List subExchangisJobs = contents.stream().map(job -> { - 
TransformExchangisJob.SubExchangisJobAdapter jobAdapter = new TransformExchangisJob.SubExchangisJobAdapter(job); - jobAdapter.setId(inputJob.getId()); - jobAdapter.setCreateUser(outputJob.getCreateUser()); - return jobAdapter; + TransformExchangisJob.TransformSubExchangisJob transformSubJob = new TransformExchangisJob.TransformSubExchangisJob(job); + transformSubJob.setId(inputJob.getId()); + transformSubJob.setCreateUser(outputJob.getCreateUser()); + setTransformCodeResource(transformSubJob); + return transformSubJob; }) .collect(Collectors.toList()); outputJob.setSubJobSet(subExchangisJobs); @@ -118,19 +129,45 @@ public TransformExchangisJob buildJob(ExchangisJobInfo inputJob, TransformExchan } } }else{ - throw new ExchangisJobException(ExchangisJobExceptionCode.TRANSFORM_JOB_ERROR.getCode(), + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_TRANSFORM_ERROR.getCode(), "Illegal content string: [" + inputJob.getJobContent() + "] in job, please check", null); } }else{ LOG.warn("It looks like an empty job ? 
id: [{}], name: [{}]", inputJob.getId(), inputJob.getName()); } }catch(Exception e){ - throw new ExchangisJobException(ExchangisJobExceptionCode.TRANSFORM_JOB_ERROR.getCode(), + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_TRANSFORM_ERROR.getCode(), "Fail to build transformJob from input job, message: [" + e.getMessage() + "]", e); } return outputJob; } + /** + * Set the code resource to transform job + * @param subExchangisJob sub transform job + */ + private void setTransformCodeResource(TransformExchangisJob.TransformSubExchangisJob subExchangisJob){ + if (subExchangisJob.getTransformType() == TransformTypes.PROCESSOR){ + TransformProcessor processor = getTransformProcessorDao().getProcInfo( + Long.valueOf(subExchangisJob.getJobInfoContent().getTransforms().getCodeId())); + if (Objects.nonNull(processor)){ + // TODO maybe the content of processor doesn't store in bml + subExchangisJob.addCodeResource(new + BmlResource(processor.getCodeBmlResourceId(), processor.getCodeBmlVersion())); + } + } + } + + /** + * Processor dao + * @return dao + */ + private JobTransformProcessorDao getTransformProcessorDao(){ + if (null == transformProcessorDao) { + this.transformProcessorDao = SpringContextHolder.getBean(JobTransformProcessorDao.class); + } + return this.transformProcessorDao; + } /** * Chain */ @@ -178,10 +215,5 @@ public void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderConte } } - public static void main(String[] args) { - String code = "[{\"subJobs\":[{\"subJobName\":\"Copy 
ID\",\"dataSourceIds\":{\"source\":{\"type\":\"MYSQL\",\"id\":\"111\",\"ds\":\"MYSQL_LIU\",\"db\":\"ide_gz_bdap_sit_01\",\"table\":\"dss_project_publish_history\"},\"sink\":{\"type\":\"HIVE\",\"id\":\"113\",\"ds\":\"hive_uat\",\"db\":\"hduser05db\",\"table\":\"dss_project_publish_history\"}},\"params\":{\"sources\":[{\"key\":\"where\",\"field\":\"where\",\"label\":\"WHERE条件\",\"sort\":2,\"value\":\"\",\"defaultValue\":\"\",\"unit\":\"\",\"required\":false,\"validateType\":\"REGEX\",\"validateRange\":\"^[\\\\s\\\\S]{0,500}$\",\"validateMsg\":\"WHERE条件输入过长\",\"source\":\"\",\"type\":\"INPUT\"}],\"sinks\":[{\"key\":\"writeMode\",\"field\":\"writeMode\",\"label\":\"写入方式(OVERWRITE只对TEXT类型表生效)\",\"values\":[\"OVERWRITE\",\"APPEND\"],\"value\":\"OVERWRITE\",\"defaultValue\":\"OVERWRITE\",\"sort\":1,\"unit\":\"\",\"required\":true,\"type\":\"OPTION\"},{\"key\":\"partition\",\"field\":\"partition\",\"label\":\"分区信息(文本)\",\"sort\":2,\"value\":null,\"defaultValue\":null,\"unit\":\"\",\"required\":false,\"validateType\":\"REGEX\",\"validateRange\":\"^[\\\\s\\\\S]{0,50}$\",\"validateMsg\":\"分区信息过长\",\"source\":\"/api/rest_j/v1/exchangis/datasources/render/partition/element/map\",\"type\":\"MAP\"}]},\"transforms\":{\"addEnable\":false,\"type\":\"MAPPING\",\"sql\":null,\"mapping\":[{\"validator\":null,\"transformer\":null,\"source_field_name\":\"id\",\"source_field_type\":\"BIGINT\",\"sink_field_name\":\"id\",\"sink_field_type\":\"int\",\"deleteEnable\":false,\"source_field_index\":0,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"project_version_id\",\"source_field_type\":\"BIGINT\",\"sink_field_name\":\"project_version_id\",\"sink_field_type\":\"int\",\"deleteEnable\":false,\"source_field_index\":1,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"creator_id\",\"source_field_type\
":\"BIGINT\",\"sink_field_name\":\"creator_id\",\"sink_field_type\":\"int\",\"deleteEnable\":false,\"source_field_index\":2,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"create_time\",\"source_field_type\":\"DATETIME\",\"sink_field_name\":\"create_time\",\"sink_field_type\":\"date\",\"deleteEnable\":false,\"source_field_index\":3,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"state\",\"source_field_type\":\"TINYINT\",\"sink_field_name\":\"state\",\"sink_field_type\":\"boolean\",\"deleteEnable\":false,\"source_field_index\":4,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false}]},\"settings\":[{\"key\":\"setting.max.parallelism\",\"field\":\"setting.max.parallelism\",\"label\":\"作业最大并行数\",\"sort\":1,\"value\":\"1\",\"defaultValue\":\"1\",\"unit\":\"个\",\"required\":true,\"validateType\":\"REGEX\",\"validateRange\":\"^[1-9]\\\\d*$\",\"validateMsg\":\"作业最大并行数输入错误\",\"source\":\"\",\"type\":\"INPUT\"},{\"key\":\"setting.max.memory\",\"field\":\"setting.max.memory\",\"label\":\"作业最大内存\",\"sort\":2,\"value\":\"1048\",\"defaultValue\":\"1024\",\"unit\":\"Mb\",\"required\":true,\"validateType\":\"REGEX\",\"validateRange\":\"^[1-9]\\\\d*$\",\"validateMsg\":\"作业最大内存输入错误\",\"source\":\"\",\"type\":\"INPUT\"}]},{\"subJobName\":\"tTFeeaPBfbZJ\",\"dataSourceIds\":{\"source\":{\"type\":\"MYSQL\",\"id\":\"111\",\"ds\":\"MYSQL_LIU\",\"db\":\"ide_gz_bdap_sit_01\",\"table\":\"dss_project_publish_history\"},\"sink\":{\"type\":\"HIVE\",\"id\":\"113\",\"ds\":\"hive_uat\",\"db\":\"hduser05db\",\"table\":\"dss_project_publish_history_text\"}},\"params\":{\"sources\":[{\"key\":\"where\",\"field\":\"where\",\"label\":\"WHERE条件\",\"sort\":2,\"value\":\"\",\"defaultValue\":\"\",\"unit\":\"\",\"required\":false,\"validateType\":\"REGEX\",\"validateRange\":\"^[
\\\\s\\\\S]{0,500}$\",\"validateMsg\":\"WHERE条件输入过长\",\"source\":\"\",\"type\":\"INPUT\"}],\"sinks\":[{\"key\":\"writeMode\",\"field\":\"writeMode\",\"label\":\"写入方式(OVERWRITE只对TEXT类型表生效)\",\"values\":[\"OVERWRITE\",\"APPEND\"],\"value\":\"OVERWRITE\",\"defaultValue\":\"OVERWRITE\",\"sort\":1,\"unit\":\"\",\"required\":true,\"type\":\"OPTION\"},{\"key\":\"partition\",\"field\":\"partition\",\"label\":\"分区信息(文本)\",\"sort\":2,\"value\":null,\"defaultValue\":null,\"unit\":\"\",\"required\":false,\"validateType\":\"REGEX\",\"validateRange\":\"^[\\\\s\\\\S]{0,50}$\",\"validateMsg\":\"分区信息过长\",\"source\":\"/api/rest_j/v1/exchangis/datasources/render/partition/element/map\",\"type\":\"MAP\"}]},\"transforms\":{\"addEnable\":false,\"type\":\"MAPPING\",\"sql\":null,\"mapping\":[{\"validator\":null,\"transformer\":null,\"source_field_name\":\"id\",\"source_field_type\":\"BIGINT\",\"sink_field_name\":\"id\",\"sink_field_type\":\"int\",\"deleteEnable\":true,\"source_field_index\":0,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"project_version_id\",\"source_field_type\":\"BIGINT\",\"sink_field_name\":\"project_version_id\",\"sink_field_type\":\"int\",\"deleteEnable\":true,\"source_field_index\":1,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"create_time\",\"source_field_type\":\"DATETIME\",\"sink_field_name\":\"creator_id\",\"sink_field_type\":\"int\",\"deleteEnable\":true,\"source_field_index\":2,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"creator_id\",\"source_field_type\":\"BIGINT\",\"sink_field_name\":\"create_time\",\"sink_field_type\":\"string\",\"deleteEnable\":true,\"source_field_index\":3,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable
\":false},{\"validator\":null,\"transformer\":null,\"source_field_name\":\"update_time\",\"source_field_type\":\"DATETIME\",\"sink_field_name\":\"state\",\"sink_field_type\":\"string\",\"deleteEnable\":true,\"source_field_index\":4,\"sink_field_index\":0,\"source_field_editable\":true,\"sink_field_editable\":false}]},\"settings\":[{\"key\":\"setting.max.parallelism\",\"field\":\"setting.max.parallelism\",\"label\":\"作业最大并行数\",\"sort\":1,\"value\":\"1\",\"defaultValue\":\"1\",\"unit\":\"个\",\"required\":true,\"validateType\":\"REGEX\",\"validateRange\":\"^[1-9]\\\\d*$\",\"validateMsg\":\"作业最大并行数输入错误\",\"source\":\"\",\"type\":\"INPUT\"},{\"key\":\"setting.max.memory\",\"field\":\"setting.max.memory\",\"label\":\"作业最大内存\",\"sort\":2,\"value\":\"1024\",\"defaultValue\":\"1024\",\"unit\":\"Mb\",\"required\":true,\"validateType\":\"REGEX\",\"validateRange\":\"^[1-9]\\\\d*$\",\"validateMsg\":\"作业最大内存输入错误\",\"source\":\"\",\"type\":\"INPUT\"}]}]}]"; - List contents = Json.fromJson(code, List.class, ExchangisJobInfoContent.class); - System.out.println(contents.get(0).getSubJobName()); - System.out.println(contents); - } + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java index 7d1d24d6c..f2fb0ce24 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java @@ -1,16 +1,16 @@ package com.webank.wedatasphere.exchangis.job.server.builder.transform; +import com.webank.wedatasphere.exchangis.common.linkis.bml.BmlResource; import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; -import 
com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent; -import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobParamsContent; +import com.webank.wedatasphere.exchangis.datasource.core.vo.*; import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; import com.webank.wedatasphere.exchangis.job.domain.GenericExchangisJob; import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; -import com.webank.wedatasphere.exchangis.job.server.builder.engine.DataxExchangisEngineJob; import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.GenericSubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; import com.webank.wedatasphere.exchangis.job.server.utils.JobUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -53,19 +53,37 @@ public void setSubJobSet(List subJobSet) { /** * Wrap entity of 'ExchangisJobInfoContent' */ - public static class SubExchangisJobAdapter extends SubExchangisJob{ + public static class TransformSubExchangisJob extends SubExchangisJob{ + + private static final String CODE_RESOURCE_NAME = ".code"; + /** + * Transform type + */ + private TransformTypes transformType; /** * Content VO */ private ExchangisJobInfoContent jobInfoContent; - public SubExchangisJobAdapter(ExchangisJobInfoContent jobInfoContent){ + /** + * Resource map + */ + private final Map resources = new HashMap<>(); + + public TransformSubExchangisJob(ExchangisJobInfoContent jobInfoContent){ if(Objects.nonNull(jobInfoContent)) { this.jobInfoContent = jobInfoContent; this.engineType = jobInfoContent.getEngine(); this.name = jobInfoContent.getSubJobName(); convertContentToParams(jobInfoContent); + 
Optional.ofNullable(jobInfoContent.getTransforms()).ifPresent(transforms -> { + if (StringUtils.isNotBlank(transforms.getType())) { + this.transformType = TransformTypes.valueOf(transforms.getType().toUpperCase(Locale.ROOT)); + // TODO define different transform sub jobs + convertTransformToColumnDefine(transforms); + } + }); } } @@ -77,9 +95,66 @@ public void setJobInfoContent(ExchangisJobInfoContent jobInfoContent) { this.jobInfoContent = jobInfoContent; } + /** + * Convert content to column definitions + * @param transforms transform + */ + private void convertTransformToColumnDefine(ExchangisJobTransformsContent transforms){ + List items = transforms.getMapping(); + if (Objects.nonNull(items)){ + for(int i = 0; i < items.size(); i++){ + final int index = i; + ExchangisJobTransformsItem item = items.get(i); + ColumnDefine srcColumn = new ColumnDefine(item.getSourceFieldName(), + item.getSourceFieldType(), item.getSourceFieldIndex()); + ColumnDefine sinkColumn = new ColumnDefine(item.getSinkFieldName(), + item.getSinkFieldType(), item.getSinkFieldIndex()); + Optional.ofNullable(item.getValidator()).ifPresent(validator -> + convertValidatorFunction(index, validator)); + Optional.ofNullable(item.getTransformer()).ifPresent(transformer -> + convertTransformFunction(index, transformer)); + getSourceColumns().add(srcColumn); + getSinkColumns().add(sinkColumn); + }; + } + } + /** + * Convert to validator function + * @param index index + * @param validator validator + */ + private void convertValidatorFunction(int index, List validator){ + if (validator.size() > 0) { + ColumnFunction function = new ColumnFunction(); + function.setIndex(index); + // TODO abstract the name + function.setName("dx_filter"); + function.setParams(new ArrayList<>(validator)); + getColumnFunctions().add(function); + } + } + + /** + * Convert to transform function + * @param index index + * @param transformer transformer + */ + private void convertTransformFunction(int index, 
ExchangisJobTransformer transformer){ + if (StringUtils.isNotBlank(transformer.getName())) { + ColumnFunction function = new ColumnFunction(); + function.setIndex(index); + function.setName(transformer.getName()); + function.setParams(transformer.getParams()); + getColumnFunctions().add(function); + } + } + /** + * Convert content to params + * @param content content + */ private void convertContentToParams(ExchangisJobInfoContent content){ setIntoParams(REALM_JOB_DATA_SOURCE, () -> Json.convert(content.getDataSources(), Map.class, String.class, String.class)); - setIntoParams(REALM_JOB_COLUMN_MAPPING, () -> Json.convert(content.getTransforms(), Map.class, String.class, Object.class)); +// setIntoParams(REALM_JOB_COLUMN_MAPPING, () -> Json.convert(content.getTransforms(), Map.class, String.class, Object.class)); if(Objects.nonNull(content.getParams())){ if(Objects.nonNull(content.getParams().getSources())) { List items = content.getParams().getSources(); @@ -160,29 +235,43 @@ private String resolveDataSourceId(String dataSourceId, JobParamSet paramSet){ * @param items * @return 用于转换时间分区 */ - private void timePlaceHolderConvert(List items) { - for (ExchangisJobParamsContent.ExchangisJobParamsItem exchangisJobParamsItem : items) { - if (("partition".equals(exchangisJobParamsItem.getConfigKey()) ) && exchangisJobParamsItem.getConfigValue() != null) { - Map partitionValue = (Map) exchangisJobParamsItem.getConfigValue(); - assert partitionValue != null; - Calendar calendar = Calendar.getInstance(); - if (!partitionValue.get("ds").isEmpty()) { - partitionValue.put("ds", JobUtils.renderDt(partitionValue.get("ds"), calendar)); + items.forEach(item -> { + Object value = item.getConfigValue(); + if (value instanceof String){ + item.setConfigValue(JobUtils.replaceVariable((String)value, new HashMap<>())); + } else if (value instanceof Map){ + for (Object key:((Map) value).keySet()) { + ((Map) value).put(key, JobUtils.replaceVariable(((String)((Map) value).get(key)), new 
HashMap<>())); } - LOG.info("Time placeholder transform value: {}", partitionValue.get("ds")); - exchangisJobParamsItem.setConfigValue(partitionValue); - } - else if ("where".equals(exchangisJobParamsItem.getConfigKey())) { - String partitionValue = exchangisJobParamsItem.getConfigValue().toString(); - assert partitionValue != null; - Calendar calendar = Calendar.getInstance(); - partitionValue = JobUtils.renderDt(partitionValue, calendar); - LOG.info("Time placeholder transform value: {}", partitionValue); - exchangisJobParamsItem.setConfigValue(partitionValue); } - } + }); + } + + /** + * Transform type + * @return type string + */ + public TransformTypes getTransformType() { + return transformType; + } + + /** + * Add code resource + * @param bmlResource bml resource + */ + void addCodeResource(BmlResource bmlResource){ + this.resources.put(CODE_RESOURCE_NAME, bmlResource); } + /** + * Get code resource + * @return bml resource + */ + public BmlResource getCodeResource(){ + return this.resources.get(CODE_RESOURCE_NAME); + } } + + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java new file mode 100644 index 000000000..1d190cbaa --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java @@ -0,0 +1,116 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import 
com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Objects; +import java.util.Optional; + +/** + * Job handler refer job builder + */ +public abstract class AbstractLoggingSubExchangisJobHandler implements SubExchangisJobHandler{ + + private static final ThreadLocal springContext = new ThreadLocal<>(); + + @Override + public final void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + wrapFuncWithContext(ctx, () -> { + try { + handleJobSource(subExchangisJob, ctx); + }catch (ErrorException e){ + throw new ExchangisJobException.Runtime(-1, "Exception in handling job source parameters", e); + } + }); + } + + @Override + public final void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + wrapFuncWithContext(ctx, () -> { + try { + handleJobSink(subExchangisJob, ctx); + } catch (ErrorException e) { + throw new ExchangisJobException.Runtime(-1, "Exception in handling job sink parameters", e); + } + }); + } + + /** + * Wrap the function(runnable) with context + * @param context context + * @param runnable function + */ + private void wrapFuncWithContext(ExchangisJobBuilderContext context, Runnable runnable){ + if (context instanceof SpringExchangisJobBuilderContext){ + springContext.set((SpringExchangisJobBuilderContext)context); + // Rest the default param set + JobParamSet storedParamSet = JobParamDefine.defaultParam.get(); + JobParamDefine.defaultParam.set(new JobParamSet()); + try{ + runnable.run(); + } finally { + springContext.remove(); + // Restore the default param set + if (Objects.nonNull(storedParamSet)){ + JobParamDefine.defaultParam.set(storedParamSet); + } else { + 
JobParamDefine.defaultParam.remove(); + } + } + } else { + runnable.run(); + } + } + + /** + * handle job source params + * @param subExchangisJob sub exchangis job + * @param ctx ctx + */ + public abstract void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + + /** + * handle job sink params + * @param subExchangisJob sub exchangis job + * @param ctx ctx + */ + public abstract void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + + /** + * Warn message + * @param message message + */ + public static void warn(String message, Object... args){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().warn(null, message, args)); + } + + public static void warn(String message, Throwable t){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().warn(null, message, t)); + } + + /** + * Info message + * @param message message + */ + public static void info(String message, Object... args){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().info(null, message, args)); + } + + public static void info(String message, Throwable t){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().info(null, message, t)); + } + + public static T getBean(Class clazz){ + return Objects.nonNull(springContext.get())? 
springContext.get().getBean(clazz) : null; + } + + protected static SpringExchangisJobBuilderContext getJobBuilderContext(){ + return springContext.get(); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AuthEnabledSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AuthEnabledSubExchangisJobHandler.java new file mode 100644 index 000000000..b290e8643 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AuthEnabledSubExchangisJobHandler.java @@ -0,0 +1,52 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + + +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; + +import java.io.ByteArrayOutputStream; +import java.io.ObjectOutputStream; +import java.util.Objects; + +/** + * With authentication + */ +public abstract class AuthEnabledSubExchangisJobHandler extends AbstractLoggingSubExchangisJobHandler{ + /** + * Disable encrypt + */ + protected static final JobParamDefine ENCRYPT_DISABLE = JobParams.define("encrypt.disable"); + + /** + * User name + */ + protected static final JobParamDefine USERNAME = JobParams.define(JobParamConstraints.USERNAME); + + /** + * Password + */ + protected static final JobParamDefine PASSWORD = JobParams.define(JobParamConstraints.PASSWORD, paramSet -> { + JobParam password = paramSet.get(JobParamConstraints.PASSWORD); 
+ if (Objects.nonNull(password) && StringUtils.isNotBlank(password.getValue())) { + Boolean encrypt = ENCRYPT_DISABLE.getValue(paramSet); + if (Objects.isNull(encrypt) || !encrypt) { + try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) { + try (ObjectOutputStream oos = new ObjectOutputStream(bos)) { + oos.writeObject(password.getValue()); + oos.flush(); + } + return new String(new Base64().encode(bos.toByteArray())); + } catch (Exception e) { + throw new ExchangisJobException.Runtime(-1, "Fail to encrypt password", e); + } + } + return password.getValue(); + } + return null; + }); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java index 51e50fba1..a3791cb80 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java @@ -18,13 +18,13 @@ /** * Abstract implement, to fetch the data source params of job */ -public class GenericSubExchangisJobHandler implements SubExchangisJobHandler{ +public class GenericSubExchangisJobHandler extends AbstractLoggingSubExchangisJobHandler{ public static final String ID_SPLIT_SYMBOL = "\\."; - private static final JobParamDefine SOURCE_ID = JobParams.define("source_id", String.class); + private static final JobParamDefine SOURCE_ID = JobParams.define("source_id"); - private static final JobParamDefine SINK_ID = JobParams.define("sink_id", String.class); + private static final JobParamDefine SINK_ID = JobParams.define("sink_id"); @Override public String dataSourceType() { @@ -32,22 
+32,24 @@ public String dataSourceType() { } @Override - public void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { ExchangisJobInfo originJob = ctx.getOriginalJob(); JobParamSet idParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_DATA_SOURCE); JobParamSet sourceParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); if (Objects.nonNull(idParamSet) && Objects.nonNull(sourceParamSet)){ + info("Fetch data source parameters in [{}]", subExchangisJob.getSourceType()); appendDataSourceParams(idParamSet.load(SOURCE_ID), sourceParamSet, originJob.getCreateUser()); } } @Override - public void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException{ + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException{ ExchangisJobInfo originJob = ctx.getOriginalJob(); JobParamSet idParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_DATA_SOURCE); JobParamSet sinkParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); if (Objects.nonNull(idParamSet) && Objects.nonNull(sinkParamSet)){ + info("Fetch data source parameters in [{}]", subExchangisJob.getSinkType()); appendDataSourceParams(idParamSet.load(SINK_ID), sinkParamSet, originJob.getCreateUser()); } } @@ -72,6 +74,12 @@ private void appendDataSourceParams(JobParam idParam, JobParamSet paramS } } } + + @Override + public int order() { + return Integer.MIN_VALUE; + } + public static class DataSourceService{ /** diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MongoDataxSubExchangisJobHandler.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MongoDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..ee1f2289a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MongoDataxSubExchangisJobHandler.java @@ -0,0 +1,82 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +/** + * Params mapping for mongo in datax + */ +public class MongoDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Database + */ + private static final JobParamDefine DATABASE = JobParams.define("dbName", JobParamConstraints.DATABASE); + + /** + * Host + */ + private static final JobParamDefine SOURCE_HOST = JobParams.define("conn_ins[0].host", JobParamConstraints.HOST); + private static final JobParamDefine SINK_HOST = JobParams.define("conn_ins[0].host", JobParamConstraints.HOST); + + /** + * Port + */ + private static final JobParamDefine SOURCE_PORT = JobParams.define("conn_ins[0].port", JobParamConstraints.PORT); + private static final JobParamDefine SINK_PORT = JobParams.define("conn_ins[0].port", JobParamConstraints.PORT); + + /** + * Connect params + */ + private static final JobParamDefine> 
OPTION_PARAMS = JobParams.define("optionParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + /** + * Collection name(table) + */ + private static final JobParamDefine COLLECTION_NAME = JobParams.define("collectionName", JobParamConstraints.TABLE); + + @Override + public String dataSourceType() { + return "mongodb"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sourceMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + } + } + + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[]{SOURCE_HOST, SOURCE_PORT, USERNAME, PASSWORD, DATABASE, COLLECTION_NAME, OPTION_PARAMS}; + } + + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{SINK_HOST, SINK_PORT, USERNAME, PASSWORD, DATABASE, COLLECTION_NAME, OPTION_PARAMS}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..a7ec7b9cd --- /dev/null +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java @@ -0,0 +1,117 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.utils.SQLCommandUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * Mysql in datax + */ +public class MySQLDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Database + */ + private static final JobParamDefine SOURCE_DATABASE = JobParams.define("connection[0].jdbcUrl[0].database", JobParamConstraints.DATABASE); + private static final JobParamDefine SINK_DATABASE = JobParams.define("connection[0].jdbcUrl.database", JobParamConstraints.DATABASE); + + /** + * Table + */ + private static final JobParamDefine SINK_TABLE = JobParams.define("connection[0].table[0]", JobParamConstraints.TABLE); + + /** + * Host + */ + private static final JobParamDefine SOURCE_HOST = JobParams.define("connection[0].jdbcUrl[0].host", JobParamConstraints.HOST); + private static final JobParamDefine SINK_HOST = JobParams.define("connection[0].jdbcUrl.host", JobParamConstraints.HOST); + + /** + * Port + */ + private static final JobParamDefine SOURCE_PORT = JobParams.define("connection[0].jdbcUrl[0].port", JobParamConstraints.PORT); + private static 
final JobParamDefine SINK_PORT = JobParams.define("connection[0].jdbcUrl.port", JobParamConstraints.PORT); + + /** + * Connect params + */ + private static final JobParamDefine> SOURCE_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl[0].connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + private static final JobParamDefine> SINK_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl.connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + /** + * Where condition + */ + private static final JobParamDefine WHERE_CONDITION = JobParams.define(JobParamConstraints.WHERE); + + /** + * Query sql + */ + private static final JobParamDefine QUERY_SQL = JobParams.define("connection[0].querySql[0]", job ->{ + JobParamSet sourceParams = job.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + String where = WHERE_CONDITION.getValue(sourceParams); + List columns = job.getSourceColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()){ + columns.add("*"); + } + return SQLCommandUtils.contactSql(Collections.singletonList(sourceParams + .get(JobParamConstraints.TABLE).getValue()), null, columns, null, where); + }, SubExchangisJob.class); + + /** + * SQL column + */ + private static final JobParamDefine> SQL_COLUMN = JobParams.define("column", job -> { + List columns = job.getSinkColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()){ + columns.add("*"); + } + return columns; + }, SubExchangisJob.class); + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sourceMappings()).forEach(define -> 
paramSet.addNonNull(define.get(paramSet))); + paramSet.add(QUERY_SQL.newParam(subExchangisJob)); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(SQL_COLUMN.newParam(subExchangisJob)); + } + } + + @Override + public String dataSourceType() { + return "mysql"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + private JobParamDefine[] sourceMappings(){ + return new JobParamDefine[]{USERNAME, PASSWORD, SOURCE_DATABASE, + SOURCE_HOST, SOURCE_PORT, SOURCE_PARAMS_MAP}; + } + + public JobParamDefine[] sinkMappings(){ + return new JobParamDefine[]{USERNAME, PASSWORD, SINK_DATABASE, SINK_TABLE, + SINK_HOST, SINK_PORT, SINK_PARAMS_MAP}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/OracleDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/OracleDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..71cc41e69 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/OracleDataxSubExchangisJobHandler.java @@ -0,0 +1,122 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; + +import 
com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.utils.SQLCommandUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * Oracle in datax + */ +public class OracleDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Host + */ + private static final JobParamDefine SOURCE_HOST = JobParams.define("connection[0].jdbcUrl[0].host", JobParamConstraints.HOST); + private static final JobParamDefine SINK_HOST = JobParams.define("connection[0].jdbcUrl.host", JobParamConstraints.HOST); + + /** + * Port + */ + private static final JobParamDefine SOURCE_PORT = JobParams.define("connection[0].jdbcUrl[0].port", JobParamConstraints.PORT); + private static final JobParamDefine SINK_PORT = JobParams.define("connection[0].jdbcUrl.port", JobParamConstraints.PORT); + + /** + * ServiceName + */ + private static final JobParamDefine SOURCE_SERVICE_NAME = JobParams.define("connection[0].jdbcUrl[0].serviceName", JobParamConstraints.SERVICE_NAME); + private static final JobParamDefine SINK_SERVICE_NAME = JobParams.define("connection[0].jdbcUrl.serviceName", JobParamConstraints.SERVICE_NAME); + + /** + * Table + */ + private static final JobParamDefine SOURCE_TABLE = JobParams.define("table", JobParamConstraints.TABLE); + private static final JobParamDefine SINK_TABLE = JobParams.define("connection[0].table[0]", JobParamConstraints.TABLE); + + /** + * Connect params + */ + private static final JobParamDefine> SOURCE_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl[0].connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), 
String.class); + private static final JobParamDefine> SINK_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl.connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + + /** + * Where condition + */ + private static final JobParamDefine SOURCE_WHERE_CONDITION = JobParams.define(JobParamConstraints.WHERE); + + + /** + * Query sql + */ + private static final JobParamDefine QUERY_SQL = JobParams.define("connection[0].querySql[0]", job -> { + JobParamSet sourceParams = job.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + String where = SOURCE_WHERE_CONDITION.getValue(sourceParams); + List columns = job.getSourceColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()) { + columns.add("*"); + } + return SQLCommandUtils.contactSql(Collections.singletonList(sourceParams + .get(JobParamConstraints.TABLE).getValue()), null, columns, null, where); + }, SubExchangisJob.class); + + /** + * SQL column + */ + private static final JobParamDefine> SQL_COLUMN = JobParams.define("column", job -> { + List columns = job.getSinkColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()) { + columns.add("*"); + } + return columns; + }, SubExchangisJob.class); + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(paramSet)) { + Arrays.asList(sourceMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(QUERY_SQL.newParam(subExchangisJob)); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = 
subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)) { + Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(SQL_COLUMN.newParam(subExchangisJob)); + } + } + + @Override + public String dataSourceType() { + return "oracle"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + private JobParamDefine[] sourceMappings() { + return new JobParamDefine[]{USERNAME, PASSWORD, SOURCE_TABLE, SOURCE_WHERE_CONDITION, + SOURCE_HOST, SOURCE_PORT, SOURCE_SERVICE_NAME, SOURCE_PARAMS_MAP}; + } + + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{USERNAME, PASSWORD, SINK_TABLE, + SINK_HOST, SINK_PORT, SINK_SERVICE_NAME, SINK_PARAMS_MAP}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java index aa14f0ebf..cb9a2db1e 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java @@ -4,31 +4,40 @@ import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; -import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.SubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.AuthEnabledSubExchangisJobHandler; import 
org.apache.linkis.common.exception.ErrorException; import java.util.Arrays; import java.util.Objects; import java.util.Optional; +import java.util.function.Consumer; /** * Implement "SubExchangisJobHandler", only handle the params of job */ -public abstract class AbstractExchangisJobParamsMapping implements SubExchangisJobHandler { +public abstract class AbstractExchangisJobParamsMapping extends AuthEnabledSubExchangisJobHandler { @Override - public void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + subExchangisJob.getSourceColumns().forEach(srcColumnMappingFunc()); JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); if (Objects.nonNull(paramSet)){ - Optional.ofNullable(sourceMappings()).ifPresent(jobParamDefines -> Arrays.asList(jobParamDefines).forEach(paramSet::addNonNull)); +// info("SourceParamSet: {}", Json.toJson(paramSet.toList().stream().collect( +// Collectors.toMap(JobParam::getStrKey, JobParam::getValue)), null)); + Optional.ofNullable(sourceMappings()).ifPresent(jobParamDefines -> + Arrays.asList(jobParamDefines).forEach(define -> paramSet.addNonNull(define.get(paramSet)))); } } @Override - public void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + subExchangisJob.getSinkColumns().forEach(sinkColumnMappingFunc()); JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); if (Objects.nonNull(paramSet)){ - Optional.ofNullable(sourceMappings()).ifPresent(jobParamDefines -> Arrays.asList(jobParamDefines).forEach(paramSet::addNonNull)); +// info("SinkParamSet: {}", Json.toJson(paramSet.toList().stream().collect( +// 
Collectors.toMap(JobParam::getStrKey, JobParam::getValue)), null)); + Optional.ofNullable(sinkMappings()).ifPresent(jobParamDefines -> + Arrays.asList(jobParamDefines).forEach(define -> paramSet.addNonNull(define.get(paramSet)))); } } @@ -44,4 +53,20 @@ public void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderConte * @return */ public abstract JobParamDefine[] sinkMappings(); + + /** + * Source columns mapping function + * @return consumer function + */ + protected Consumer srcColumnMappingFunc(){ + return columnDefine -> {}; + } + + /** + * Sink columns mapping function + * @return consumer function + */ + protected Consumer sinkColumnMappingFunc(){ + return columnDefine -> {}; + } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/EsDataxParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/EsDataxParamsMapping.java new file mode 100644 index 000000000..1afe54842 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/EsDataxParamsMapping.java @@ -0,0 +1,110 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Elastic search datax mapping + */ +public class EsDataxParamsMapping extends AbstractExchangisJobParamsMapping{ + + private static final Logger 
LOG = LoggerFactory.getLogger(EsDataxParamsMapping.class); + + /** + * Elastic search urls + */ + private static final JobParamDefine ELASTIC_URLS = JobParams.define("elasticUrls", "elasticUrls", urls -> { + List elasticUrls = Json.fromJson(urls, List.class, String.class); + if (Objects.nonNull(elasticUrls)){ + return StringUtils.join(elasticUrls, ","); + } + return null; + }, String.class); + + /** + * Index name + */ + private static final JobParamDefine INDEX = JobParams.define("index", JobParamConstraints.DATABASE); + + /** + * Index type + */ + private static final JobParamDefine TYPE = JobParams.define("type", JobParamConstraints.TABLE); + + /** + * If in security connection + */ + private static final JobParamDefine SECURE = JobParams.define("secure"); + + /** + * Max merge count + */ + private static final JobParamDefine> SETTINGS = JobParams.define("settings", + () -> { + Map settings = new HashMap<>(); + settings.put("index.merge.scheduler.max_merge_count", 100); + return settings; + }); + + /** + * Clean up + */ + private static final JobParamDefine CLEANUP = JobParams.define("cleanUp", () -> "false"); + + /** + * Max pool size + */ + private static final JobParamDefine CLIENT_MAX_POOL_SIZE = JobParams.define("clientConfig.maxPoolSize", () -> "1"); + + /** + * Socket time out + */ + private static final JobParamDefine CLIENT_SOCK_TIMEOUT = JobParams.define("clientConfig.sockTimeout", () -> "60000"); + + /** + * Connection timeout + */ + private static final JobParamDefine CLIENT_CONN_TIMEOUT = JobParams.define("clientConfig.connTimeout", () -> "60000"); + + /** + * Timeout + */ + private static final JobParamDefine CLIENT_TIMEOUT = JobParams.define("clientConfig.timeout", () -> "60000"); + + /** + * Compress + */ + private static final JobParamDefine CLIENT_COMPRESS = JobParams.define("clientConfig.compress", () -> "true"); + @Override + public String dataSourceType() { + return "elasticsearch"; + } + + @Override + public boolean acceptEngine(String 
engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + @Override + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[0]; + } + + @Override + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{USERNAME, PASSWORD, ELASTIC_URLS, INDEX, TYPE, SECURE, + SETTINGS, CLEANUP, CLIENT_MAX_POOL_SIZE, CLIENT_SOCK_TIMEOUT, CLIENT_CONN_TIMEOUT, + CLIENT_TIMEOUT, CLIENT_COMPRESS + }; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java index 5ffb6caa8..031425be7 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java @@ -1,16 +1,335 @@ package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.lang3.StringUtils; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.*; +import 
java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +/** + * Hive datax mapping + */ public class HiveDataxParamsMapping extends AbstractExchangisJobParamsMapping{ + + private static final Map FIELD_MAP = new HashMap<>(); + + private static final BitSet CHAR_TO_ESCAPE = new BitSet(128); + + private static final String[] SOURCE_SUPPORT_FILETYPE = new String[]{"TEXT", "ORC","RC","SEQ","CSV"}; + + private static final String[] SINK_SUPPORT_FILETYPE = new String[]{"ORC", "TEXT"}; + + private enum Type { + /** + * types that supported by DataX + */ + STRING, LONG, BOOLEAN, DOUBLE, DATE + } + //hive type => dataX type + static{ + FIELD_MAP.put("TINYINT", Type.LONG); + FIELD_MAP.put("SMALLINT", Type.LONG); + FIELD_MAP.put("INT", Type.LONG); + FIELD_MAP.put("BIGINT", Type.LONG); + FIELD_MAP.put("FLOAT", Type.DOUBLE); + FIELD_MAP.put("DOUBLE", Type.DOUBLE); + FIELD_MAP.put("DECIMAL", Type.DOUBLE); + FIELD_MAP.put("STRING", Type.STRING); + FIELD_MAP.put("CHAR", Type.STRING); + FIELD_MAP.put("VARCHAR", Type.STRING); + FIELD_MAP.put("STRUCT", Type.STRING); + FIELD_MAP.put("MAP", Type.STRING); + FIELD_MAP.put("ARRAY", Type.STRING); + FIELD_MAP.put("UNION", Type.STRING); + FIELD_MAP.put("BINARY", Type.STRING); + FIELD_MAP.put("BOOLEAN", Type.BOOLEAN); + FIELD_MAP.put("DATE", Type.DATE); + FIELD_MAP.put("TIMESTAMP", Type.DATE); + } + + /** + * Hive database + */ + private static final JobParamDefine HIVE_DATABASE = JobParams.define("hiveDatabase", JobParamConstraints.DATABASE); + + /** + * Hive table + */ + private static final JobParamDefine HIVE_TABLE = JobParams.define("hiveTable", JobParamConstraints.TABLE); + + /** + * Hive uris + */ + private static final JobParamDefine HIVE_URIS = JobParams.define("hiveMetastoreUris", "uris"); + + /** + * Data file name (prefix) + */ + private static final JobParamDefine DATA_FILE_NAME = JobParams.define("fileName", () -> "exch_hive_"); + /** + * Encoding + */ + private 
static final JobParamDefine ENCODING = JobParams.define("encoding", paramSet -> { + JobParam encodingParam = paramSet.get(JobParamConstraints.ENCODING); + if (Objects.nonNull(encodingParam)){ + return encodingParam.getValue(); + } + return "utf-8"; + }); + + /** + * Null format + */ + private static final JobParamDefine NULL_FORMAT = JobParams.define("nullFormat", paramSet -> { + JobParam nullFormatParam = paramSet.get(JobParamConstraints.NULL_FORMAT); + if (Objects.nonNull(nullFormatParam)){ + return nullFormatParam.getValue(); + } + return "\\N"; + }); + /** + * Table partition + */ + private static final JobParamDefine> TABLE_PARTITION = JobParams.define(JobParamConstraints.PARTITION); + + /** + * Table properties + */ + private static final JobParamDefine> HIVE_TABLE_PROPS = JobParams.define("tableProps", paramSet -> { + String database = HIVE_DATABASE.getValue(paramSet); + String table = HIVE_TABLE.getValue(paramSet); + JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + try { + return Objects.requireNonNull(getBean(MetadataInfoService.class)).getTableProps(getJobBuilderContext().getOriginalJob().getCreateUser(), + Long.valueOf(dataSourceId.getValue()), database, table); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + }); + + /** + * Partition keys + */ + private static final JobParamDefine> PARTITION_KEYS = JobParams.define("partitionKeys", paramSet -> { + JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + List partitionKeys = new ArrayList<>(); + String database = HIVE_DATABASE.getValue(paramSet); + String table = HIVE_TABLE.getValue(paramSet); + try { + partitionKeys = Objects.requireNonNull(getBean(MetadataInfoService.class)).getPartitionKeys(getJobBuilderContext().getOriginalJob().getCreateUser(), + Long.parseLong(dataSourceId.getValue()), database, table); + } catch (ExchangisDataSourceException e) { + throw new 
ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + return partitionKeys; + }); + /** + * Partition values + */ + private static final JobParamDefine PARTITION_VALUES = JobParams.define("partitionValues", paramSet -> { + Map partitions = Optional.ofNullable(TABLE_PARTITION.getValue(paramSet)).orElse(new HashMap<>()); + //Try to find actual partition from table properties + List partitionKeys = PARTITION_KEYS.getValue(paramSet); + String[] partitionColumns = Objects.isNull(partitionKeys)? new String[0]: partitionKeys.toArray(new String[0]); + if (partitionColumns.length > 0 && partitions.size() != partitionColumns.length){ + throw new ExchangisJobException.Runtime(-1, "Unmatched partition list: [" + + StringUtils.join(partitionColumns, ",") + "]", null); + } + if (partitionColumns.length > 0){ + return Arrays.stream(partitionColumns).map(partitions::get).collect(Collectors.joining(",")); + } + return null; + }); + + /** + * Field delimiter + */ + private static final JobParamDefine FIELD_DELIMITER = JobParams.define("fieldDelimiter", paramSet -> + HIVE_TABLE_PROPS.getValue(paramSet).getOrDefault("field.delim", "\u0001")); + + /** + * File type + */ + private static final JobParamDefine FILE_TYPE = JobParams.define("fileType", paramSet -> { + Map tableProps = HIVE_TABLE_PROPS.getValue(paramSet); + AtomicReference fileType = new AtomicReference<>(); + Optional.ofNullable(tableProps.get("serialization.lib")).ifPresent(serLib -> fileType + .set(HiveV2FileType.serde(serLib))); + if (Objects.nonNull(fileType.get())){ + Optional.ofNullable(tableProps.get("file.inputformat")).ifPresent(inputFormat -> fileType + .set(HiveV2FileType.input(inputFormat))); + } + if (Objects.nonNull(fileType.get())){ + Optional.ofNullable(tableProps.get("file.outputformat")).ifPresent(outputFormat -> fileType + .set(HiveV2FileType.output(outputFormat))); + } + return Objects.nonNull(fileType.get())? 
fileType.get() : HiveV2FileType.TEXT; + }); + + /** + * Data location + */ + private static final JobParamDefine DATA_LOCATION = JobParams.define("location", paramSet -> { + Map tableProps = HIVE_TABLE_PROPS.getValue(paramSet); + String path = tableProps.getOrDefault("location", ""); + String partitionValues = PARTITION_VALUES.getValue(paramSet); + if (StringUtils.isNotBlank(partitionValues)){ + String[] values = partitionValues.split(","); + String[] keys = PARTITION_KEYS.getValue(paramSet).toArray(new String[0]); + // Escape the path and value of partition + StringBuilder pathBuilder = new StringBuilder(path).append("/"); + for(int i = 0; i < keys.length; i++){ + if (i > 0){ + pathBuilder.append("/"); + } + pathBuilder.append(escapeHivePathName(keys[i])); + pathBuilder.append("="); + pathBuilder.append(escapeHivePathName(values[i])); + } + path = pathBuilder.toString(); + } + return path.replaceAll(" ", "%20"); + }); + + /** + * Compress name + */ + private static final JobParamDefine COMPRESS_NAME = JobParams.define("compress", paramSet -> { + HiveV2FileType fileType = FILE_TYPE.getValue(paramSet); + if (HiveV2FileType.TEXT.equals(fileType)){ + return "GZIP"; + } else if (HiveV2FileType.ORC.equals(fileType)){ + return "SNAPPY"; + } + return null; + }); + + /** + * Data path + */ + private static final JobParamDefine DATA_PATH = JobParams.define("path", paramSet -> { + String location = DATA_LOCATION.getValue(paramSet); + if (StringUtils.isNotBlank(location)){ + try { + return new URI(location).getPath(); + } catch (URISyntaxException e) { + warn("Unrecognized location: [{}]", location, e); + } + } + return null; + }); + + /** + * Hadoop config + */ + private static final JobParamDefine> HADOOP_CONF = JobParams.define("hadoopConfig", paramSet -> { + String uri = DATA_LOCATION.getValue(paramSet); + try { + // TODO get the other hdfs cluster with tab + return Objects.requireNonNull(getBean(MetadataInfoService.class)).getLocalHdfsInfo(uri); + } catch 
(ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getDesc(), e.getCause()); + } + }); + + /** + * To "defaultFS" + */ + private static final JobParamDefine DEFAULT_FS = JobParams.define("defaultFS", paramSet -> + HADOOP_CONF.getValue(paramSet).get("fs.defaultFS")); + + private static final JobParamDefine IS_SINK_FILETYPE_SUPPORT = JobParams.define("sink.fileType.support", paramSet -> { + if (!isSupport(FILE_TYPE.getValue(paramSet).name(), SINK_SUPPORT_FILETYPE)){ + throw new ExchangisJobException.Runtime(-1, "Unsupported sink file type [" + FILE_TYPE.getValue(paramSet).name() + "] of hive", null); + } + return null; + }); + + private static final JobParamDefine IS_SOURCE_FILETYPE_SUPPORT = JobParams.define("sink.fileType.support", paramSet -> { + if (!isSupport(FILE_TYPE.getValue(paramSet).name(), SOURCE_SUPPORT_FILETYPE)){ + throw new ExchangisJobException.Runtime(-1, "Unsupported source file type [" + FILE_TYPE.getValue(paramSet).name() + "] of hive", null); + } + return null; + }); + // TODO kerberos params + + /** + * Escape hive path name + * @param path path name + * @return path + */ + protected static String escapeHivePathName(String path) { + if (path != null && path.length() != 0) { + StringBuilder sb = new StringBuilder(); + + for(int i = 0; i < path.length(); ++i) { + char c = path.charAt(i); + if (c < CHAR_TO_ESCAPE.size() && CHAR_TO_ESCAPE.get(c)) { + sb.append('%'); + sb.append(String.format("%1$02X", (int) c)); + } else { + sb.append(c); + } + } + + return sb.toString(); + } else { + return "__HIVE_DEFAULT_PARTITION__"; + } + } + + protected static boolean isSupport(String value, String[] array){ + boolean isSupport = false; + for(String item: array){ + if(item.equalsIgnoreCase(value)){ + isSupport = true; + break; + } + } + return isSupport; + } + @Override public JobParamDefine[] sourceMappings() { - return new JobParamDefine[0]; + return new JobParamDefine[]{HIVE_DATABASE, HIVE_TABLE, ENCODING, + 
NULL_FORMAT, PARTITION_VALUES, FIELD_DELIMITER, FILE_TYPE, DATA_PATH, HADOOP_CONF, DEFAULT_FS, + IS_SOURCE_FILETYPE_SUPPORT}; } @Override public JobParamDefine[] sinkMappings() { - return new JobParamDefine[0]; + return new JobParamDefine[]{HIVE_DATABASE, HIVE_TABLE, ENCODING, + NULL_FORMAT, PARTITION_VALUES, FIELD_DELIMITER, FILE_TYPE, DATA_PATH, HADOOP_CONF, DEFAULT_FS, + COMPRESS_NAME, IS_SINK_FILETYPE_SUPPORT, HIVE_URIS, DATA_FILE_NAME}; + } + + @Override + protected Consumer srcColumnMappingFunc() { + return columnDefine -> { + String type = columnDefine.getType(); + Type t = FIELD_MAP.get(type.toUpperCase().replaceAll("[(<(][\\s\\S]+", "")); + if (null != t){ + columnDefine.setType(t.toString()); + } else { + columnDefine.setType(Type.STRING.toString()); + } + }; + } + + @Override + protected Consumer sinkColumnMappingFunc() { + return columnDefine -> columnDefine.setType(columnDefine.getType().replaceAll("[(<(][\\s\\S]+", "")); } @Override @@ -22,4 +341,5 @@ public String dataSourceType() { public boolean acceptEngine(String engineType) { return "datax".equalsIgnoreCase(engineType); } + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java index 5cd5a00ab..366d89b82 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java @@ -25,7 +25,7 @@ public boolean acceptEngine(String engineType) { public JobParamDefine[] sourceMappings() { return new JobParamDefine[]{ //Unit test - JobParams.define("version", "source.version"), + JobParams.define("version", 
"source.version" ), JobParams.define("version", () -> "1.4.7"), JobParams.define("tab", (BiFunction)(key, paramSet)->{ JobParams.define("version").newParam(paramSet).getValue(); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveV2FileType.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveV2FileType.java new file mode 100644 index 000000000..f4479e602 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveV2FileType.java @@ -0,0 +1,69 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import java.util.HashMap; +import java.util.Map; + +/** + * Hive File type for Hive version 2.x + */ +public enum HiveV2FileType { + /** + * TYPE:TEXT + */ + TEXT, + /** + * TYPE:ORC + */ + ORC, + /** + * TYPE:AVRO + */ + AVRO, + /** + * TYPE:PARQUET + */ + PARQUET, + /** + * TYPE:RC + */ + RC, + /** + * TYPE:SEQUENCE + */ + SEQ; + + static final Map SERDE = new HashMap<>(); + static final Map INPUT = new HashMap<>(); + static final Map OUTPUT = new HashMap<>(); + static{ + SERDE.put("org.apache.hadoop.hive.ql.io.orc.OrcSerde", ORC); + SERDE.put("org.apache.hadoop.hive.serde2.avro.AvroSerDe", AVRO); + SERDE.put("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", PARQUET); + INPUT.put("org.apache.hadoop.mapred.TextInputFormat", TEXT); + INPUT.put("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", ORC); + INPUT.put("org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat", AVRO); + INPUT.put("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", PARQUET); + INPUT.put("org.apache.hadoop.hive.ql.io.RCFileInputFormat", RC); + INPUT.put("org.apache.hadoop.mapred.SequenceFileInputFormat", SEQ); + OUTPUT.put("org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", TEXT); + 
OUTPUT.put("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", ORC); + OUTPUT.put("org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat", AVRO); + OUTPUT.put("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", PARQUET); + OUTPUT.put("org.apache.hadoop.hive.ql.io.RCFileOutputFormat", RC); + OUTPUT.put("org.apache.hadoop.mapred.SequenceFileOutputFormat", SEQ); + } + HiveV2FileType(){ + } + + static HiveV2FileType serde(String serializationClz){ + return SERDE.get(serializationClz); + } + + static HiveV2FileType input(String inputFormat){ + return INPUT.get(inputFormat); + } + + static HiveV2FileType output(String outputFormat){ + return OUTPUT.get(outputFormat); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java index c799a7751..d26c88abd 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java @@ -1,7 +1,6 @@ package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; -import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; public class MySQLSqoopParamsMapping extends AbstractExchangisJobParamsMapping{ @Override diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java index 3be6df394..0a5e3e2b4 
100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java @@ -161,6 +161,11 @@ public void onDelete(TaskDeleteEvent deleteEvent) { // Ignore } + @Override + public void onDequeue(TaskDequeueEvent dequeueEvent) throws ExchangisOnEventException { + //Ignore + } + @Override public void onProgressUpdate(TaskProgressUpdateEvent updateEvent) { // Ignore diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java index 332ac0de7..b3a05b8af 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java @@ -74,7 +74,7 @@ public void addRunningTask(LaunchedExchangisTask task) { task.setStatus(TaskStatus.Running); task.setRunningTime(Calendar.getInstance().getTime()); onEvent(new TaskLaunchEvent(task)); - info(task, "Status of task: [name: {}, id: {}] change to {}, info: [{}]", task.getName(), task.getTaskId(), task.getStatus(), Json.toJson(task, null)); + info(task, "Status of task: [name: {}, id: {}] change to {}, info: [{}]", task.getName(), task.getTaskId(), task.getStatus(), ""); if (Objects.isNull(runningTasks.putIfAbsent(task.getTaskId(), task))){ jobWrappers.compute(task.getJobExecutionId(), (jobExecutionId, jobWrapper) -> { if (Objects.nonNull(jobWrapper) && jobWrapper.addTask(task)){ diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java index e7a119334..e050991d2 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java @@ -25,6 +25,8 @@ default void onEvent(TaskExecutionEvent event) throws ExchangisOnEventException{ onDelete((TaskDeleteEvent)event); } else if (event instanceof TaskProgressUpdateEvent){ onProgressUpdate((TaskProgressUpdateEvent)event); + } else if (event instanceof TaskDequeueEvent){ + onDequeue((TaskDequeueEvent) event); } } @@ -52,9 +54,17 @@ default void onEvent(TaskExecutionEvent event) throws ExchangisOnEventException{ */ void onDelete(TaskDeleteEvent deleteEvent) throws ExchangisOnEventException; + /** + * Dequeue event + * @param dequeueEvent dequeue event + * @throws ExchangisOnEventException exception + */ + void onDequeue(TaskDequeueEvent dequeueEvent) throws ExchangisOnEventException; + /** * Progress update * @param updateEvent update event */ void onProgressUpdate(TaskProgressUpdateEvent updateEvent) throws ExchangisOnEventException; + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDequeueEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDequeueEvent.java new file mode 100644 index 000000000..900bbfd1e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDequeueEvent.java @@ -0,0 +1,32 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + + +/** + * Event that remove the launchable task from the queue(table) + */ +public class TaskDequeueEvent extends 
TaskExecutionEvent{ + /** + * Task id + */ + private String taskId; + /** + * @param taskId task id + */ + public TaskDequeueEvent(String taskId) { + super(null); + this.taskId = taskId; + } + + @Override + public String eventId() { + return "_TaskExecution_" + this.taskId; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java index 8248bae5a..a14d22182 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java @@ -9,7 +9,7 @@ import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; -import com.webank.wedatasphere.exchangis.job.server.builder.ServiceInExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException; import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateErrorEvent; @@ -39,7 +39,7 @@ private static class Constraints{ } protected TaskGeneratorContext ctx; - private ExchangisJobBuilderManager jobBuilderManager; + private final ExchangisJobBuilderManager jobBuilderManager; /** * Generate 
task id @@ -75,10 +75,16 @@ protected void execute(LaunchableExchangisJob launchableExchangisJob, throw throwable; } ExchangisJobBuilderManager jobBuilderManager = getExchangisJobBuilderManager(); - ServiceInExchangisJobBuilderContext ctx = new ServiceInExchangisJobBuilderContext(jobInfo, generatorContext.getJobLogListener()); - // Set the metadata service - ctx.setMetadataInfoService(generatorContext.getMetadataInfoService()); - ctx.setJobExecutionId(launchableExchangisJob.getJobExecutionId()); + ExchangisJobBuilderContext ctx; + if (generatorContext instanceof SpringTaskGeneratorContext){ + // Spring job builder context + ctx = new SpringExchangisJobBuilderContext(jobInfo, + ((SpringTaskGeneratorContext) generatorContext).getApplicationContext(), + generatorContext.getJobLogListener()); + ((SpringExchangisJobBuilderContext)ctx).setJobExecutionId(launchableExchangisJob.getJobExecutionId()); + } else { + ctx = new ExchangisJobBuilderContext(jobInfo); + } ctx.putEnv("USER_NAME", tenancy); // ExchangisJobInfo -> TransformExchangisJob(SubExchangisJob) try { diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGeneratorContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGeneratorContext.java deleted file mode 100644 index e43a51dc7..000000000 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGeneratorContext.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.webank.wedatasphere.exchangis.job.server.execution.generator; - -import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; -import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; - -/** - * Default generator context - */ -public class DefaultTaskGeneratorContext implements TaskGeneratorContext { - - private JobLogListener 
jobLogListener; - - private MetadataInfoService metadataInfoService; - - public DefaultTaskGeneratorContext(){ - - } - public DefaultTaskGeneratorContext(JobLogListener jobLogListener, - MetadataInfoService metadataInfoService){ - this.jobLogListener = jobLogListener; - this.metadataInfoService = metadataInfoService; - } - - @Override - public JobLogListener getJobLogListener() { - return this.jobLogListener; - } - - @Override - public MetadataInfoService getMetadataInfoService() { - return metadataInfoService; - } -} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/SpringTaskGeneratorContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/SpringTaskGeneratorContext.java new file mode 100644 index 000000000..c591448ed --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/SpringTaskGeneratorContext.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import org.springframework.context.ApplicationContext; + +/** + * Spring generator context (with application context) + */ +public class SpringTaskGeneratorContext implements TaskGeneratorContext { + + private JobLogListener jobLogListener; + + /** + * Spring application context + */ + private ApplicationContext applicationContext; + + public SpringTaskGeneratorContext(){ + + } + public SpringTaskGeneratorContext(JobLogListener jobLogListener, + ApplicationContext applicationContext){ + this.jobLogListener = jobLogListener; + this.applicationContext = applicationContext; + } + + @Override + public JobLogListener getJobLogListener() { + return this.jobLogListener; + } + + public ApplicationContext getApplicationContext() { + return this.applicationContext; + } +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java index d2a8ee6dc..0c45979a5 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java @@ -1,6 +1,5 @@ package com.webank.wedatasphere.exchangis.job.server.execution.generator; -import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; /** @@ -14,9 +13,4 @@ public interface TaskGeneratorContext { */ JobLogListener getJobLogListener(); - /** - * Metadata info service - * @return - */ - MetadataInfoService getMetadataInfoService(); } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java index 6ed5f556d..b88cc58b3 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java @@ -11,8 +11,10 @@ import org.apache.commons.lang.StringUtils; import org.apache.linkis.common.conf.CommonVars; import org.apache.linkis.scheduler.Scheduler; +import org.apache.linkis.scheduler.SchedulerContext; import org.apache.linkis.scheduler.queue.ConsumerManager; import 
org.apache.linkis.scheduler.queue.GroupFactory; +import org.apache.linkis.scheduler.queue.SchedulerEventState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -54,7 +56,6 @@ public FlexibleTenancyLoadBalancer(Scheduler scheduler, TaskManager choose(LaunchedExchangisTask launchedExchangisTask, Class schedulerTaskClass, boolean unchecked) { if( !unchecked || isSuitableClass(schedulerTaskClass)){ - String schedulerTaskName = schedulerTaskClass.getSimpleName(); // Fetch the latest info launchedExchangisTask = getTaskManager().getRunningTask(launchedExchangisTask.getTaskId()); // If the value is None means that the task is ended @@ -70,29 +71,8 @@ protected LoadBalanceSchedulerTask choose(LaunchedExchang if (StringUtils.isBlank(tenancy)) { tenancy = TenancyParallelGroupFactory.DEFAULT_TENANCY; } - String finalTenancy = tenancy; - SchedulerTaskContainer schedulerTaskContainer =tenancySchedulerTasks.compute(tenancy + "_" + schedulerTaskName,(key, taskContainer) -> { - if (Objects.isNull(taskContainer)){ - LoadBalanceSchedulerTask headSchedulerTask = createLoadBalanceSchedulerTask(schedulerTaskClass); - if (headSchedulerTask instanceof AbstractLoadBalanceSchedulerTask){ - ((AbstractLoadBalanceSchedulerTask) headSchedulerTask) - .setSchedulerLoadBalancer(FlexibleTenancyLoadBalancer.this); - } - headSchedulerTask.setTenancy(finalTenancy); - try { - getScheduler().submit(headSchedulerTask); - } catch (Exception e){ - // Only if not enough reserved threads in scheduler - throw new ExchangisTaskExecuteException.Runtime("If there is no enough reserved threads in scheduler for tenancy: [" + finalTenancy - + "], load balance scheduler task: [" + schedulerTaskName + "]? 
please invoke setInitResidentThreads(num) method in consumerManager", e); - } - taskContainer = new SchedulerTaskContainer(headSchedulerTask); - taskContainer.tenancy = finalTenancy; - } - return taskContainer; - }); // Select one - return schedulerTaskContainer.select(); + return geOrCreateSchedulerTaskContainer(tenancy, schedulerTaskClass).select(); } } @@ -136,6 +116,7 @@ private LoadBalanceSchedulerTask createLoadBalanceSchedul public void run() { Thread.currentThread().setName("Balancer-Thread" + getName()); LOG.info("Thread:[ {} ] is started. ", Thread.currentThread().getName()); + initLoadBalancerSchedulerTasks(); ConsumerManager consumerManager = getScheduler().getSchedulerContext().getOrCreateConsumerManager(); Map tenancyExecutorServices = new HashMap<>(); int residentThreads = 0; @@ -272,6 +253,57 @@ public String getName() { return this.getClass().getSimpleName(); } + /** + * Get or create scheduler task container + * @return container + */ + private SchedulerTaskContainer geOrCreateSchedulerTaskContainer(String tenancy, Class schedulerTaskClass){ + String schedulerTaskName = schedulerTaskClass.getSimpleName(); + return tenancySchedulerTasks.compute(tenancy + "_" + schedulerTaskName,(key, taskContainer) -> { + if (Objects.isNull(taskContainer)){ + LoadBalanceSchedulerTask headSchedulerTask = createLoadBalanceSchedulerTask(schedulerTaskClass); + if (headSchedulerTask instanceof AbstractLoadBalanceSchedulerTask){ + ((AbstractLoadBalanceSchedulerTask) headSchedulerTask) + .setSchedulerLoadBalancer(FlexibleTenancyLoadBalancer.this); + } + headSchedulerTask.setTenancy(tenancy); + try { + getScheduler().submit(headSchedulerTask); + } catch (Exception e){ + // Only if not enough reserved threads in scheduler + throw new ExchangisTaskExecuteException.Runtime("If there is no enough reserved threads in scheduler for tenancy: [" + tenancy + + "], load balance scheduler task: [" + schedulerTaskName + "]? 
please invoke setInitResidentThreads(num) method in consumerManager", e); + } + taskContainer = new SchedulerTaskContainer(headSchedulerTask); + taskContainer.tenancy = tenancy; + LOG.info("Create scheduler task container[ tenancy: {}, load balance scheduler task: {} ]", tenancy, schedulerTaskName); + } + return taskContainer; + }); + } + + /** + * Init to pre create task container for load balancer scheduler tasks + */ + private void initLoadBalancerSchedulerTasks(){ + SchedulerContext schedulerContext = getScheduler().getSchedulerContext(); + if (schedulerContext instanceof ExchangisSchedulerContext){ + Optional.ofNullable(((ExchangisSchedulerContext)schedulerContext).getTenancies()).ifPresent(tenancies -> { + tenancies.forEach(tenancy -> { + // Skip the system tenancy + if (!tenancy.startsWith(".")) { + for (Class registeredTaskClass : registeredTaskClasses) { + geOrCreateSchedulerTaskContainer(tenancy, registeredTaskClass); + } + } + }); + }); + // init scheduler task container for default tenancy + for (Class registeredTaskClass : registeredTaskClasses) { + geOrCreateSchedulerTaskContainer(TenancyParallelGroupFactory.DEFAULT_TENANCY, registeredTaskClass); + } + } + } static class LoopCounter { AtomicInteger containers = new AtomicInteger(0); @@ -282,6 +314,7 @@ static class LoopCounter { List taskContainers = new ArrayList<>(); } + /** * Scheduler */ @@ -333,7 +366,7 @@ private void adjustSegment(int adjustNum){ */ private void scaleOutSegment(int scaleOut){ int newSize = segments.length - scaleOut; - LOG.info("Scale-out segments for tenancy: [{}],scaleOut: [{}], newSize: [{}], scheduler task: [{}]", + LOG.info("Scale-out segments for tenancy: [{}],scaleOut: [{}], newSize: [{}], scheduler_task_type: [{}]", tenancy, scaleOut, newSize, taskName); if (newSize <= 0){ LOG.warn("Scale-out fail, the newSize cannot <= 0"); @@ -341,21 +374,50 @@ private void scaleOutSegment(int scaleOut){ } SchedulerTaskSegment[] newSegments = new SchedulerTaskSegment[newSize]; 
System.arraycopy(segments, 0, newSegments, 0, newSize); - int offset = 0; + int offset = -1; + Map>> waitForCombine = new HashMap<>(); for(int i = newSize; i < segments.length; i ++){ LoadBalanceSchedulerTask schedulerTask = segments[i].loadBalanceSchedulerTask; try { - if (AbstractExchangisSchedulerTask.class.isAssignableFrom(schedulerTask.getClass())) { - ((AbstractExchangisSchedulerTask) schedulerTask).kill(); + SchedulerTaskSegment newSegment = null; + int count = 0; + do { + offset = (offset + 1) % newSize; + newSegment = newSegments[offset]; + count ++; + }while (newSegment.loadBalanceSchedulerTask.getState() != SchedulerEventState.Running() && count <= newSize); + if (offset != 0 && newSegment.loadBalanceSchedulerTask.getState() != SchedulerEventState.Running()){ + // Ignore the first load balance scheduler task + LOG.error("Unable to scale-out segments for tenancy: [{}], reason:" + + " the scheduler task has still in state[{}], scheduler_task_type: [{}], offset: [{}]", + tenancy, newSegment.loadBalanceSchedulerTask.getState(), taskName, offset); + return; } - // Merge the poller - LoadBalancePoller poller = schedulerTask.getOrCreateLoadBalancePoller(); - // Combine the poller - newSegments[offset % newSize].loadBalanceSchedulerTask.getOrCreateLoadBalancePoller().combine(poller); + waitForCombine.compute(offset + "", (key, value) -> { + if (Objects.isNull(value)){ + value = new ArrayList<>(); + } + value.add(schedulerTask); + return value; + }); } catch (Exception e){ - LOG.warn("Scale-out segments for tenancy: [{}] wrong, index: [{}], scheduler task: [{}]", tenancy, i, taskName, e); + LOG.warn("Scale-out segments for tenancy: [{}] wrong, index: [{}], scheduler_task_type: [{}]", tenancy, i, taskName, e); } } + // Kill all + waitForCombine.forEach((key, tasks) -> { + SchedulerTaskSegment newSegment = newSegments[Integer.parseInt(key)]; + tasks.forEach(task -> { + // Kill task + if (AbstractExchangisSchedulerTask.class.isAssignableFrom(task.getClass())) { + 
((AbstractExchangisSchedulerTask) task).kill(); + } + // Merge/Combine the poller + LoadBalancePoller poller = task.getOrCreateLoadBalancePoller(); + LOG.info("Merge/combine [{}] poller form {} to {}", taskName, task.getId(), newSegment.loadBalanceSchedulerTask.getId()); + newSegment.loadBalanceSchedulerTask.getOrCreateLoadBalancePoller().combine(poller); + }); + }); segments = newSegments; } /** @@ -371,13 +433,24 @@ private void scaleInSegment(int scaleIn){ try { LoadBalanceSchedulerTask schedulerTask = createLoadBalanceSchedulerTask(segments[0].loadBalanceSchedulerTask.getClass()); + // + final SchedulerTaskSegment segment = new SchedulerTaskSegment(0, schedulerTask); if (schedulerTask instanceof AbstractLoadBalanceSchedulerTask){ ((AbstractLoadBalanceSchedulerTask) schedulerTask) .setSchedulerLoadBalancer(FlexibleTenancyLoadBalancer.this); + ((AbstractLoadBalanceSchedulerTask) schedulerTask).setScheduleListener( task -> { + segmentLock.writeLock().lock(); + try{ + segment.setWeight(1); + LOG.info("Init the weight of segment to 1, relate scheduler task: {}", task.getName()); + }finally { + segmentLock.writeLock().unlock(); + } + }); } schedulerTask.setTenancy(tenancy); + newSegments[i] = segment; getScheduler().submit(schedulerTask); - newSegments[i] = new SchedulerTaskSegment(1, schedulerTask); } catch (Exception e){ LOG.warn("Scale-in segments for tenancy: [{}] wrong, index: [{}]", tenancy, i, e); } @@ -442,6 +515,11 @@ private static class SchedulerTaskSegment{ this.loadBalanceSchedulerTask = task; this.schedulerId = task.getId(); } + + public void setWeight(int weight){ + this.wt = weight; + this.cwt = this.wt; + } } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java index 4a0cf9f3f..33edd7704 100644 --- 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java @@ -1,5 +1,6 @@ package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; +import org.apache.commons.lang3.StringUtils; import org.apache.linkis.common.conf.CommonVars; import org.apache.linkis.scheduler.AbstractScheduler; import org.apache.linkis.scheduler.SchedulerContext; @@ -8,6 +9,10 @@ import org.apache.linkis.scheduler.queue.GroupFactory; import org.apache.linkis.scheduler.queue.fifoqueue.FIFOSchedulerContextImpl; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + /** * Inherited the AbstractScheduler from linkis-scheduler */ @@ -17,7 +22,15 @@ private static class Constraints{ private static final CommonVars MAX_PARALLEL_PER_TENANCY = CommonVars.apply("wds.exchangis.job.scheduler.consumer.max.parallel.per-tenancy", 1); - private static final CommonVars TENANCY_PATTERN = CommonVars.apply("wds.exchangis.job.scheduler.consumer.tenancies", "hadoop,log"); + /** + * System tenancies + */ + private static final CommonVars SYSTEM_TENANCY_PATTERN = CommonVars.apply("wds.exchangis.job.scheduler.consumer.tenancies-system", ".log"); + + /** + * Custom tenancies + */ + private static final CommonVars CUSTOM_TENANCY_PATTERN = CommonVars.apply("wds.exchangis.job.scheduler.consumer.tenancies", "hadoop"); private static final CommonVars GROUP_INIT_CAPACITY = CommonVars.apply("wds.exchangis.job.scheduler.group.min.capacity", 1000); @@ -40,7 +53,16 @@ public ExchangisGenericScheduler(ExecutorManager executorManager, ConsumerManage @Override public void init() { - this.schedulerContext = new ExchangisSchedulerContext(Constraints.MAX_PARALLEL_PER_TENANCY.getValue(), Constraints.TENANCY_PATTERN.getValue()); + List tenancies = new 
ArrayList<>(); + String sysTenancies = Constraints.SYSTEM_TENANCY_PATTERN.getValue(); + if (StringUtils.isNotBlank(sysTenancies)){ + tenancies.addAll(Arrays.asList(sysTenancies.split(","))); + } + String customTenancies = Constraints.CUSTOM_TENANCY_PATTERN.getValue(); + if (StringUtils.isNotBlank(customTenancies)){ + tenancies.addAll(Arrays.asList(customTenancies.split(","))); + } + this.schedulerContext = new ExchangisSchedulerContext(Constraints.MAX_PARALLEL_PER_TENANCY.getValue(), tenancies); GroupFactory groupFactory = this.schedulerContext.getOrCreateGroupFactory(); if (groupFactory instanceof TenancyParallelGroupFactory){ TenancyParallelGroupFactory tenancyParallelGroupFactory = (TenancyParallelGroupFactory)groupFactory; diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java index 3e2a14292..929961dbb 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java @@ -8,6 +8,7 @@ import java.util.Arrays; import java.util.Collections; +import java.util.List; /** * Contains the executorManager, consumerManager and groupFactory @@ -17,22 +18,20 @@ public class ExchangisSchedulerContext extends FIFOSchedulerContextImpl { /** * Tenancy list */ - private String tenancies; + private final List tenancies; private int maxParallelismPerUser = 1; - public ExchangisSchedulerContext(int maxParallelismPerUser, String tenancies) { + public ExchangisSchedulerContext(int maxParallelismPerUser, List tenancies) { super(Integer.MAX_VALUE); this.maxParallelismPerUser = maxParallelismPerUser; - 
if (StringUtils.isNotBlank(tenancies)){ - this.tenancies = tenancies; - } + this.tenancies = tenancies; } @Override public GroupFactory createGroupFactory() { TenancyParallelGroupFactory parallelGroupFactory = new TenancyParallelGroupFactory(); parallelGroupFactory.setParallelPerTenancy(maxParallelismPerUser); - parallelGroupFactory.setTenancies(StringUtils.isNotBlank(tenancies)? Arrays.asList(tenancies.split(",")) : Collections.emptyList()); + parallelGroupFactory.setTenancies(this.tenancies); return parallelGroupFactory; } @@ -41,4 +40,7 @@ public ConsumerManager createConsumerManager() { throw new ExchangisSchedulerException.Runtime("Must set the consumer manager before scheduling", null); } + public List getTenancies() { + return tenancies; + } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ScheduleListener.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ScheduleListener.java new file mode 100644 index 000000000..790b90d71 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ScheduleListener.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerTask; + +/** + * Schedule listener + * @param + */ +public interface ScheduleListener { + /** + * On schedule event + * @param schedulerTask scheduler task + */ + void onSchedule(T schedulerTask); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java index 73db64e94..72a883a25 
100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java @@ -3,6 +3,7 @@ import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ScheduleListener; import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller; import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.SchedulerLoadBalancer; import org.slf4j.Logger; @@ -27,6 +28,11 @@ public abstract class AbstractLoadBalanceSchedulerTask extends AbstractExchan private boolean pollFinish = false; + /** + * Schedule listener + */ + private ScheduleListener> listener; + public AbstractLoadBalanceSchedulerTask() { super(""); } @@ -38,6 +44,10 @@ protected void schedule() throws ExchangisSchedulerException, ExchangisScheduler LOG.warn("LoadBalancePoller is empty in load balance scheduler task [{}]", getName()); return; } + if (!pollFinish && Objects.nonNull(listener)){ + // Invoke listener + listener.onSchedule(this); + } List pollElements = new ArrayList<>(); LOG.info("Start to iterate the poller in load balance scheduler task [{}]", getName()); while (!pollFinish && null != pollElements) { @@ -95,6 +105,10 @@ public void setSchedulerLoadBalancer(SchedulerLoadBalancer schedulerLoadBalan this.schedulerLoadBalancer = schedulerLoadBalancer; } + public void setScheduleListener(ScheduleListener> listener){ + this.listener = listener; + } + /** * Re push the element into poller with 
balancer * @param element element @@ -118,4 +132,5 @@ public void kill() { * @return */ protected abstract LoadBalancePoller createLoadBalancePoller(); + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java index 2eaa8cbb4..f158fef1d 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java @@ -15,5 +15,4 @@ public interface LoadBalanceSchedulerTask extends ExchangisSchedulerTask { * @return */ LoadBalancePoller getOrCreateLoadBalancePoller(); - } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java index e5033ee9f..3bfc64374 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java @@ -31,7 +31,7 @@ public StatusUpdateSchedulerTask(TaskManager taskManager) } @Override protected void onPoll(LaunchedExchangisTask launchedExchangisTask) throws ExchangisSchedulerException, ExchangisSchedulerRetryException { - LOG.trace("Status update task: [{}] in scheduler: [{}]", launchedExchangisTask.getId(), getName()); + LOG.info("Status update 
task: [{}] in scheduler: [{}]", launchedExchangisTask.getId(), getName()); AccessibleLauncherTask launcherTask = launchedExchangisTask.getLauncherTask(); try{ TaskProgressInfo progressInfo = launcherTask.getProgressInfo(); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java index f65507ca2..4a3df1e06 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java @@ -11,6 +11,7 @@ import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; import com.webank.wedatasphere.exchangis.job.server.execution.AbstractTaskManager; import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskDequeueEvent; import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskExecutionEvent; import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskDeleteEvent; import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskStatusUpdateEvent; @@ -23,6 +24,7 @@ import java.util.*; import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; /** @@ -43,29 +45,45 @@ public class SubmitSchedulerTask extends AbstractExchangisSchedulerTask implemen private Callable submitCondition; private AtomicInteger retryCnt = new AtomicInteger(0); + + /** + * Submittable + */ + private AtomicBoolean submitAble = new AtomicBoolean(false); /** * Each schedule task should has an id * 
*/ public SubmitSchedulerTask(LaunchableExchangisTask task, Callable submitCondition) { - super(String.valueOf(task.getId())); - this.launchableExchangisTask = task; - this.submitCondition = submitCondition; + this(task, submitCondition, false); } public SubmitSchedulerTask(LaunchableExchangisTask task){ - this(task, null); + this(task, null, false); + } + public SubmitSchedulerTask(LaunchableExchangisTask task, Callable submitCondition, boolean checkCondition) { + super(String.valueOf(task.getId())); + this.launchableExchangisTask = task; + this.submitCondition = submitCondition; + if (checkCondition) { + try { + submitAble.set(submitCondition.call()); + } catch (Exception e) { + // Ignore + } + } } @Override protected void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException { - Boolean submitAble; String jobExecutionId = this.launchableExchangisTask.getJobExecutionId(); - try { - submitAble = submitCondition.call(); - } catch (Exception e){ - throw new ExchangisSchedulerRetryException("Error occurred in examining submit condition for task: [" + launchableExchangisTask.getId() + "]", e); + if (!submitAble.get()) { + try { + submitAble.set(submitCondition.call()); + } catch (Exception e) { + throw new ExchangisSchedulerRetryException("Error occurred in examining submit condition for task: [" + launchableExchangisTask.getId() + "]", e); + } } - if (submitAble) { + if (submitAble.get()) { info(jobExecutionId, "Submit the launchable task: [name:{} ,id:{} ] to launcher: [{}], retry_count: {}", launchableExchangisTask.getName(), launchableExchangisTask.getId(), launcher.name(), retryCnt.get()); LaunchedExchangisTask launchedExchangisTask; @@ -75,17 +93,24 @@ protected void schedule() throws ExchangisSchedulerException, ExchangisScheduler launchedExchangisTask = launcher.launch(this.launchableExchangisTask); // launchedExchangisTask = new LaunchedExchangisTask(launchableExchangisTask); launchedExchangisTask.setLaunchTime(launchTime); + 
info(jobExecutionId, "Success to submit task:[name:{}, id:{}] to Linkis [linkis_id: {}, info: {}]", + launchedExchangisTask.getName(), launchedExchangisTask.getId(), launchedExchangisTask.getLinkisJobId(), launchedExchangisTask.getLinkisJobInfo()); } catch (Exception e) { + info(jobExecutionId, "Launch task:[name:{} ,id:{}] fail, possible reason is: [{}]", + launchableExchangisTask.getName(), launchableExchangisTask.getId(), getActualCause(e).getMessage()); if (retryCnt.incrementAndGet() < getMaxRetryNum()) { // Remove the launched task stored - onEvent(new TaskDeleteEvent(String.valueOf(launchableExchangisTask.getId()))); +// onEvent(new TaskDeleteEvent(String.valueOf(launchableExchangisTask.getId()))); throw new ExchangisSchedulerRetryException("Error occurred in invoking launching method for task: [" + launchableExchangisTask.getId() +"]", e); }else { // Update the launched task status to fail - launchedExchangisTask = new LaunchedExchangisTask(); - launchedExchangisTask.setTaskId(String.valueOf(launchableExchangisTask.getId())); - launchedExchangisTask.setJobExecutionId(launchableExchangisTask.getJobExecutionId()); - onEvent(new TaskStatusUpdateEvent(launchedExchangisTask, TaskStatus.Failed)); + // New be failed + // Remove the launched task stored + onEvent(new TaskDeleteEvent(String.valueOf(launchableExchangisTask.getId()))); +// launchedExchangisTask = new LaunchedExchangisTask(); +// launchedExchangisTask.setTaskId(String.valueOf(launchableExchangisTask.getId())); +// launchedExchangisTask.setJobExecutionId(launchableExchangisTask.getJobExecutionId()); +// onEvent(new TaskStatusUpdateEvent(launchedExchangisTask, TaskStatus.Failed)); } throw new ExchangisSchedulerException("Error occurred in invoking launching method for task: [" + launchableExchangisTask.getId() +"]", e); } @@ -98,27 +123,40 @@ protected void schedule() throws ExchangisSchedulerException, ExchangisScheduler successAdd = false; error(jobExecutionId, "Error occurred in adding running task: 
[{}] to taskManager, linkis_id: [{}], should kill the job in linkis!", launchedExchangisTask.getId(), launchedExchangisTask.getLinkisJobId(), e); - LaunchedExchangisTask finalLaunchedExchangisTask1 = launchedExchangisTask; Optional.ofNullable(launchedExchangisTask.getLauncherTask()).ifPresent(launcherTask -> { try { launcherTask.kill(); } catch (ExchangisTaskLaunchException ex){ - LOG.error("Kill linkis_id: [{}] fail", finalLaunchedExchangisTask1.getLinkisJobId(), e); + LOG.error("Kill linkis_id: [{}] fail", launchedExchangisTask.getLinkisJobId(), e); } }); } - if (successAdd && Objects.nonNull(this.loadBalancer)) { - // Add the launchedExchangisTask to the load balance poller - List> loadBalanceSchedulerTasks = this.loadBalancer.choose(launchedExchangisTask); - LaunchedExchangisTask finalLaunchedExchangisTask = launchedExchangisTask; - Optional.ofNullable(loadBalanceSchedulerTasks).ifPresent(tasks -> tasks.forEach(loadBalanceSchedulerTask -> { - loadBalanceSchedulerTask.getOrCreateLoadBalancePoller().push(finalLaunchedExchangisTask); - })); + if (successAdd){ + try { + onEvent(new TaskDequeueEvent(launchableExchangisTask.getId() + "")); + }catch (Exception e){ + // Ignore the exception + LOG.warn("Fail to dequeue the launchable task [{}]", launchableExchangisTask.getId(), e); + } + if (Objects.nonNull(this.loadBalancer)){ + // Add the launchedExchangisTask to the load balance poller + List> loadBalanceSchedulerTasks = this.loadBalancer.choose(launchedExchangisTask); + Optional.ofNullable(loadBalanceSchedulerTasks).ifPresent(tasks -> tasks.forEach(loadBalanceSchedulerTask -> { + loadBalanceSchedulerTask.getOrCreateLoadBalancePoller().push(launchedExchangisTask); + })); + } } } } } + /** + * Check if it can be submitted + * @return boolean + */ + public boolean isSubmitAble(){ + return submitAble.get(); + } /** * Listen the execution event * @param event @@ -134,6 +172,18 @@ public JobLogEvent getJobLogEvent(JobLogEvent.Level level, String executionId, S return new 
JobLogEvent(level, this.getTenancy(), executionId, message, args); } + /** + * Get actual cause + * @param throwable throwable + * @return Throwable + */ + private Throwable getActualCause(Throwable throwable){ + Throwable t = throwable; + while (Objects.nonNull(t.getCause())){ + t = t.getCause(); + } + return t; + } @Override public JobLogListener getJobLogListener() { if (Objects.nonNull(this.taskManager)){ diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java index 84fb7586b..2229dd1f0 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java @@ -24,7 +24,7 @@ public abstract class AbstractTaskObserver implements private static final Logger LOG = LoggerFactory.getLogger(AbstractTaskObserver.class); - private static final int DEFAULT_TASK_OBSERVER_PUBLISH_INTERVAL = 30000; + private static final int DEFAULT_TASK_OBSERVER_PUBLISH_INTERVAL = 10000; private static final int DEFAULT_TASK_OBSERVER_PUBLISH_BATCH = 50; @@ -76,12 +76,16 @@ public AbstractTaskObserver(){ public void run() { Thread.currentThread().setName("Observe-Thread-" + getName()); LOG.info("Thread: [ {} ] is started. 
", Thread.currentThread().getName()); + this.lastPublishTime = System.currentTimeMillis(); while (!isShutdown) { try { List publishedTasks; try { publishedTasks = onPublish(publishBatch); - this.lastPublishTime = System.currentTimeMillis(); + // If list of publish tasks is not empty + if (publishedTasks.size() > 0) { + this.lastPublishTime = System.currentTimeMillis(); + } } catch (ExchangisTaskObserverException e){ e.setMethodName("call_on_publish"); throw e; diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java index 4500adadb..92236d133 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java @@ -38,7 +38,7 @@ public List onPublish(int batchSize) throws ExchangisTaskObserverException { } int fetchTaskSize = cacheTasks.size(); int restBatchSize = batchSize - fetchTaskSize; - if (restBatchSize > 0 && (this.lastPublishTime + this.publishInterval < System.currentTimeMillis())) { + if (restBatchSize > 0 && (this.lastPublishTime + this.publishInterval <= System.currentTimeMillis())) { Optional.ofNullable(onPublishNext(restBatchSize)).ifPresent(cacheTasks::addAll); } return cacheTasks; diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxParallelChooseRuler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxParallelChooseRuler.java new file mode 100644 index 000000000..4da588b73 --- /dev/null +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxParallelChooseRuler.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import org.apache.linkis.scheduler.Scheduler; + +import java.util.List; + +/** + * Max parallel number of tenancy in choose ruler + */ +public class MaxParallelChooseRuler extends MaxUsageTaskChooseRuler{ + + @Override + public List choose(List candidates, Scheduler scheduler) { + List usageChosen = super.choose(candidates, scheduler); + return null; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java index 17b0e0b9f..e81824fe0 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java @@ -48,18 +48,21 @@ public void subscribe(List publishedTasks) throws Excha LaunchableExchangisTask launchableExchangisTask = iterator.next(); if (Objects.nonNull(launchableExchangisTask)){ try { + // Check the submittable condition first in order to avoid the duplicate scheduler tasks SubmitSchedulerTask submitSchedulerTask = new SubmitSchedulerTask(launchableExchangisTask, () -> { // check the status of launchedTask // insert or update launched task, status as TaskStatus.Scheduler return taskObserverService.subscribe(launchableExchangisTask); - }); - submitSchedulerTask.setTenancy(launchableExchangisTask.getExecuteUser()); - try { - taskExecution.submit(submitSchedulerTask); - } catch 
(Exception e) { - LOG.warn("Fail to async submit launchable task: [ id: {}, name: {}, job_execution_id: {} ]" - , launchableExchangisTask.getId(), launchableExchangisTask.getName(), launchableExchangisTask.getJobExecutionId(), e); + }, true); + if (submitSchedulerTask.isSubmitAble()) { + submitSchedulerTask.setTenancy(launchableExchangisTask.getExecuteUser()); + try { + taskExecution.submit(submitSchedulerTask); + } catch (Exception e) { + LOG.warn("Fail to async submit launchable task: [ id: {}, name: {}, job_execution_id: {} ]" + , launchableExchangisTask.getId(), launchableExchangisTask.getName(), launchableExchangisTask.getJobExecutionId(), e); + } } } catch (Exception e){ LOG.error("Exception in subscribing launchable tasks, please check your status of database and network", e); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java index 5b4a991dc..1eea657ce 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java @@ -170,7 +170,12 @@ public LogResult logsFromPageAndPath(String logPath, LogQuery logQuery) { rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); } if (!rowIgnore) { - logs.add(new String(line.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8)); + if (line.contains("password")) { + LOG.info("have error information"); + } + if (!line.contains("password")) { + logs.add(new String(line.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8)); + } readLine += 1; } } diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java index 2368096cc..d2ec8ba6a 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java @@ -69,5 +69,7 @@ public interface ExchangisJobEntityDao { */ void deleteBatch(@Param("ids") List ids); + List getByNameAndProjectId(@Param("jobName") String jobName, @Param("projectId") Long projectId); + List getByNameWithProjectId(@Param("jobName") String jobName, @Param("projectId") Long projectId); } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobFunctionDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobFunctionDao.java new file mode 100644 index 000000000..63ea32c53 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobFunctionDao.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.job.server.mapper; + +import com.webank.wedatasphere.exchangis.job.server.vo.JobFunction; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * @author davidhua + * 2020/4/23 + */ +public interface JobFunctionDao { + /** + * List name referenced + * @param type + * @parm tabName + * @return + */ + List listRefNames(@Param("tabName") String tabName, @Param("type") String type); + + /** + * List function entities + * @param type + * @return + */ + List listFunctions(@Param("tabName") String tabName, @Param("type") String type); +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformProcessorDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformProcessorDao.java new file mode 100644 index 000000000..72e26fd85 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformProcessorDao.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.job.server.mapper; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor; + +/** + * Job transform processor dao + */ +public interface JobTransformProcessorDao { + /** + * Save one entity + * @param processor processor entity + * @return id + */ + Long saveOne(TransformProcessor processor); + + /** + * Get the processor detail (with code content) + * @param id id + * @return processor + */ + TransformProcessor getProcDetail(Long id); + + /** + * Get the processor basic information + * @param id id + * @return processor + */ + TransformProcessor getProcInfo(Long id); + + void updateOne(TransformProcessor processor); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformRuleDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformRuleDao.java new file mode 100644 index 000000000..438b5eebb --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformRuleDao.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.job.server.mapper; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * Transform dao in rendering job + */ +public interface JobTransformRuleDao { + + /** + * Get 
transform rule list + * @param ruleType rule type + * @param dataSourceType data source type + * @return rule list + */ + List getTransformRules(@Param("ruleType") String ruleType, + @Param("dataSourceType") String dataSourceType); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml index db6f4afbd..dbac7c66b 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml @@ -140,7 +140,7 @@ AND job_type = #{jobType} - AND name like concat('%', #{jobName}, '%') + AND name like concat('%', #{jobName}, '%') escape '/' ORDER BY create_time DESC @@ -168,6 +168,14 @@ + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobFunctionDao.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobFunctionDao.xml new file mode 100644 index 000000000..f35747653 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobFunctionDao.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformProcessorMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformProcessorMapper.xml new file mode 100644 index 000000000..0ec314310 --- /dev/null +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformProcessorMapper.xml @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + exchangis_job_transform_processor + + + + INSERT INTO + (`job_id`, `code_content`, `code_language`, `code_bml_resourceId`, `code_bml_version`, `creator`) + VALUES(#{jobId}, #{codeContent}, #{codeLanguage}, #{codeBmlResourceId,jdbcType=VARCHAR}, #{codeBmlVersion,jdbcType=VARCHAR}, #{creator}); + + + + + + + + UPDATE + SET + `code_content` = #{codeContent}, + `code_language` = #{codeLanguage}, + `code_bml_resourceId` = #{codeBmlResourceId}, + `code_bml_version` = #{codeBmlVersion} + WHERE `id` = #{id}; + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformRuleMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformRuleMapper.xml new file mode 100644 index 000000000..197188db5 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformRuleMapper.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + exchangis_job_transform_rule + + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml index 684bdab1c..ac29d52cc 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml @@ -49,7 +49,7 @@ - delete * from + delete from where id = #{taskId} @@ -80,7 +80,7 @@ - select 
DISTINCT dp.id, dp.name, dp.description, dp.create_user, dp.create_time, dp.last_update_user,dp.last_update_time, dp.project_labels,dp.domain,dp.view_users, dp.exec_users, dp.edit_users, du.priv_user - from exchangis_project_info dp inner join exchangis_project_user du on dp.id = du.project_id where du.priv_user = #{createUser} + SELECT DISTINCT dp.id, dp.name, dp.description, + dp.create_user, dp.create_time, dp.last_update_user,dp.last_update_time, dp.project_labels,dp.domain, + dp.view_users, dp.exec_users, dp.edit_users, du.priv_user + FROM exchangis_project_info dp + INNER JOIN exchangis_project_user du + on dp.id = du.project_id and du.priv_user = #{createUser} - ORDER BY create_time DESC + ORDER BY dp.create_time DESC @@ -93,7 +94,7 @@ - @@ -117,7 +118,7 @@ diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/mapper/impl/ProjectUserMapper.xml b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectUserMapper.xml similarity index 71% rename from exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/mapper/impl/ProjectUserMapper.xml rename to exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectUserMapper.xml index c4ac07f5c..f4b3709a6 100644 --- a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/mapper/impl/ProjectUserMapper.xml +++ b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectUserMapper.xml @@ -1,9 +1,9 @@ - + - + @@ -19,6 +19,19 @@ exchangis_project_user + + delete from + + where project_id = #{projectId} + + + diff --git a/exchangis-project/exchangis-project-server/pom.xml b/exchangis-project/exchangis-project-server/pom.xml index 8426b6e94..0853557d1 
100644 --- a/exchangis-project/exchangis-project-server/pom.xml +++ b/exchangis-project/exchangis-project-server/pom.xml @@ -5,7 +5,7 @@ exchangis-project com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -18,33 +18,24 @@ - org.apache.linkis - linkis-module - ${linkis.version} - - - - org.apache.linkis - linkis-mybatis - ${linkis.version} + com.webank.wedatasphere.exchangis + exchangis-project-provider + 1.1.2 - - org.apache.commons - commons-math3 - 3.6.1 + com.webank.wedatasphere.exchangis + exchangis-job-server + 1.1.2 com.webank.wedatasphere.exchangis exchangis-dao - 1.0.0 - compile + 1.1.2 - com.webank.wedatasphere.exchangis exchangis-job-common - 1.0.0 + 1.1.2 diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectExceptionCode.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectExceptionCode.java new file mode 100644 index 000000000..e1874cd63 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectExceptionCode.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.project.server.exception; + +/** + * @author jefftlin + * @create 2022-09-13 + **/ +public enum ExchangisProjectExceptionCode { + + UNSUPPORTED_OPERATION(32001); + + private int code; + + public int getCode() { + return code; + } + + ExchangisProjectExceptionCode(int code) { + this.code = code; + } +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java index d725344c4..b6661f1f5 100644 --- 
a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java @@ -1,15 +1,26 @@ package com.webank.wedatasphere.exchangis.project.server.restful; +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; import com.webank.wedatasphere.exchangis.common.pager.PageResult; import com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; -import com.webank.wedatasphere.exchangis.project.server.entity.ExchangisProject; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import com.webank.wedatasphere.exchangis.project.entity.domain.OperationType; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; -import com.webank.wedatasphere.exchangis.project.server.utils.AuthorityUtils; +import com.webank.wedatasphere.exchangis.project.server.utils.ProjectAuthorityUtils; +import com.webank.wedatasphere.exchangis.project.server.utils.ExchangisProjectConfiguration; import com.webank.wedatasphere.exchangis.project.server.utils.ExchangisProjectRestfulUtils; -import com.webank.wedatasphere.exchangis.project.server.vo.ExchangisProjectInfo; -import com.webank.wedatasphere.exchangis.project.server.vo.ProjectQueryVo; -import org.apache.commons.lang.StringUtils; +import 
com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectUserVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ProjectQueryVo; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.math3.util.Pair; import org.apache.linkis.common.utils.JsonUtils; import org.apache.linkis.server.Message; @@ -41,22 +52,30 @@ public class ExchangisProjectRestfulApi { @Resource private ProjectService projectService; + /** + * JobInfo service + */ + @Resource + private JobInfoService jobInfoService; + /** * Project query * @param request http request * @param queryVo query vo * @param current current page * @param size size - * @param name name * @return message */ @RequestMapping( value = "projects", method = {RequestMethod.POST, RequestMethod.GET}) public Message queryProjects(HttpServletRequest request, @RequestBody ProjectQueryVo queryVo, @RequestParam(value = "current", required = false) Integer current, - @RequestParam(value = "size", required = false) Integer size, - @RequestParam(value = "name", required = false) String name) { - String username = SecurityFilter.getLoginUsername(request); + @RequestParam(value = "size", required = false) Integer size) { + String username = UserUtils.getLoginUser(request); + String name = queryVo.getName(); + if (StringUtils.isNotBlank(name)) { + name = name.replaceAll("_", "/_"); + } Optional.ofNullable(current).ifPresent(queryVo::setCurrent); Optional.ofNullable(size).ifPresent(queryVo::setSize); Optional.ofNullable(name).ifPresent(queryVo::setName); @@ -66,7 +85,7 @@ public Message queryProjects(HttpServletRequest request, return pageResult.toMessage(); } catch (Exception t) { LOG.error("Failed to query project list for user {}", username, t); - return Message.error("Failed to query project list (获取工程列表失败)"); + return Message.error("Failed to query project list (获取项目列表失败)"); } } @@ -83,15 +102,15 @@ public Message 
queryProjectDetail(HttpServletRequest request, try { ExchangisProjectInfo project = projectService.getProjectDetailById(projectId); if (Objects.isNull(project)){ - return Message.error("Not found the project (找不到对应工程)"); + return Message.error("Not found the project (找不到对应项目)"); } - if (!hasAuthority(username, project)){ - return Message.error("You have no permission to query (没有工程查看权限)"); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, project, OperationType.PROJECT_QUERY)){ + return Message.error("You have no permission to query (没有项目查看权限)"); } return Message.ok().data("item", project); } catch (Exception t) { LOG.error("failed to get project detail for user {}", username, t); - return Message.error("Fail to get project detail (获取工程详情失败)"); + return Message.error("Fail to get project detail (获取项目详情失败)"); } } @@ -105,22 +124,38 @@ public Message queryProjectDetail(HttpServletRequest request, @RequestMapping(value = "createProject", method = RequestMethod.POST) public Message createProject(@Validated @RequestBody ExchangisProjectInfo projectVo, BindingResult result, HttpServletRequest request) { + if (ExchangisProjectConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to create (没有项目创建权限)"); + } if (result.hasErrors()){ return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); } - String username = SecurityFilter.getLoginUsername(request); + + String username = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + if (StringUtils.isBlank(projectVo.getViewUsers()) || !StringUtils.contains(projectVo.getViewUsers(), username)) { + projectVo.setViewUsers(username + "," + projectVo.getViewUsers()); + } + if (StringUtils.isBlank(projectVo.getEditUsers()) || !StringUtils.contains(projectVo.getEditUsers(), username)) { + projectVo.setEditUsers(username + "," + projectVo.getEditUsers()); + } + if (StringUtils.isBlank(projectVo.getExecUsers()) || 
!StringUtils.contains(projectVo.getExecUsers(), username)) { + projectVo.setExecUsers(username + "," + projectVo.getExecUsers()); + } + try { if (projectService.existsProject(null, projectVo.getName())){ - return Message.error("Have the same name project (存在同名工程)"); + return Message.error("Have the same name project (存在同名项目)"); } LOG.info("CreateProject vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); long projectId = projectService.createProject(projectVo, username); - return ExchangisProjectRestfulUtils.dealOk("创建工程成功", + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, String.valueOf(projectId), "Project name is: " + projectVo.getName(), OperateTypeEnum.CREATE, request); + return ExchangisProjectRestfulUtils.dealOk("创建项目成功", new Pair<>("projectName", projectVo.getName()), new Pair<>("projectId", projectId)); } catch (Exception t) { LOG.error("Failed to create project for user {}", username, t); - return Message.error("Fail to create project (创建工程失败)"); + return Message.error("Fail to create project (创建项目失败)"); } } /** @@ -131,17 +166,15 @@ public Message createProject(@Validated @RequestBody ExchangisProjectInfo projec */ @RequestMapping( value = "/check/{name}", method = RequestMethod.POST) public Message getProjectByName(HttpServletRequest request, @PathVariable("name") String name) { - String username = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); try { ExchangisProjectInfo projectInfo = projectService.selectByName(name); - return ExchangisProjectRestfulUtils.dealOk("根据名字获取工程成功", + return ExchangisProjectRestfulUtils.dealOk("根据名字获取项目成功", new Pair<>("projectInfo",projectInfo)); } catch (Exception t) { - LOG.error("Failed to delete project for user {}", username, t); - return Message.error("Failed to delete project (根据名字获取工程失败)"); + LOG.error("Failed to get project for user {}", username, t); + return Message.error("Failed to get project (根据名字获取项目失败)"); } 
- - } @@ -154,34 +187,46 @@ public Message getProjectByName(HttpServletRequest request, @PathVariable("name" @RequestMapping( value = "updateProject", method = RequestMethod.PUT) public Message updateProject(@Validated({UpdateGroup.class, Default.class}) @RequestBody ExchangisProjectInfo projectVo , BindingResult result, HttpServletRequest request) { + if (ExchangisProjectConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to update (没有项目更新权限)"); + } if (result.hasErrors()){ return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); } - String username = SecurityFilter.getLoginUsername(request); - try { - ExchangisProjectInfo projectStored = projectService.getProjectById(Long.valueOf(projectVo.getId())); + String oringinUser = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); + //String username = SecurityFilter.getLoginUsername(request); + if (StringUtils.isBlank(projectVo.getViewUsers()) || !StringUtils.contains(projectVo.getViewUsers(), username)) { + projectVo.setViewUsers(username + "," + projectVo.getViewUsers()); + } + if (StringUtils.isBlank(projectVo.getEditUsers()) || !StringUtils.contains(projectVo.getEditUsers(), username)) { + projectVo.setEditUsers(username + "," + projectVo.getEditUsers()); + } + if (StringUtils.isBlank(projectVo.getExecUsers()) || !StringUtils.contains(projectVo.getExecUsers(), username)) { + projectVo.setExecUsers(username + "," + projectVo.getExecUsers()); + } - if (!hasAuthority(username, projectStored)) { + try { + ExchangisProjectInfo projectStored = projectService.getProjectDetailById(Long.valueOf(projectVo.getId())); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectStored, OperationType.PROJECT_ALTER)) { return Message.error("You have no permission to update (没有项目的更新权限)"); } - /*if (!AuthorityUtils.hasOwnAuthority(Long.parseLong(projectVo.getId()), username) && 
!AuthorityUtils.hasEditAuthority(Long.parseLong(projectVo.getId()), username)) { - return Message.error("You have no permission to update (没有编辑权限,无法更新项目)"); - }*/ - String domain = projectStored.getDomain(); if (StringUtils.isNotBlank(domain) && !ExchangisProject.Domain.STANDALONE.name() .equalsIgnoreCase(domain)){ return Message.error("Cannot update the outer project (无法更新来自 " + domain + " 的外部项目)"); } + LOG.info("UpdateProject vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); projectService.updateProject(projectVo, username); - return ExchangisProjectRestfulUtils.dealOk("更新工程成功", + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, projectVo.getId(), "Project name is: " + projectVo.getName(), OperateTypeEnum.UPDATE, request); + return ExchangisProjectRestfulUtils.dealOk("更新项目成功", new Pair<>("projectName", projectVo.getName()), new Pair<>("projectId", projectVo.getId())); } catch (Exception t) { LOG.error("Failed to update project for user {}", username, t); - return Message.error("Fail to update project (更新工程失败)"); + return Message.error("Fail to update project (更新项目失败)"); } } @@ -191,39 +236,61 @@ public Message updateProject(@Validated({UpdateGroup.class, Default.class}) @Req * @param id project id * @return */ - @RequestMapping( value = "/projects/{id:\\d+}", method = RequestMethod.DELETE) + @DeleteMapping( value = "/projects/{id:\\d+}") public Message deleteProject(HttpServletRequest request, @PathVariable("id") Long id) { - String username = SecurityFilter.getLoginUsername(request); + if (ExchangisProjectConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to update (没有编辑权限,无法删除项目)"); + } + String oringinUser = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); try { - ExchangisProjectInfo projectInfo = projectService.getProjectById(id); - - if (!hasAuthority(username, projectInfo)) { - return Message.error("You have no 
permission to delete (删除工程失败)"); + ExchangisProjectInfo projectInfo = projectService.getProjectDetailById(id); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectInfo, OperationType.PROJECT_ALTER)) { + return Message.error("You have no permission to delete (没有权限删除项目!)"); } - /* if (!AuthorityUtils.hasOwnAuthority(id, username) && !AuthorityUtils.hasEditAuthority(id, username)) { - return Message.error("You have no permission to update (没有编辑权限,无法删除项目)"); - }*/ String domain = projectInfo.getDomain(); if (StringUtils.isNotBlank(domain) && !ExchangisProject.Domain.STANDALONE.name() .equalsIgnoreCase(domain)){ return Message.error("Cannot delete the outer project (无法删除来自 " + domain + " 的外部项目)"); } + + // 校验是否有任务 + ExchangisJobQueryVo queryVo = new ExchangisJobQueryVo(id, null, null); + PageResult exchangisJobVoPageResult = jobInfoService.queryJobList(queryVo); + if (Objects.nonNull(exchangisJobVoPageResult) && Objects.nonNull(exchangisJobVoPageResult.getList()) + && exchangisJobVoPageResult.getList().size() > 0) { + return Message.error("Jobs already exist under this project and the project cannot be deleted (该项目下已存在子任务,无法删除)"); + } + projectService.deleteProject(id); - return ExchangisProjectRestfulUtils.dealOk("删除工程成功888"); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, id.toString(), "Project", OperateTypeEnum.DELETE, request); + return ExchangisProjectRestfulUtils.dealOk("删除项目成功"); } catch (Exception t) { LOG.error("Failed to delete project for user {}", username, t); - return Message.error("Failed to delete project (删除工程失败)"); + return Message.error("Failed to delete project (删除项目失败)"); } } /** - * TODO complete the authority strategy - * @param username username - * @param project project + * get project permission + * @param request http request + * @param id project id * @return */ - private boolean hasAuthority(String username, ExchangisProjectInfo project){ - return Objects.nonNull(project) && 
username.equals(project.getCreateUser()); + @RequestMapping( value = "/getProjectPermission/{id:\\d+}", method = RequestMethod.GET) + public Message getProjectPermission(HttpServletRequest request, @PathVariable("id") Long id) { + String username = SecurityFilter.getLoginUsername(request); + try { + ExchangisProjectUserVo exchangisProjectUserVo = new ExchangisProjectUserVo(id, username); + ExchangisProjectUser exchangisProjectUser = projectService.queryProjectUser(exchangisProjectUserVo); + + return ExchangisProjectRestfulUtils.dealOk("根据项目ID和用户获取项目权限信息成功", + new Pair<>("exchangisProjectUser", new ExchangisProjectUserVo(exchangisProjectUser))); + } catch (Exception t) { + LOG.error("Failed to get exchangisProjectUser for project {} and privUser {}", id, username); + return Message.error("Failed to get project (根据项目ID和用户获取项目权限信息失败)"); + } } + } diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java index 46253968d..2e8457219 100644 --- a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java @@ -1,14 +1,21 @@ package com.webank.wedatasphere.exchangis.project.server.restful.external; +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import 
com.webank.wedatasphere.exchangis.common.pager.PageResult; import com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; -import com.webank.wedatasphere.exchangis.project.server.entity.ExchangisProject; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import com.webank.wedatasphere.exchangis.project.entity.domain.OperationType; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectAppVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; -import com.webank.wedatasphere.exchangis.project.server.utils.AuthorityUtils; import com.webank.wedatasphere.exchangis.project.server.utils.ExchangisProjectRestfulUtils; -import com.webank.wedatasphere.exchangis.project.server.vo.ExchangisProjectAppVo; -import com.webank.wedatasphere.exchangis.project.server.vo.ExchangisProjectAppVo; -import com.webank.wedatasphere.exchangis.project.server.vo.ExchangisProjectInfo; -import org.apache.commons.lang.StringUtils; +import com.webank.wedatasphere.exchangis.project.server.utils.ProjectAuthorityUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.math3.util.Pair; import org.apache.linkis.common.utils.JsonUtils; import org.apache.linkis.server.Message; @@ -38,6 +45,12 @@ public class ExchangisProjectDssAppConnRestfulApi { @Resource private ProjectService projectService; + /** + * JobInfo service + */ + @Resource + private JobInfoService jobInfoService; + @RequestMapping(value = "", method = RequestMethod.POST) public Message createProject(@Validated @RequestBody ExchangisProjectAppVo project, BindingResult result, HttpServletRequest request){ @@ -45,7 +58,20 @@ public Message createProject(@Validated @RequestBody ExchangisProjectAppVo proje if 
(result.hasErrors()){ return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); } - String username = SecurityFilter.getLoginUsername(request); + + String oringinUser = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); + if (StringUtils.isBlank(projectVo.getViewUsers()) || !StringUtils.contains(projectVo.getViewUsers(), username)) { + projectVo.setViewUsers(username + projectVo.getViewUsers()); + } + if (StringUtils.isBlank(projectVo.getEditUsers()) || !StringUtils.contains(projectVo.getEditUsers(), username)) { + projectVo.setEditUsers(username + projectVo.getEditUsers()); + } + if (StringUtils.isBlank(projectVo.getExecUsers()) || !StringUtils.contains(projectVo.getExecUsers(), username)) { + projectVo.setExecUsers(username + projectVo.getExecUsers()); + + } + try { LOG.info("CreateProject from DSS AppConn, vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); if (projectService.existsProject(null, projectVo.getName())){ @@ -53,6 +79,7 @@ public Message createProject(@Validated @RequestBody ExchangisProjectAppVo proje } long projectIdd = projectService.createProject(projectVo, username); String projectId = String.valueOf(projectIdd); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, String.valueOf(projectId), "Project name is: " + projectVo.getName(), OperateTypeEnum.CREATE, request); return ExchangisProjectRestfulUtils.dealOk("创建工程成功", new Pair<>("projectName", projectVo.getName()), new Pair<>("projectId", projectId)); @@ -75,24 +102,17 @@ public Message updateProject(@PathVariable("id") Long id, @Validated({UpdateGrou if (result.hasErrors()){ return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); } - String username = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); try { - ExchangisProjectInfo projectStored = 
projectService.getProjectById(Long.valueOf(projectVo.getId())); - - if (!hasAuthority(username, projectStored)) { + ExchangisProjectInfo projectStored = projectService.getProjectDetailById(Long.valueOf(projectVo.getId())); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectStored, OperationType.PROJECT_ALTER)) { return Message.error("You have no permission to update (没有项目的更新权限)"); } -// if (!AuthorityUtils.hasOwnAuthority(Long.parseLong(projectVo.getId()), username) && !AuthorityUtils.hasEditAuthority(Long.parseLong(projectVo.getId()), username)) { -// return Message.error("You have no permission to update (没有编辑权限,无法更新项目)"); -// } - String domain = projectStored.getDomain(); - if (StringUtils.isNotBlank(domain) && !ExchangisProject.Domain.STANDALONE.name() - .equalsIgnoreCase(domain)){ - return Message.error("Cannot update the outer project (无法更新来自 " + domain + " 的外部项目)"); - } LOG.info("UpdateProject vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); projectService.updateProject(projectVo, username); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, id.toString(), "Project name is: " + projectVo.getName(), OperateTypeEnum.UPDATE, request); return ExchangisProjectRestfulUtils.dealOk("更新工程成功", new Pair<>("projectName", projectVo.getName()), new Pair<>("projectId", projectVo.getId())); @@ -110,19 +130,25 @@ public Message updateProject(@PathVariable("id") Long id, @Validated({UpdateGrou */ @RequestMapping( value = "/{name}", method = RequestMethod.POST) public Message deleteProject(HttpServletRequest request, @PathVariable("name") String name) { - String username = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); try { ExchangisProjectInfo projectInfo = projectService.selectByName(name); -// if (!hasAuthority(username, projectInfo)){ -// return Message.error("You have no permission to 
delete (删除工程失败)"); -// } -// String domain = projectInfo.getDomain(); -// if (StringUtils.isNotBlank(domain) && !ExchangisProject.Domain.STANDALONE.name() -// .equalsIgnoreCase(domain)){ -// return Message.error("Cannot delete the outer project (无法删除来自 " + domain + " 的外部项目)"); -// } + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectInfo, OperationType.PROJECT_ALTER)) { + return Message.error("You have no permission to delete (删除项目失败)"); + } + + // 校验是否有任务 + ExchangisJobQueryVo queryVo = new ExchangisJobQueryVo(Long.parseLong(projectInfo.getId()), null, null); + PageResult exchangisJobVoPageResult = jobInfoService.queryJobList(queryVo); + if (Objects.nonNull(exchangisJobVoPageResult) && Objects.nonNull(exchangisJobVoPageResult.getList()) + && exchangisJobVoPageResult.getList().size() > 0) { + return Message.error("Jobs already exist under this project and the project cannot be deleted (该项目下已存在子任务,无法删除)"); + } + projectService.deleteProjectByName(name); - return ExchangisProjectRestfulUtils.dealOk("删除工程成功777"); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, "", "Project name is: " + name, OperateTypeEnum.DELETE, request); + return ExchangisProjectRestfulUtils.dealOk("删除工程成功"); } catch (Exception t) { LOG.error("Failed to delete project for user {}", username, t); return Message.error("Failed to delete project (删除工程失败)"); @@ -139,7 +165,7 @@ public Message deleteProject(HttpServletRequest request, @PathVariable("name") S */ @RequestMapping( value = "/check/{name}", method = RequestMethod.POST) public Message getProjectByName(HttpServletRequest request, @PathVariable("name") String name) { - String username = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); try { ExchangisProjectInfo projectInfo = projectService.selectByName(name); return ExchangisProjectRestfulUtils.dealOk("根据名字获取工程成功", @@ -148,17 +174,6 @@ public Message getProjectByName(HttpServletRequest request, 
@PathVariable("name" LOG.error("Failed to delete project for user {}", username, t); return Message.error("Failed to delete project (根据名字获取工程失败)"); } - - } - - /** - * @param username username - * @param project project - * @return - */ - private boolean hasAuthority(String username, ExchangisProjectInfo project){ - return Objects.nonNull(project) && username.equals(project.getCreateUser()); - } } diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectJobDssAppConnRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectJobDssAppConnRestfulApi.java new file mode 100644 index 000000000..fc23ec0e0 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectJobDssAppConnRestfulApi.java @@ -0,0 +1,109 @@ +package com.webank.wedatasphere.exchangis.project.server.restful.external; + +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectMapper; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectCopyService; +import com.webank.wedatasphere.exchangis.project.server.service.impl.ProjectExportServiceImpl; +import com.webank.wedatasphere.exchangis.project.server.service.impl.ProjectImportServerImpl; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.Context; +import java.rmi.ServerException; +import java.util.Map; + +/** + * Define to support the app conn, in order to distinguish from the inner api + */ +@RestController +@RequestMapping(value = "/dss/exchangis/main/appJob", produces = {"application/json;charset=utf-8"}) +public class ExchangisProjectJobDssAppConnRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(ExchangisProjectJobDssAppConnRestfulApi.class); + + @Resource + private ProjectImportServerImpl projectImportServer; + + @Resource + private ProjectExportServiceImpl projectExportService; + + @Resource + private ProjectCopyService projectCopyService; + + @Autowired + private ProjectMapper projectMapper; + + @RequestMapping(value = "/import", method = RequestMethod.POST) + public Message importJob(@Context HttpServletRequest request, @RequestBody Map params) throws ServerException, ExchangisJobServerException { + + Message response = null; + String userName = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + try { + LOG.info("param: {}", params); + /*if (!hasAuthority(userName, jobInfoService.getJob(((Integer) params.get("sqoopIds")).longValue(), true))) { + return Message.error("You have no permission to import (没有导入权限)"); + }*/ + response = projectImportServer.importProject(request, params); + LOG.info("import job success"); + } catch (ExchangisJobServerException e) { + String message = "Fail import job [ id: " + params + "] (导入任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, userName, TargetTypeEnum.JOB, "", "Export parameter is: " + params.toString(), OperateTypeEnum.IMPORT, request); + return response; + + } + + @RequestMapping(value = "/export", 
method = RequestMethod.POST) + public Message exportJob(@Context HttpServletRequest request, @RequestBody Map params) throws ServerException, ExchangisJobServerException { + String userName = UserUtils.getLoginUser(request); + + LOG.info("export function params: {}", params); + String oringinUser = SecurityFilter.getLoginUsername(request); + Message response = null; + try { + /*if (!hasAuthority(userName, jobInfoService.getJob(((Integer) params.get("sqoopIds")).longValue(), true))) { + return Message.error("You have no permission to export (没有导出权限)"); + }*/ + response = projectExportService.exportProject(params, userName, request); + LOG.info("export job success"); + } catch (Exception e) { + String message = "Fail Export job [ id: " + params + "] (导出任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, userName, TargetTypeEnum.JOB, "", "Export parameter is: " + params.toString(), OperateTypeEnum.EXPORT, request); + return response; + } + + @RequestMapping(value = "/copy", method = RequestMethod.POST) + public Message copy(@Context HttpServletRequest request, @RequestBody Map params) throws ServerException { + String userName = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + + LOG.info("copy function params: {}", params); + Message response = null; + try { + response = projectCopyService.copy(params, userName, request); + LOG.info("copy node success"); + } catch (Exception e) { + String message = "Fail Copy project [ id: " + params + "] (导出任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, userName, TargetTypeEnum.JOB, "", "Copy parameter is: " + params.toString(), OperateTypeEnum.COPY, request); + return response; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/IProjectCopyService.java 
b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectCopyService.java similarity index 83% rename from exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/IProjectCopyService.java rename to exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectCopyService.java index e5d5f724d..9260d02bf 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/IProjectCopyService.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectCopyService.java @@ -1,4 +1,4 @@ -package com.webank.wedatasphere.exchangis.job.server.service; +package com.webank.wedatasphere.exchangis.project.server.service; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; @@ -11,7 +11,7 @@ * @author tikazhang * @Date 2022/4/24 21:11 */ -public interface IProjectCopyService { +public interface ProjectCopyService { /** * Copy node diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectExportService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectExportService.java new file mode 100644 index 000000000..81fddc9eb --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectExportService.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.project.server.service; + +import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import 
org.apache.linkis.server.Message; + +import javax.servlet.http.HttpServletRequest; +import java.rmi.ServerException; +import java.util.Map; +import java.util.Set; + +/** + * @author tikazhang + * @Date 2022/3/15 9:30 + */ +public interface ProjectExportService { + + /** + * Export exchangis job to BML. + * + * @param username params + * @return + */ + Message exportProject(Map params, String username, HttpServletRequest request) throws ExchangisJobServerException, ServerException; + + ExportedProject export(Long projectId, Map> moduleIdsMap, boolean partial, HttpServletRequest request) throws ExchangisJobServerException; + + Map> getModuleIdsMap(Map params); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/IProjectImportService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectImportService.java similarity index 76% rename from exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/IProjectImportService.java rename to exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectImportService.java index efb451acd..34b1a2219 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/IProjectImportService.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectImportService.java @@ -1,8 +1,7 @@ -package com.webank.wedatasphere.exchangis.job.server.service; +package com.webank.wedatasphere.exchangis.project.server.service; import com.webank.wedatasphere.exchangis.job.server.dto.IdCatalog; import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; -import com.webank.wedatasphere.exchangis.project.server.exception.ExchangisProjectErrorException; import 
org.apache.linkis.server.Message; import javax.servlet.http.HttpServletRequest; @@ -13,7 +12,7 @@ * @author tikazhang * @Date 2022/3/15 10:01 */ -public interface IProjectImportService { +public interface ProjectImportService { Message importProject(HttpServletRequest req, Map params) throws ExchangisJobServerException, ServerException; IdCatalog importOpt(String projectJson, Long projectId, String versionSuffix, String userName, String importType) throws ExchangisJobServerException; diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java index 652f30dc5..9c6556a34 100644 --- a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java @@ -3,12 +3,10 @@ import com.webank.wedatasphere.exchangis.common.pager.PageResult; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; -import com.webank.wedatasphere.exchangis.project.server.entity.ExchangisProject; -import com.webank.wedatasphere.exchangis.project.server.vo.ExchangisProjectInfo; -import com.webank.wedatasphere.exchangis.project.server.vo.ProjectQueryVo; - -import javax.servlet.http.HttpServletRequest; -import java.util.List; +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectUserVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ProjectQueryVo; /** * Project query @@ -35,7 +33,7 @@ public interface ProjectService { * @param projectVo project vo * @param 
userName userName */ - void updateProject(ExchangisProjectInfo projectVo, String userName); + void updateProject(ExchangisProjectInfo projectVo, String userName); /** * Query the page result @@ -67,4 +65,6 @@ public interface ProjectService { ExchangisProjectInfo selectByName(String name); + ExchangisProjectUser queryProjectUser(ExchangisProjectUserVo exchangisProjectUserVo); + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ProjectCopyServiceImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectCopyServiceImpl.java similarity index 80% rename from exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ProjectCopyServiceImpl.java rename to exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectCopyServiceImpl.java index e6f935b32..58bb7e94a 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ProjectCopyServiceImpl.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectCopyServiceImpl.java @@ -1,4 +1,4 @@ -package com.webank.wedatasphere.exchangis.job.server.service.impl; +package com.webank.wedatasphere.exchangis.project.server.service.impl; import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.Lists; @@ -9,8 +9,8 @@ import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; import com.webank.wedatasphere.exchangis.job.server.mapper.ExchangisJobEntityDao; import com.webank.wedatasphere.exchangis.job.server.restful.external.ModuleEnum; -import com.webank.wedatasphere.exchangis.job.server.service.IProjectCopyService; -import 
com.webank.wedatasphere.exchangis.job.server.service.IProjectImportService; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectCopyService; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectImportService; import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; import org.apache.commons.lang.StringUtils; import org.apache.linkis.server.BDPJettyServerHelper; @@ -22,24 +22,21 @@ import javax.annotation.Resource; import javax.servlet.http.HttpServletRequest; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; /** * @author tikazhang * @Date 2022/4/24 21:15 */ @Service -public class ProjectCopyServiceImpl implements IProjectCopyService { +public class ProjectCopyServiceImpl implements ProjectCopyService { private static final Logger LOG = LoggerFactory.getLogger(ProjectCopyServiceImpl.class); @Autowired - private IProjectImportService projectImportService; + private ProjectImportService projectImportService; @Autowired - private DefaultJobInfoService defaultJobInfoService; + private ProjectExportServiceImpl projectExportService; @Autowired private ProjectImportServerImpl projectImportServer; @@ -52,9 +49,9 @@ public Message copy(Map params, String userName, HttpServletRequ LOG.info("begin to copy in project params is {}", params); //Long projectId = Long.parseLong(params.get("projectId").toString()); Boolean partial = (Boolean) params.get("partial"); - Map> moduleIdsMap = defaultJobInfoService.getModuleIdsMap(params); + Map> moduleIdsMap = projectExportService.getModuleIdsMap(params); - Set longs = moduleIdsMap.get(ModuleEnum.SQOOP_IDS.getName()); + Set longs = moduleIdsMap.get(Objects.isNull(params.get("dataXIds")) ? 
ModuleEnum.SQOOP_IDS.getName() : ModuleEnum.DATAX_IDS.getName()); List list1 = new ArrayList(longs); ExchangisJobEntity exchangisJob = this.jobEntityDao.getBasicInfo(list1.get(0)); Long projectId = exchangisJob.getProjectId(); @@ -67,7 +64,7 @@ public Message copy(Map params, String userName, HttpServletRequ } String contextIdStr = (String) params.get("contextID"); - ExportedProject exportedProject = defaultJobInfoService.export(projectId, moduleIdsMap, partial, request); + ExportedProject exportedProject = projectExportService.export(projectId, moduleIdsMap, partial, request); copySqoop(moduleIdsMap, exportedProject); diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectExportServiceImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectExportServiceImpl.java new file mode 100644 index 000000000..2abc89864 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectExportServiceImpl.java @@ -0,0 +1,204 @@ +package com.webank.wedatasphere.exchangis.project.server.service.impl; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.restful.external.ModuleEnum; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectExportService; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; +import 
com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.bml.client.BmlClient; +import org.apache.linkis.bml.client.BmlClientFactory; +import org.apache.linkis.bml.protocol.BmlUploadResponse; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.rmi.ServerException; +import java.util.*; +import java.util.stream.Collectors; + +/** + * @author jefftlin + * @date 2023/7/13 + */ +@Service +public class ProjectExportServiceImpl implements ProjectExportService { + + private static final Logger LOG = LoggerFactory.getLogger(ProjectExportServiceImpl.class); + + /** + * Project service + */ + @Resource + private ProjectService projectService; + + @Resource + private JobInfoService jobInfoService; + + @Override + public Message exportProject(Map params, String userName, HttpServletRequest request) throws ExchangisJobServerException, ServerException { + ExportedProject exportedProject = null; + Long projectId = Long.parseLong(params.get("projectId").toString()); + Boolean partial = (Boolean) params.get("partial"); + Map> moduleIdsMap = getModuleIdsMap(params); + + LOG.info("export project, user: {}, project: {}, partial:{}", userName, projectId, partial); + exportedProject = export(projectId, moduleIdsMap, partial, request); + String exported = null; + try { + exported = BDPJettyServerHelper.jacksonJson().writeValueAsString(exportedProject); + } catch (JsonProcessingException e) { + LOG.error("Occur error while tranform class", e.getMessage()); + } + + LOG.info("projectName: {}, exported:{}", exportedProject.getName(), exported); + BmlClient bmlClient = 
BmlClientFactory.createBmlClient(userName); + BmlUploadResponse bmlUploadResponse = bmlClient.uploadShareResource(userName, exportedProject.getName(), + "exchangis_exported_" + UUID.randomUUID(), new ByteArrayInputStream(exported.getBytes(StandardCharsets.UTF_8))); + + if (bmlUploadResponse == null || !bmlUploadResponse.isSuccess()) { + throw new ServerException("cannot upload exported data to BML"); + } + + LOG.info("{} is exporting the project, uploaded to BML the resourceID is {} and the version is {}", + userName, bmlUploadResponse.resourceId(), bmlUploadResponse.version()); + + Message message = Message.ok("export job") + .data("resourceId", bmlUploadResponse.resourceId()) + .data("version", bmlUploadResponse.version()); + return message; + } + + @Override + public ExportedProject export(Long projectId, Map> moduleIdsMap, boolean partial, HttpServletRequest request) throws ExchangisJobServerException { + ExportedProject exportedProject = new ExportedProject(); + ExchangisProjectInfo project = projectService.getProjectDetailById(projectId); + + LOG.info("execute export method! 
export project is {}.", project.getName()); + exportedProject.setName(project.getName()); + + setSqoop(projectId, moduleIdsMap, partial, exportedProject, request); + + setDatax(projectId, moduleIdsMap, partial, exportedProject, request); + + return exportedProject; + } + + private void setSqoop(Long projectId, Map> moduleIdsMap, boolean partial, ExportedProject exportedProject, HttpServletRequest request) throws ExchangisJobServerException { + List sqoops = new ArrayList<>(); + LOG.info("Request: {}", request); + if (partial) { + Set longs = moduleIdsMap.get(ModuleEnum.SQOOP_IDS.getName()); + if (longs.size() > 0) { + for (Long id : longs) { + LOG.info("id: {}", id); + ExchangisJobVo job = jobInfoService.getJob(id, false); + + String sqoopStr = null; + try { + sqoopStr = BDPJettyServerHelper.jacksonJson().writeValueAsString(job); + } catch (JsonProcessingException e) { + LOG.error("Occur error while tranform class", e.getMessage()); + } + + LOG.info("sqoopStr:{}", sqoopStr); + LOG.info("ExchangisJobVo sqoop: {}", job.getContent()); + LOG.info("getCreateTime: {}", job.getId()); + LOG.info("executorUser: {}", job.getExecuteUser()); + sqoops.add(job); + } + exportedProject.setSqoops(sqoops); + } + + } else { + LOG.info("Through request {} and projectId {} get Sqoopjob", request, projectId); + sqoops = jobInfoService.getSubJobList(request, projectId); + exportedProject.setSqoops(sqoops); + //exportedProject.setSqoops(jobInfoService.getByProject(request, projectId)); + } + LOG.info("exporting project, export sqoopJob: {}", exportedProject); + } + + private void setDatax(Long projectId, Map> moduleIdsMap, boolean partial, ExportedProject exportedProject, HttpServletRequest request) throws ExchangisJobServerException { + List dataxs = new ArrayList<>(); + LOG.info("Request: {}", request); + if (partial) { + Set longs = moduleIdsMap.get(ModuleEnum.DATAX_IDS.getName()); + if (longs.size() > 0) { + for (Long id : longs) { + LOG.info("id: {}", id); + ExchangisJobVo job = 
jobInfoService.getJob(id, false); + + String dataxStr = null; + try { + dataxStr = BDPJettyServerHelper.jacksonJson().writeValueAsString(job); + } catch (JsonProcessingException e) { + LOG.error("Occur error while tranform class", e.getMessage()); + } + + LOG.info("dataxStr:{}", dataxStr); + LOG.info("ExchangisJobVo sqoop: {}", job.getContent()); + LOG.info("getCreateTime: {}", job.getId()); + LOG.info("executorUser: {}", job.getExecuteUser()); + dataxs.add(job); + } + exportedProject.setDataxes(dataxs); + } + + } else { + LOG.info("Through request {} and projectId {} get Dataxjob", request, projectId); + dataxs = jobInfoService.getSubJobList(request, projectId); + exportedProject.setSqoops(dataxs); + } + LOG.info("exporting project, export dataxJob: {}", exportedProject); + + } + + /** + * 获取需要导出对象集合 + * + * @param params + * @return + */ + @Override + public Map> getModuleIdsMap(Map params) { + + Map> map = Maps.newHashMap(); + String sqoopIdsStr = null; + if(params.get("sqoopIds") != null) { + sqoopIdsStr = params.get("sqoopIds").toString(); + } + String dataxIdsStr = null; + if(params.get("dataXIds") != null) { + dataxIdsStr = params.get("dataXIds").toString(); + } + + Set sqoopIds = Sets.newHashSet(); + Set dataxIds = Sets.newHashSet(); + + if (StringUtils.isNotEmpty(sqoopIdsStr)) { + sqoopIds = Arrays.stream(StringUtils.split(sqoopIdsStr, ",")) + .map(Long::parseLong).collect(Collectors.toSet()); + } + if (StringUtils.isNotEmpty(dataxIdsStr)) { + dataxIds = Arrays.stream(StringUtils.split(dataxIdsStr, ",")) + .map(Long::parseLong).collect(Collectors.toSet()); + } + map.put("sqoopIds", sqoopIds); + map.put("dataXIds", dataxIds); + LOG.info("The objects to be exported are: {}", map); + return map; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ProjectImportServerImpl.java 
b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectImportServerImpl.java similarity index 80% rename from exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ProjectImportServerImpl.java rename to exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectImportServerImpl.java index 0cb3ee3cd..acae2fa98 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ProjectImportServerImpl.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectImportServerImpl.java @@ -1,15 +1,13 @@ -package com.webank.wedatasphere.exchangis.job.server.service.impl; +package com.webank.wedatasphere.exchangis.project.server.service.impl; import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject; import com.webank.wedatasphere.exchangis.job.server.dto.IdCatalog; import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; -import com.webank.wedatasphere.exchangis.job.server.service.IProjectImportService; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectImportService; import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; -import com.webank.wedatasphere.exchangis.project.server.entity.ExchangisProject; -import com.webank.wedatasphere.exchangis.project.server.exception.ExchangisProjectErrorException; -import com.webank.wedatasphere.exchangis.project.server.mapper.ProjectMapper; -import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; +import 
com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectMapper; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.linkis.bml.client.BmlClient; @@ -40,7 +38,7 @@ */ @Service -public class ProjectImportServerImpl implements IProjectImportService { +public class ProjectImportServerImpl implements ProjectImportService { private static final Logger LOG = LoggerFactory.getLogger(ProjectImportServerImpl.class); @@ -60,8 +58,9 @@ public Message importProject(HttpServletRequest req, Map params) //String resourceId = "99763d27-a35e-43f2-829b-100830bca538"; String resourceId = (String) params.get("resourceId"); String version = (String) params.get("version"); + Long projectId = (Long) params.get("projectId"); //Long projectId = Long.parseLong("1497870871035973669"); - Long projectId = Long.parseLong("111111"); + //Long projectId = Long.parseLong("111111"); String projectVersion = (String) params.get("projectVersion"); String flowVersion = (String) params.get("flowVersion"); String versionSuffix = projectVersion + "_" + flowVersion; @@ -129,24 +128,27 @@ else if (projects.size() == 1) { } } - private void importDatax(Long projectId, String versionSuffix, ExportedProject exportedProject, IdCatalog idCatalog, String userName, String importType) { - List dataxes = exportedProject.getDataxes(); - if (dataxes == null) { + private void importDatax(Long projectId, String versionSuffix, ExportedProject exportedProject, IdCatalog idCatalog, String userName, String importType) throws ExchangisJobServerException { + + List dataxs = exportedProject.getDataxes(); + if (dataxs == null) { return; } - for (ExchangisJobVo datax : dataxes) { - Long oldId = datax.getId(); - datax.setProjectId(projectId); - datax.setJobName(updateName(datax.getJobName(), versionSuffix)); - //Long existingId = (long) 66; - Long existingId = jobInfoService.getByNameWithProjectId(datax.getJobName(), projectId).get(0).getId(); - //Long existingId = 
jobInfoService.getByNameWithProjectId(datax.getJobName(), projectId); - if (existingId != null) { - idCatalog.getSqoop().put(oldId, existingId); - } else { - jobInfoService.createJob(datax); - idCatalog.getSqoop().put(oldId, datax.getId()); - } + List projects = projectMapper.getDetailByName(exportedProject.getName()); + if (projects.size() == 0) { + ExchangisProject project = new ExchangisProject(); + project.setName(exportedProject.getName()); + project.setCreateTime(Calendar.getInstance().getTime()); + project.setCreateUser(userName); + Long newProjectId = projectMapper.insertOne(project); + List newProjects = projectMapper.getDetailByName(exportedProject.getName()); + addSqoopTask (dataxs, newProjects, versionSuffix, idCatalog, projectId, importType); + } + else if (projects.size() == 1) { + addSqoopTask (dataxs, projects, versionSuffix, idCatalog, projectId, importType); + } + else { + throw new ExchangisJobServerException(31101, "Already exits duplicated project name(存在重复项目名称) projectName is:" + "[" + exportedProject.getName() + "]"); } } @@ -155,27 +157,26 @@ public void addSqoopTask (List sqoops, List pr Long projectIdProd = projects.get(0).getId(); Long oldId = sqoop.getId(); if (importType.equals("import")) { - sqoop.setProjectId(projectIdProd); + sqoop.setProjectId(projectId); } sqoop.setJobName(updateName(sqoop.getJobName(), versionSuffix)); //Long existingId = (long) 55; LOG.info("oldId: {}, projectid: {}, jobName: {}", sqoop.getId(), sqoop.getProjectId(), sqoop.getJobName()); - LOG.info("jobByNameWithProjectId: {}", jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectIdProd)); + LOG.info("jobByNameWithProjectId: {}", jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectId)); Long existingId; - if (jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectIdProd) == null || jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectId).size() == 0) { + if (jobInfoService.getByNameWithProjectId(sqoop.getJobName(), 
projectId) == null || jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectId).size() == 0) { existingId = null; } else { - existingId = jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectIdProd).get(0).getId(); + existingId = jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectId).get(0).getId(); } //Long existingId = jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectId); if (existingId != null) { idCatalog.getSqoop().put(oldId, existingId); throw new ExchangisJobServerException(31101, "Already exits duplicated job name(存在重复任务名称) jobName is:" + "[" + sqoop.getJobName() + "]"); } else { - //sqoop.setJobName("hahaha"); LOG.info("Sqoop job content is: {}, Modify user is: {}, jobType is: {}", sqoop.getContent(), sqoop.getExecuteUser(), sqoop.getJobType()); ExchangisJobVo jobVo = jobInfoService.createJob(sqoop); - LOG.info("oldId: {}, newid: {}, jobName0000000: {}", sqoop.getId(), jobVo.getId(), jobVo.getJobName()); + LOG.info("oldId: {}, newid: {}, jobName: {}", sqoop.getId(), jobVo.getId(), jobVo.getJobName()); idCatalog.getSqoop().put(oldId, jobVo.getId()); } } diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java index e28df5556..89f3e12e6 100644 --- a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java @@ -10,13 +10,14 @@ import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; import 
com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; -import com.webank.wedatasphere.exchangis.project.server.entity.ExchangisProjectUser; -import com.webank.wedatasphere.exchangis.project.server.mapper.ProjectMapper; -import com.webank.wedatasphere.exchangis.project.server.entity.ExchangisProject; -import com.webank.wedatasphere.exchangis.project.server.mapper.ProjectUserMapper; +import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectMapper; +import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectUserMapper; import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; -import com.webank.wedatasphere.exchangis.project.server.vo.ExchangisProjectInfo; -import com.webank.wedatasphere.exchangis.project.server.vo.ProjectQueryVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectUserVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ProjectQueryVo; +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -61,44 +62,50 @@ public long createProject(ExchangisProjectInfo projectVo, String userName) { project.setCreateTime(Calendar.getInstance().getTime()); this.projectMapper.insertOne(project); - List projectUsers = new ArrayList<>(); - if (project.getViewUsers() != null && project.getViewUsers().length()!=0) { - for (String view : project.getViewUsers().split(",")) { + Map projectUserMap = new HashMap<>(); + if (Objects.nonNull(project.getViewUsers()) && project.getViewUsers().length() != 0) { + for (String viewUser : project.getViewUsers().split(",")) { ExchangisProjectUser projectUser = new ExchangisProjectUser(); projectUser.setProjectId(project.getId()); - 
projectUser.setPrivUser(view); - projectUser.setPriv(1); + projectUser.setPrivUser(viewUser); + projectUser.setPriv(4); projectUser.setUpdateTime(project.getLastUpdateTime()); - projectUsers.add(projectUser); + projectUserMap.put(viewUser ,projectUser); } } - if (project.getEditUsers() != null && project.getEditUsers().length()!=0) { - for (String edit : project.getEditUsers().split(",")) { - ExchangisProjectUser projectUser = new ExchangisProjectUser(); - projectUser.setProjectId(project.getId()); - projectUser.setPrivUser(edit); - projectUser.setPriv(3); - projectUser.setUpdateTime(project.getLastUpdateTime()); - projectUsers.add(projectUser); + if (Objects.nonNull(project.getEditUsers()) && project.getEditUsers().length() != 0) { + for (String editUser : project.getEditUsers().split(",")) { + if (Objects.nonNull(projectUserMap.get(editUser))) { + projectUserMap.get(editUser).setPriv(6); + } else { + ExchangisProjectUser projectUser = new ExchangisProjectUser(); + projectUser.setProjectId(project.getId()); + projectUser.setPrivUser(editUser); + projectUser.setPriv(6); + projectUser.setUpdateTime(project.getLastUpdateTime()); + projectUserMap.put(editUser ,projectUser); + } } } - if (project.getExecUsers() != null && project.getExecUsers().length()!=0) { - for (String exec : project.getExecUsers().split(",")) { - ExchangisProjectUser projectUser = new ExchangisProjectUser(); - projectUser.setProjectId(project.getId()); - projectUser.setPrivUser(exec); - projectUser.setPriv(2); - projectUser.setUpdateTime(project.getLastUpdateTime()); - projectUsers.add(projectUser); + if (Objects.nonNull(project.getExecUsers()) && project.getExecUsers().length() != 0) { + for (String execUser : project.getExecUsers().split(",")) { + if (Objects.nonNull(projectUserMap.get(execUser))) { + projectUserMap.get(execUser).setPriv(7); + } else { + ExchangisProjectUser projectUser = new ExchangisProjectUser(); + projectUser.setProjectId(project.getId()); + 
projectUser.setPrivUser(execUser); + projectUser.setPriv(7); + projectUser.setUpdateTime(project.getLastUpdateTime()); + projectUserMap.put(execUser ,projectUser); + } } } - ExchangisProjectUser projectUser = new ExchangisProjectUser(); - projectUser.setProjectId(project.getId()); - projectUser.setPrivUser(project.getCreateUser()); - projectUser.setPriv(0); - projectUser.setUpdateTime(project.getLastUpdateTime()); - projectUsers.add(projectUser); - this.projectUserMapper.addProjectUser(projectUsers); + + if(projectUserMap.size() > 0) { + this.projectUserMapper.addProjectUser(new ArrayList<>(projectUserMap.values())); + + } return project.getId(); } @@ -125,44 +132,50 @@ public void updateProject(ExchangisProjectInfo projectVo, String userName) { updatedProject.setLastUpdateTime(Calendar.getInstance().getTime()); this.projectMapper.updateOne(updatedProject); - List projectUsers = new ArrayList<>(); - if (updatedProject.getViewUsers() != null && updatedProject.getViewUsers().length()!=0) { - for (String view : updatedProject.getViewUsers().split(",")) { + Map projectUserMap = new HashMap<>(); + if (Objects.nonNull(updatedProject.getViewUsers()) && updatedProject.getViewUsers().length() != 0) { + for (String viewUser : updatedProject.getViewUsers().split(",")) { ExchangisProjectUser projectUser = new ExchangisProjectUser(); projectUser.setProjectId(updatedProject.getId()); - projectUser.setPrivUser(view); - projectUser.setPriv(1); + projectUser.setPrivUser(viewUser); + projectUser.setPriv(4); projectUser.setUpdateTime(updatedProject.getLastUpdateTime()); - projectUsers.add(projectUser); + projectUserMap.put(viewUser ,projectUser); } } - if (updatedProject.getEditUsers() != null && updatedProject.getEditUsers().length()!=0) { - for (String edit : updatedProject.getEditUsers().split(",")) { - ExchangisProjectUser projectUser = new ExchangisProjectUser(); - projectUser.setProjectId(updatedProject.getId()); - projectUser.setPrivUser(edit); - projectUser.setPriv(3); - 
projectUser.setUpdateTime(updatedProject.getLastUpdateTime()); - projectUsers.add(projectUser); + if (Objects.nonNull(updatedProject.getEditUsers()) && updatedProject.getEditUsers().length() != 0) { + for (String editUser : updatedProject.getEditUsers().split(",")) { + if (Objects.nonNull(projectUserMap.get(editUser))) { + projectUserMap.get(editUser).setPriv(6); + } else { + ExchangisProjectUser projectUser = new ExchangisProjectUser(); + projectUser.setProjectId(updatedProject.getId()); + projectUser.setPrivUser(editUser); + projectUser.setPriv(6); + projectUser.setUpdateTime(updatedProject.getLastUpdateTime()); + projectUserMap.put(editUser ,projectUser); + } } } - if (updatedProject.getExecUsers() != null && updatedProject.getExecUsers().length()!=0) { - for (String exec : updatedProject.getExecUsers().split(",")) { - ExchangisProjectUser projectUser = new ExchangisProjectUser(); - projectUser.setProjectId(updatedProject.getId()); - projectUser.setPrivUser(exec); - projectUser.setPriv(2); - projectUser.setUpdateTime(updatedProject.getLastUpdateTime()); - projectUsers.add(projectUser); + if (Objects.nonNull(updatedProject.getExecUsers()) && updatedProject.getExecUsers().length() != 0) { + for (String execUser : updatedProject.getExecUsers().split(",")) { + if (Objects.nonNull(projectUserMap.get(execUser))) { + projectUserMap.get(execUser).setPriv(7); + } else { + ExchangisProjectUser projectUser = new ExchangisProjectUser(); + projectUser.setProjectId(updatedProject.getId()); + projectUser.setPrivUser(execUser); + projectUser.setPriv(7); + projectUser.setUpdateTime(updatedProject.getLastUpdateTime()); + projectUserMap.put(execUser ,projectUser); + } } } - ExchangisProjectUser projectUser = new ExchangisProjectUser(); - projectUser.setProjectId(updatedProject.getId()); - projectUser.setPrivUser(updatedProject.getCreateUser()); - projectUser.setPriv(0); - projectUser.setUpdateTime(updatedProject.getLastUpdateTime()); - projectUsers.add(projectUser); - 
this.projectUserMapper.updateProjectUser(projectUsers); + + this.projectUserMapper.deleteProjectUser(Long.valueOf(projectVo.getId())); + if(projectUserMap.size() > 0) { + this.projectUserMapper.addProjectUser(new ArrayList<>(projectUserMap.values())); + } } @Override @@ -242,4 +255,10 @@ public ExchangisProjectInfo selectByName(String name){ } return null; } + + @Override + public ExchangisProjectUser queryProjectUser(ExchangisProjectUserVo exchangisProjectUserVo) { + ExchangisProjectUser projectUser = new ExchangisProjectUser(Long.valueOf(exchangisProjectUserVo.getProjectId()), exchangisProjectUserVo.getPrivUser()); + return this.projectUserMapper.queryProjectUser(projectUser); + } } diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/AuthorityUtils.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/AuthorityUtils.java deleted file mode 100644 index 67a9bf6e7..000000000 --- a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/AuthorityUtils.java +++ /dev/null @@ -1,48 +0,0 @@ -package com.webank.wedatasphere.exchangis.project.server.utils; - -import com.webank.wedatasphere.exchangis.project.server.mapper.ProjectMapper; -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.List; - -/** - * @author tikazhang - * @Date 2022/5/10 20:10 - */ -public class AuthorityUtils { - - @Autowired - private static ProjectMapper projectMapper; - - public static boolean hasViewAuthority(long projectId, String loginUser) { - List authoritis = projectMapper.getAuthoritis(projectId, loginUser); - if (authoritis.contains("1")) { - return true; - } - return false; - } - - public static boolean hasExecAuthority(long projectId, String loginUser) { - List authoritis = projectMapper.getAuthoritis(projectId, loginUser); - if (authoritis.contains("2")) { - return 
true; - } - return false; - } - - public static boolean hasEditAuthority(long projectId, String loginUser) { - List authoritis = projectMapper.getAuthoritis(projectId, loginUser); - if (authoritis.contains("3")) { - return true; - } - return false; - } - - public static boolean hasOwnAuthority(long projectId, String loginUser) { - List authoritis = projectMapper.getAuthoritis(projectId, loginUser); - if (authoritis.contains("0")) { - return true; - } - return false; - } -} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectConfiguration.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectConfiguration.java new file mode 100644 index 000000000..81ae25a13 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectConfiguration.java @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.exchangis.project.server.utils; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * @author + * @Date + */ +public class ExchangisProjectConfiguration { + public static final CommonVars LIMIT_INTERFACE = CommonVars.apply("wds.exchangis.limit.interface.value", true); + +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ProjectAuthorityUtils.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ProjectAuthorityUtils.java new file mode 100644 index 000000000..6fd06bf8f --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ProjectAuthorityUtils.java @@ -0,0 +1,49 @@ +package com.webank.wedatasphere.exchangis.project.server.utils; + +import com.webank.wedatasphere.exchangis.project.entity.domain.OperationType; 
+import com.webank.wedatasphere.exchangis.project.server.exception.ExchangisProjectErrorException; +import com.webank.wedatasphere.exchangis.project.server.exception.ExchangisProjectExceptionCode; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import org.apache.commons.lang3.StringUtils; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * @author tikazhang + * @Date 2022/5/10 20:10 + */ +public class ProjectAuthorityUtils { + + /** + * @param username username + * @param project project + * @param operationType enum("PROJECT_QUERY","PROJECT_ALTER") + * @return + */ + public static boolean hasProjectAuthority(String username, ExchangisProjectInfo project, OperationType operationType) throws ExchangisProjectErrorException { + if (StringUtils.isNotEmpty(username) && + Objects.nonNull(project) && + Objects.nonNull(operationType)) { + // Create users have all rights to the project. + List viewUsers = Arrays.stream(project.getViewUsers().split(",")).distinct().collect(Collectors.toList()); + List editUsers = Arrays.stream(project.getEditUsers().split(",")).distinct().collect(Collectors.toList()); + List execUsers = Arrays.stream(project.getExecUsers().split(",")).distinct().collect(Collectors.toList()); + + switch (operationType) { + case PROJECT_QUERY: + return StringUtils.equals(username, project.getCreateUser()) || + viewUsers.contains(username) || + editUsers.contains(username) || + execUsers.contains(username); + case PROJECT_ALTER: + return StringUtils.equals(username, project.getCreateUser()); + default: + throw new ExchangisProjectErrorException(ExchangisProjectExceptionCode.UNSUPPORTED_OPERATION.getCode(), "Unsupported operationType"); + } + } + return false; + } +} diff --git a/exchangis-project/pom.xml b/exchangis-project/pom.xml index 90d88d3ca..99a3c3a17 100644 --- a/exchangis-project/pom.xml +++ b/exchangis-project/pom.xml @@ -5,15 +5,16 @@ 
exchangis com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 exchangis-project pom - 1.0.0 exchangis-project-server + exchangis-project-provider + exchangis-project-entity diff --git a/exchangis-server/pom.xml b/exchangis-server/pom.xml index 0b14c6bda..2b16d972d 100644 --- a/exchangis-server/pom.xml +++ b/exchangis-server/pom.xml @@ -5,7 +5,7 @@ exchangis com.webank.wedatasphere.exchangis - 1.0.0 + 1.1.2 4.0.0 @@ -20,31 +20,50 @@ com.webank.wedatasphere.exchangis exchangis-datasource-server - 1.0.0 + 1.1.2 + + + com.webank.wedatasphere.exchangis + exchangis-engine-server + ${exchangis.version} + + + org.apache.linkis + linkis-module + + + spring-jdbc + org.springframework + + com.webank.wedatasphere.exchangis exchangis-job-server - 1.0.0 + 1.1.2 - com.webank.wedatasphere.exchangis exchangis-project-server - 1.0.0 + 1.1.2 org.apache.linkis - linkis-storage - ${linkis.version} + linkis-module + + + + com.fasterxml + classmate + 1.5.1 diff --git a/exchangis-server/src/main/resources/exchangis.properties b/exchangis-server/src/main/resources/exchangis.properties index d04af6fd8..4be93901f 100644 --- a/exchangis-server/src/main/resources/exchangis.properties +++ b/exchangis-server/src/main/resources/exchangis.properties @@ -20,9 +20,9 @@ wds.linkis.test.mode=true wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/exchangis_v3?useSSL=false&characterEncoding=UTF-8 -wds.linkis.server.mybatis.datasource.username=root +wds.linkis.server.mybatis.datasource.username= -wds.linkis.server.mybatis.datasource.password=123456 +wds.linkis.server.mybatis.datasource.password= wds.linkis.log.clear=true @@ -34,7 +34,7 @@ wds.exchangis.datasource.client.authtoken.key=hdfs wds.exchangis.datasource.client.authtoken.value=exchangis-auth wds.exchangis.datasource.client.dws.version=v1 -wds.exchangis.datasource.extension.dir=exchangis-extds +wds.exchangis.datasource.extension.dir=exchangis-extds/ ##restful 
wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.exchangis.datasource.server.restful.api,\ diff --git a/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala b/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala index 975e55c25..a1db9d54a 100644 --- a/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala +++ b/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala @@ -25,7 +25,7 @@ object ApplicationUtils{ mainProgram }{ case e: Exception => - LOG.info("The process has been shutdown: [" + e.getMessage + "]") + LOG.info("The process has been shutdown: [" + e.getMessage + "]", e) System.exit(1) } } diff --git a/images/zh_CN/ch1/frontend_view.png b/images/zh_CN/ch1/frontend_view.png new file mode 100644 index 000000000..bad523915 Binary files /dev/null and b/images/zh_CN/ch1/frontend_view.png differ diff --git a/images/zh_CN/ch1/home_page_en.png b/images/zh_CN/ch1/home_page_en.png new file mode 100644 index 000000000..a752e2984 Binary files /dev/null and b/images/zh_CN/ch1/home_page_en.png differ diff --git a/images/zh_CN/ch1/home_page_zh.png b/images/zh_CN/ch1/home_page_zh.png new file mode 100644 index 000000000..2b71f53b3 Binary files /dev/null and b/images/zh_CN/ch1/home_page_zh.png differ diff --git a/images/zh_CN/ch1/register_eureka.png b/images/zh_CN/ch1/register_eureka.png new file mode 100644 index 000000000..2cd1323ec Binary files /dev/null and b/images/zh_CN/ch1/register_eureka.png differ diff --git a/pom.xml b/pom.xml index 0d9b94e1a..85cd79e9d 100644 --- a/pom.xml +++ b/pom.xml @@ -22,7 +22,7 @@ com.webank.wedatasphere.exchangis exchangis - 1.0.0 + 1.1.2 pom exchangis @@ -37,11 +37,13 @@ - 1.0.0 - 1.1.0 - 1.1.1 + 1.1.2 + 1.1.2 + 1.4.0 + 1.4.0 0.1.0-SNAPSHOT - 2.11.8 + 2.12.12 + 4.7.1 1.8 3.3.3 2.8.5 @@ -62,15 +64,15 @@ 0.9.10 2.21 1.9.5 - 1.4.15 + 1.4.19 
0.1.0-SNAPSHOT - exchangis-dao exchangis-project exchangis-datasource + exchangis-engines exchangis-job exchangis-plugins exchangis-server @@ -107,10 +109,17 @@ org.apache.linkis linkis-mybatis ${linkis.version} + + + org.springframework + spring-orm + + org.apache.linkis linkis-module + ${linkis.version} org.springframework.boot @@ -121,20 +130,22 @@ org.hibernate.validator - ${linkis.version} org.apache.linkis linkis-common ${linkis.version} - org.apache.linkis linkis-protocol ${linkis.version} - + + org.apache.linkis + linkis-datasource-client + ${linkis.datasource.version} + com.google.code.gson gson @@ -160,11 +171,10 @@ com.thoughtworks.xstream ${xstream.version} - - org.apache.linkis - linkis-datasource-client - ${linkis.version} + org.springframework + spring-orm + 5.2.15.RELEASE @@ -223,7 +233,7 @@ net.alchim31.maven scala-maven-plugin - 3.2.2 + ${scala-maven-plugin.version} eclipse-add-source @@ -256,7 +266,6 @@ ${scala.version} incremental - true diff --git a/web/.fes.js b/web/.fes.js index b17c624f8..3330a97b9 100644 --- a/web/.fes.js +++ b/web/.fes.js @@ -22,6 +22,9 @@ export default { meta: { name: "projectManage", title: "globalMenu.projectManage", + subs: [ + '/jobManagement' + ] }, }, { diff --git a/web/package.json b/web/package.json index 7490e89f6..41f210b83 100644 --- a/web/package.json +++ b/web/package.json @@ -55,12 +55,14 @@ "@fesjs/plugin-model": "^2.0.0", "@fesjs/plugin-request": "^2.0.2", "@form-create/ant-design-vue": "^3.0.0-alpha.2", - "@vue/compiler-sfc": "3.2.20", + "@vue/compiler-sfc": "3.1.4", "ant-design-vue": "^2.2.7", "echarts": "^5.2.1", "lodash-es": "4.17.21", - "moment": "2.29.1", - "vue": "^3.1.0", + "moment": "^2.29.4", + "monaco-editor": "^0.34.0", + "monaco-editor-webpack-plugin": "^7.0.1", + "vue": "3.1.4", "vue-request": "^1.2.0" }, "private": true diff --git a/web/src/app.js b/web/src/app.js index 85a9cb12f..fd20a1e6f 100644 --- a/web/src/app.js +++ b/web/src/app.js @@ -1,3 +1,8 @@ +/* + * @Description: + * @Author: 
sueRim + * @Date: 2022-05-13 10:19:27 + */ import { pum as pumApi, request as ajax, access as accessInstance, getRouter } from "@fesjs/fes"; import { message, Modal, ConfigProvider } from "ant-design-vue"; import zhCN from "ant-design-vue/es/locale/zh_CN"; @@ -46,6 +51,7 @@ export const request = { return message.error(error.response.data.data.errorMsg.desc); } console.log(error, error?.response) + if (error?.type === 'REPEAT') return // 重复请求不进行提示 message.error(error?.response?.data?.message || error?.data?.message || "系统异常"); }, }, diff --git a/web/src/common/service.js b/web/src/common/service.js index 37c00d4de..719a61378 100644 --- a/web/src/common/service.js +++ b/web/src/common/service.js @@ -59,9 +59,14 @@ export const getDataSourceList = (params) => { }; // 数据源管理 获取数据源 -export const getDataSourceTypes = () => { +export const getDataSourceTypes = (param) => { + let extra = '' + if (param) { + const { engineType, direct, sourceType } = param + extra = `&engineType=${engineType}&direct=${direct}${sourceType ? 
'&sourceType=' + sourceType : ''}` + } return request( - `/datasources/type?labels=${getEnvironment()}&t=_${new Date().getTime()}`, + `/datasources/type?labels=${getEnvironment()}&t=_${new Date().getTime()}${extra}`, {}, { method: "GET" } ); @@ -105,10 +110,10 @@ export const getTables = (type, id, dbName) => { { method: "GET" } ); };*/ - +// /datasources/fieldsmaping export const getFields = (params) => { return request( - `/datasources/fieldsmapping`, + `/job/transform/settings`, { ...params, labels: { @@ -282,13 +287,14 @@ export const getJobs = (id, jobType, name, current, size) => { }); }; -export const saveProject = (id, body) => { +export const saveProject = (id, body, type = 'save') => { return request(`/job/${id}/content`, { ...body, labels: { route: getEnvironment() } }, { + headers: { 'save-from': type }, method: "PUT", }); }; @@ -534,4 +540,93 @@ export const getPartitionInfo = (params) => { method: "GET", } ); +} + +// 获取字段映射转换函数 +export const getFieldFunc = (funcType) => { + if (!funcType) return + return request( + `/job/func/${funcType}?labels=${getEnvironment()}&_=${Math.random()}`, + {}, + { + method: "GET", + } + ); +} + +// 获取字段映射转换函数 +export const encryptFunc = (param) => { + return request( + `/datasources/tools/encrypt?labels=${getEnvironment()}`, + param, + { + method: "POST", + } + ); +} + +// 获取执行用户 +export const getExecutor = () => { + return request( + `/job/Executor?labels=${getEnvironment()}`, + {}, + { + method: "GET", + } + ) +} + +// processor的内容保存 +export const saveProcessor = (param) => { + return request( + `/job/transform/processor/code_content?labels=${getEnvironment()}`, + param, + { + method: "POST", + } + ); +} + +// processor的内容更新 +export const updateProcessor = ({ proc_code_id, ...param }) => { + return request( + `/job/transform/processor/code_content/${proc_code_id}?labels=${getEnvironment()}`, + param, + { + method: "PUT", + } + ); +} + +// processor的内容更新 +export const getTemplate = () => { + return request( + 
`/job/transform/processor/code_template?labels=${getEnvironment()}`, + {}, + { + method: "GET", + } + ); +} + +// processor的内容更新 +export const getProcessor = (proc_code_id) => { + return request( + `/job/transform/processor/code_content/${proc_code_id}?labels=${getEnvironment()}`, + {}, + { + method: "GET", + } + ); +} + +// 获取项目权限 +export const getProjectPermission = (projectId) => { + return request( + `/getProjectPermission/${projectId}?labels=${getEnvironment()}`, + {}, + { + method: "GET", + } + ); } \ No newline at end of file diff --git a/web/src/images/dataSourceTypeIcon/ELASTICSEARCH.png b/web/src/images/dataSourceTypeIcon/ELASTICSEARCH.png new file mode 100644 index 000000000..241cceb48 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/ELASTICSEARCH.png differ diff --git a/web/src/images/dataSourceTypeIcon/MONGODB.png b/web/src/images/dataSourceTypeIcon/MONGODB.png new file mode 100644 index 000000000..02f299330 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/MONGODB.png differ diff --git a/web/src/images/dataSourceTypeIcon/ORACLE.png b/web/src/images/dataSourceTypeIcon/ORACLE.png new file mode 100644 index 000000000..1b7b9a95d Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/ORACLE.png differ diff --git a/web/src/images/dataSourceTypeIcon/database.png b/web/src/images/dataSourceTypeIcon/database.png new file mode 100644 index 000000000..3417d4aac Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/database.png differ diff --git a/web/src/images/dataSourceTypeIcon/database_active.png b/web/src/images/dataSourceTypeIcon/database_active.png new file mode 100644 index 000000000..c4d9ae821 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/database_active.png differ diff --git a/web/src/images/dataSourceTypeIcon/table.png b/web/src/images/dataSourceTypeIcon/table.png new file mode 100644 index 000000000..56367dabc Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/table.png differ 
diff --git a/web/src/images/datax-icon.png b/web/src/images/datax-icon.png new file mode 100644 index 000000000..f743fcd5d Binary files /dev/null and b/web/src/images/datax-icon.png differ diff --git a/web/src/images/sqoop-icon.png b/web/src/images/sqoop-icon.png new file mode 100644 index 000000000..31f83e85b Binary files /dev/null and b/web/src/images/sqoop-icon.png differ diff --git a/web/src/pages/dataSourceManage/components/datasourceForm/index.vue b/web/src/pages/dataSourceManage/components/datasourceForm/index.vue index b81b3f3f6..8134077a7 100644 --- a/web/src/pages/dataSourceManage/components/datasourceForm/index.vue +++ b/web/src/pages/dataSourceManage/components/datasourceForm/index.vue @@ -1,12 +1,13 @@ diff --git a/web/src/pages/dataSourceManage/components/editModal.vue b/web/src/pages/dataSourceManage/components/editModal.vue index b6949fc14..064bbb992 100644 --- a/web/src/pages/dataSourceManage/components/editModal.vue +++ b/web/src/pages/dataSourceManage/components/editModal.vue @@ -7,7 +7,7 @@ @cancel="$emit('update:visible', false)" > - +
@@ -180,7 +180,7 @@ export default { ...connectParams } }); - // message.success('连接成功'); + message.success('连接成功'); // this.modalCfg.isTested = false; // } catch (error) { // console.log('error: ', error); diff --git a/web/src/pages/dataSourceManage/components/encryptModal.vue b/web/src/pages/dataSourceManage/components/encryptModal.vue new file mode 100644 index 000000000..3dde79665 --- /dev/null +++ b/web/src/pages/dataSourceManage/components/encryptModal.vue @@ -0,0 +1,111 @@ + + + + + + diff --git a/web/src/pages/dataSourceManage/components/topLine.vue b/web/src/pages/dataSourceManage/components/topLine.vue index bbc42d060..4a7fddd13 100644 --- a/web/src/pages/dataSourceManage/components/topLine.vue +++ b/web/src/pages/dataSourceManage/components/topLine.vue @@ -1,3 +1,8 @@ +