From 61e2d9272c04491d6fee02c9b8da2234d234d1ba Mon Sep 17 00:00:00 2001 From: aiceflower Date: Thu, 26 Sep 2024 22:57:45 +0800 Subject: [PATCH] merge code --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/workflows/check-license.yml | 2 +- .github/workflows/integration-test.yml | 2 +- .github/workflows/publish-docker.yaml | 2 +- .github/workflows/publish-snapshot.yml | 2 +- LICENSE | 1 + .../common/exception/FatalException.java | 2 +- .../java/org/apache/linkis/common/io/Fs.java | 2 + .../linkis/common/utils/JobHistoryInfo.java | 215 ++ .../linkis/common/utils}/LinkisUtils.java | 87 +- .../apache/linkis/common/utils/MD5Utils.java | 45 + .../linkis/common/utils/SecurityUtils.java | 72 + .../common/utils/VariableOperationUtils.java | 19 +- .../linkis/common/ServiceInstance.scala | 18 +- .../linkis/common/conf/BDPConfiguration.scala | 25 +- .../linkis/common/conf/Configuration.scala | 33 +- .../apache/linkis/common/log/LogUtils.scala | 4 +- .../common/utils/CodeAndRunTypeUtils.scala | 19 +- .../linkis/common/utils/LDAPUtils.scala | 34 + .../apache/linkis/common/utils/Utils.scala | 4 +- .../linkis/common/utils/VariableUtils.scala | 56 +- .../variable/VariableOperationTest.java | 15 +- .../src/test/resources/linkis.properties | 4 +- .../common/conf/ConfigurationTest.scala | 7 + .../common/utils/VariableUtilsTest.scala | 10 + .../hadoop/common/utils/KerberosUtils.java | 93 +- .../hadoop/common/conf/HadoopConf.scala | 11 +- .../entity/HDFSFileSystemContainer.scala | 3 +- .../hadoop/common/utils/HDFSUtils.scala | 141 +- .../common/utils/KerberosUtilsTest.java | 2 +- .../hadoop/common/conf/HadoopConfTest.scala | 2 +- .../httpclient/AbstractHttpClient.scala | 31 +- .../AbstractAuthenticationStrategy.scala | 13 +- .../authentication/Authentication.scala | 2 + .../httpclient/config/ClientConfig.scala | 7 +- .../config/ClientConfigBuilder.scala | 10 +- linkis-commons/linkis-module/pom.xml | 91 +- .../linkis/DataWorkCloudApplication.java | 10 +- 
.../linkis/proxy/ProxyUserService.java} | 7 +- .../linkis/server/InterceptorConfigure.java | 15 +- .../linkis/server/PerformanceInterceptor.java | 57 + .../linkis/server/utils/ModuleUserUtils.java | 4 + .../swagger/SwaggerBeanPostProcessor.java | 65 + .../linkis/utils/LinkisSpringUtils.java | 48 + .../apache/linkis/server/Knife4jConfig.scala | 4 +- .../server/conf/ServerConfiguration.scala | 5 +- .../linkis/server/ticket/RedisClient.scala | 2 +- .../linkis/mybatis/DataSourceUtils.java | 16 + .../mybatis/conf/MybatisConfiguration.java | 5 + .../protocol/constants/TaskConstant.java | 3 +- .../linkis/protocol/UserWithCreator.scala | 20 + .../linkis/protocol/engine/JobInstance.scala | 26 + .../linkis/protocol/utils/TaskUtils.scala | 17 +- .../protocol/utils/ZuulEntranceUtils.scala | 2 +- .../engine/RequestEngineStatusTest.scala | 44 - linkis-commons/linkis-rpc/pom.xml | 9 + .../errorcode/LinkisRpcErrorCodeSummary.java | 6 + .../utils/LoadBalancerOptionsUtils.java | 4 +- .../apache/linkis/rpc/MessageReceiver.scala | 4 + .../linkis/rpc/RPCSpringBeanCache.scala | 15 +- .../linkis/rpc/conf/RPCConfiguration.scala | 32 +- .../rpc/interceptor/RPCServerLoader.scala | 23 +- .../common/InstanceRPCLoadBalancer.scala | 42 - .../common/RetryableRPCInterceptor.scala | 20 +- .../SingleInstanceRPCLoadBalancer.scala | 55 - .../SpringCloudFeignConfigurationCache.scala | 12 +- .../rpc/sender/SpringMVCRPCSender.scala | 140 +- .../apache/linkis/rpc/utils/RPCUtils.scala | 12 +- .../utils/LoadBalancerOptionsUtilsTest.java | 34 - .../linkis/scheduler/AbstractScheduler.scala | 13 +- .../conf/SchedulerConfiguration.scala | 7 +- .../scheduler/queue/AbstractGroup.scala | 7 + .../linkis/scheduler/queue/ConsumeQueue.scala | 2 + .../linkis/scheduler/queue/Consumer.scala | 6 + .../apache/linkis/scheduler/queue/Job.scala | 19 +- .../scheduler/queue/LoopArrayQueue.scala | 2 +- .../scheduler/queue/SchedulerEventState.scala | 4 + .../queue/fifoqueue/FIFOUserConsumer.scala | 37 +- 
.../ParallelConsumerManager.scala | 9 +- .../ParallelSchedulerContextImpl.scala | 6 + .../conf/SchedulerConfigurationTest.scala | 3 +- linkis-commons/linkis-storage/pom.xml | 53 +- .../org/apache/linkis/storage/FSFactory.java | 96 - .../storage/conf/LinkisStorageConf.java | 65 - .../linkis/storage/csv/StorageCSVWriter.java | 146 - .../linkis/storage/domain/DataType.java | 237 -- .../apache/linkis/storage/domain/Dolphin.java | 141 - .../linkis/storage/domain/MethodEntity.java | 132 - .../domain/MethodEntitySerializer.java | 84 - .../LinkisStorageErrorCodeSummary.java | 8 + .../linkis/storage/excel/ExcelFsWriter.java | 41 - .../linkis/storage/excel/ExcelXlsReader.java | 278 -- .../linkis/storage/excel/FirstRowDeal.java | 55 - .../linkis/storage/excel/RowToCsvDeal.java | 66 - .../storage/excel/StorageExcelWriter.java | 310 --- .../apache/linkis/storage/excel/XlsUtils.java | 51 +- .../linkis/storage/excel/XlsxUtils.java | 55 +- .../exception/ColLengthExceedException.java | 22 +- .../exception/ColumnIndexExceedException.java | 34 + .../storage/exception/StorageErrorCode.java | 8 +- .../exception/StorageReadException.java | 18 +- .../factory/impl/BuildHDFSFileSystem.java | 6 +- .../factory/impl/BuildLocalFileSystem.java | 8 +- .../storage/factory/impl/BuildOSSSystem.java | 70 - .../factory/impl/BuildS3FileSystem.java | 62 - .../apache/linkis/storage/fs/FileSystem.java | 6 +- .../storage/fs/impl/HDFSFileSystem.java | 97 +- .../storage/fs/impl/LocalFileSystem.java | 46 +- .../linkis/storage/fs/impl/OSSFileSystem.java | 394 --- .../linkis/storage/fs/impl/S3FileSystem.java | 367 --- .../io/IOMethodInterceptorFactory.java | 49 - .../resultset/DefaultResultSetFactory.java | 188 -- .../storage/resultset/ResultSetFactory.java | 74 - .../resultset/ResultSetReaderFactory.java | 116 - .../resultset/ResultSetWriterFactory.java | 88 - .../storage/resultset/StorageResultSet.java | 79 - .../resultset/StorageResultSetReader.java | 175 -- .../resultset/StorageResultSetWriter.java | 260 
-- .../storage/resultset/html/HtmlResultSet.java | 48 - .../resultset/io/IOResultDeserializer.java | 37 - .../resultset/io/IOResultSerializer.java | 47 - .../storage/resultset/io/IOResultSet.java | 43 - .../resultset/picture/PictureResultSet.java | 48 - .../resultset/table/TableMetaData.java | 44 - .../table/TableResultDeserializer.java | 100 - .../table/TableResultSerializer.java | 111 - .../resultset/txt/TextResultDeserializer.java | 36 - .../resultset/txt/TextResultSerializer.java | 50 - .../storage/resultset/txt/TextResultSet.java | 46 - .../linkis/storage/script/Compaction.java | 44 - .../linkis/storage/script/ParserFactory.java | 31 - .../linkis/storage/script/ScriptFsReader.java | 40 - .../linkis/storage/script/VariableParser.java | 131 - .../script/compaction/PYScriptCompaction.java | 45 - .../script/compaction/QLScriptCompaction.java | 45 - .../compaction/ScalaScriptCompaction.java | 45 - .../compaction/ShellScriptCompaction.java | 45 - .../script/parser/CommonScriptParser.java | 75 - .../storage/script/parser/PYScriptParser.java | 45 - .../storage/script/parser/QLScriptParser.java | 45 - .../script/parser/ScalaScriptParser.java | 45 - .../script/parser/ShellScriptParser.java | 45 - .../script/reader/StorageScriptFsReader.java | 161 -- .../script/writer/StorageScriptFsWriter.java | 124 - .../storage/source/AbstractFileSource.java | 124 - .../linkis/storage/source/FileSource.java | 177 -- .../linkis/storage/source/FileSplit.java | 324 --- .../storage/source/ResultsetFileSource.java | 64 - .../linkis/storage/source/TextFileSource.java | 71 - .../linkis/storage/utils/FileSystemUtils.java | 136 - .../storage/utils/StorageConfiguration.java | 161 -- .../linkis/storage/utils/StorageHelper.java | 156 -- .../linkis/storage/utils/StorageUtils.java | 275 -- .../org/apache/linkis/storage/FSFactory.scala | 98 + .../apache/linkis/storage/LineMetaData.scala | 20 +- .../apache/linkis/storage/LineRecord.scala} | 21 +- .../storage/conf/LinkisStorageConf.scala | 81 + 
.../linkis/storage/csv/CSVFsReader.scala | 22 + .../linkis/storage/csv/CSVFsWriter.scala | 48 + .../linkis/storage/csv/StorageCSVReader.scala | 4 +- .../linkis/storage/csv/StorageCSVWriter.scala | 117 + .../linkis/storage/domain/DataType.scala | 164 ++ .../linkis/storage/domain/Dolphin.scala | 130 + .../linkis/storage/domain/MethodEntity.scala | 97 + .../linkis/storage/excel/ExcelFsReader.scala | 22 + .../linkis/storage/excel/ExcelFsWriter.scala} | 40 +- .../linkis/storage/excel/ExcelXlsReader.java | 378 +++ .../storage/excel/StorageExcelReader.scala | 4 +- .../storage/excel/StorageExcelWriter.scala | 236 ++ .../excel/StorageMultiExcelWriter.scala} | 43 +- .../apache/linkis/storage/io/IOClient.scala | 75 + .../io/IOMethodInterceptorCreator.scala} | 46 +- .../resultset/DefaultResultSetFactory.scala | 130 + .../storage/resultset/ResultMetaData.scala} | 6 +- .../storage/resultset/ResultRecord.scala | 22 + .../storage/resultset/ResultSetFactory.scala | 69 + .../storage/resultset/ResultSetReader.scala | 93 + .../storage/resultset/ResultSetWriter.scala | 74 + .../storage/resultset/StorageResultSet.scala | 51 + .../resultset/StorageResultSetReader.scala | 174 ++ .../resultset/StorageResultSetWriter.scala | 230 ++ .../resultset/html/HtmlResultSet.scala | 34 + .../storage/resultset/io/IOMetaData.scala | 25 + .../storage/resultset/io/IORecord.scala | 25 + .../resultset/io/IOResultDeserializer.scala | 32 +- .../resultset/io/IOResultSerializer.scala | 43 + .../storage/resultset/io/IOResultSet.scala | 32 + .../resultset/picture/PictureResultSet.scala | 34 + .../resultset/table/TableMetaData.scala | 20 +- .../storage/resultset/table/TableRecord.scala | 14 +- .../table/TableResultDeserializer.scala | 156 ++ .../table/TableResultSerializer.scala | 99 + .../resultset/table/TableResultSet.scala | 32 + .../txt/TextResultDeserializer.scala | 34 + .../resultset/txt/TextResultSerializer.scala | 46 + .../storage/resultset/txt/TextResultSet.scala | 33 + 
.../storage/script/ScriptFsReader.scala | 37 + .../storage/script/ScriptFsWriter.scala | 115 + .../storage/script/VariableParser.scala | 117 + .../compaction/CommonScriptCompaction.scala} | 30 +- .../compaction/PYScriptCompaction.scala | 39 + .../compaction/QLScriptCompaction.scala} | 25 +- .../compaction/ScalaScriptCompaction.scala | 38 + .../compaction/ShellScriptCompaction.scala | 39 + .../script/parser/CommonScriptParser.scala | 63 + .../script/parser/PYScriptParser.scala | 26 +- .../script/parser/QLScriptParser.scala | 26 +- .../script/parser/ScalaScriptParser.scala | 39 + .../script/parser/ShellScriptParser.scala | 39 + .../script/reader/StorageScriptFsReader.scala | 126 + .../script/writer/StorageScriptFsWriter.scala | 120 + .../storage/source/AbstractFileSource.scala | 82 + .../linkis/storage/source/FileSource.scala | 160 ++ .../linkis/storage/source/FileSplit.scala | 202 ++ .../storage/source/ResultsetFileSource.scala | 53 + .../storage/source/TextFileSource.scala | 52 + .../storage/utils/FileSystemUtils.scala | 171 ++ .../storage/utils/StorageConfiguration.scala | 85 + .../linkis/storage/utils/StorageHelper.scala | 107 + .../linkis/storage/utils/StorageUtils.scala | 231 ++ .../linkis/storage/LineMetaDataTest.java | 36 - .../StorageResultSetWriterFactoryTest.java | 53 - .../writer/StorageScriptFsWriterTest.java | 8 +- .../source/ResultsetFileSourceTest.java | 50 - .../test/resources/result-read-test.dolphin | 1 - .../test/resources/storage-read-test.dolphin | 1 - .../linkis/storage/domain/DataTypeTest.scala | 17 +- .../utils/StorageConfigurationTest.scala | 5 +- .../storage/utils/StorageUtilsTest.scala} | 15 +- .../linkis/cli/application/CtxBuilder.java | 5 + .../application/LinkisClientApplication.java | 2 +- .../command/template/option/BaseOption.java | 2 +- .../command/template/option/Flag.java | 9 +- .../job/interactive/InteractiveJobDesc.java | 11 + .../InteractiveJobDescBuilder.java | 11 +- .../operator/ujes/LinkisJobOper.java | 1 + 
.../present/file/ResultFileWriter.java | 3 +- .../computation/client/LinkisJobBuilder.scala | 2 +- .../computation/client/LinkisJobClient.scala | 30 +- .../client/once/LinkisManagerClient.scala | 12 +- .../once/action/ListEngineConnAction.scala | 48 + .../once/result/GetEngineConnResult.scala | 38 + .../once/result/ListEngineConnResult.scala | 35 + .../client/once/simple/SimpleOnceJob.scala | 13 +- .../once/simple/SimpleOnceJobBuilder.scala | 53 +- .../linkis/ujes/client/LinkisFSClient.scala} | 31 +- .../linkis/ujes/client/UJESClient.scala | 60 +- .../linkis/ujes/client/UJESClientImpl.scala | 2 +- .../client/request/CreateNewDirAction.scala | 61 + .../client/request/IsPathExistAction.scala | 56 + .../ujes/client/request/JobSubmitAction.scala | 14 + .../ujes/client/request/ResultSetAction.scala | 30 + .../client/request/UploadFileAction.scala | 82 + .../client/response/CreateNewDirResult.scala | 25 + .../client/response/IsPathExistResult.scala | 29 + .../client/response/ResultSetResult.scala | 28 + .../client/response/UploadFileResult.scala | 14 +- .../ujes/client/utils/UJESClientUtils.scala | 36 + .../common/constant/CodeConstants.java | 11 +- .../common/entity/TemplateConfKey.java | 79 + .../common/entity/job/JobRequest.java | 11 + .../protocol/conf/TemplateConfRequest.java} | 35 +- .../protocol/conf/TemplateConfResponse.java} | 18 +- .../governance/common/utils/ECPathUtils.java | 53 - ...onConf.scala => GovernaceCommonConf.scala} | 25 +- .../common/constant/ec/ECConstants.scala | 4 + .../constant/job/JobRequestConstants.scala | 12 +- .../exception/GovernanceErrorException.scala | 5 +- .../governance/common/paser/CodeParser.scala | 14 +- .../protocol/conf/AcrossClusterConf.scala | 26 + .../common/protocol/conf/DepartmentConf.scala | 26 + .../common/protocol/conf/TenantConf.scala | 11 +- .../common/protocol/job/JobReqProcotol.scala | 10 +- .../protocol/task/ResponseTaskExecute.scala | 34 +- .../governance/common/utils/ECPathUtils.scala | 66 + 
.../common/utils/EngineConnArguments.scala | 8 +- .../common/utils/GovernanceUtils.scala | 27 +- .../governance/common/utils/JobUtils.scala | 30 +- .../utils/OnceExecutorContentUtils.scala | 10 +- .../common/paser/ScalaCodeParserTest.scala | 11 +- .../core/launch/ProcessEngineConnLaunch.scala | 37 +- .../linkis/ecm/core/utils/PortUtils.scala | 28 +- .../linkis-engineconn-manager-server/pom.xml | 6 + .../src/main/assembly/distribution.xml | 3 - .../EngineconnServerErrorCodeSummary.java | 10 +- .../linkis/ecm/restful/ECMRestfulApi.java | 173 ++ .../linkis/ecm/scheduled/EcmClearTask.java | 2 +- .../operator/EngineConnLogOperator.java | 219 -- .../operator/EngineConnYarnLogOperator.java | 149 -- .../impl/DefaultEngineConnKillService.java | 7 +- .../linkis/ecm/utils/ECMCacheUtils.java} | 37 +- .../ecm/server/conf/ECMConfiguration.scala | 26 +- .../server/exception/ECMErrorException.scala | 11 +- .../ecm/server/hook/JarUDFLoadECMHook.scala | 1 + .../operator/EngineConnLogOperator.scala | 193 ++ .../operator/EngineConnYarnLogOperator.scala | 194 ++ .../server/service/ECMMetricsService.scala | 4 +- .../AbstractEngineConnLaunchService.scala | 7 +- .../impl/BmlResourceLocalizationService.scala | 2 + .../impl/DefaultECMMetricsService.scala | 12 +- .../impl/DefaultECMRegisterService.scala | 22 +- .../service/impl/DefaultOperateService.scala | 24 +- .../service/impl/ECMListenerService.scala | 1 - .../impl/ProcessEngineConnLaunchService.scala | 41 +- .../linkis/ecm/server/util/ECMUtils.scala | 3 +- .../impl/DefaultECMRegisterServiceTest.java | 55 - .../OperableOnceEngineConnOperator.java | 92 - .../OnceExecutorExecutionContext.scala | 8 +- .../OperableOnceEngineConnOperator.scala | 72 + .../monitor/TimingMonitorService.java | 6 +- .../exception/HookExecuteException.java | 30 +- .../AsyncConcurrentComputationExecutor.scala | 66 +- .../bml/BmlEnginePreExecuteHook.scala | 9 +- .../conf/ComputationExecutorConf.scala | 14 +- .../executor/cs/CSResourceParser.scala | 20 +- 
.../executor/cs/CSTableRegister.scala | 2 +- .../executor/cs/CSTableResultSetWriter.scala | 2 +- .../execute/ComputationExecutor.scala | 117 +- .../ConcurrentComputationExecutor.scala | 51 +- .../execute/EngineExecutionContext.scala | 43 +- .../hook/ComputationExecutorHook.scala | 7 + .../executor/hook/InitSQLHook.scala | 3 +- .../executor/hook/PythonModuleLoad.scala | 161 ++ .../hook/PythonModuleLoadEngineConnHook.scala | 64 + .../executor/hook/PythonSparkEngineHook.scala | 45 + .../executor/hook/UDFLoadEngineConnHook.scala | 3 +- .../executor/hook/UseDatabaseEngineHook.scala | 2 +- .../hook/executor/ExecuteOnceHook.scala | 8 + .../ComputationEngineConnMetrics.scala | 8 + .../executor/rs/RsOutputStream.scala | 6 +- .../DefaultNodeHeartbeatMsgManager.scala | 4 + .../service/TaskExecutionServiceImpl.scala | 277 +- .../upstream/ECTaskEntranceMonitor.scala | 2 +- ...ingleThreadUpstreamConnectionMonitor.scala | 21 + .../upstream/handler/ECTaskKillHandler.scala | 2 +- .../ECTaskEntranceMonitorService.scala | 3 +- .../utlis/ComputationEngineConstant.scala | 2 + .../common/conf/EngineConnConf.scala | 8 +- .../common/conf/EngineConnConstant.scala | 8 + .../engineconn/core/hook/ShutdownHook.scala | 10 + .../core/util/EngineConnUtils.scala | 2 +- .../executor/log/AbstractLogCache.java | 39 +- .../acessible/executor/log/MountLogCache.java | 2 +- .../acessible/executor/log/SendAppender.java | 39 +- .../acessible/executor/log/TimeLogCache.java | 25 +- .../EngineConnApplicationInfoOperator.java | 57 - .../AccessibleExecutorConfiguration.scala | 11 +- .../AccessibleEngineConnExecution.scala | 40 +- .../info/NodeHealthyInfoManager.scala | 31 +- .../executor/lock/EngineConnTimedLock.scala | 17 +- .../acessible/executor/log/LogHelper.scala | 7 + .../EngineConnApplicationInfoOperator.scala | 52 + .../service/DefaultAccessibleService.scala | 1 - .../DefaultExecutorHeartbeatService.scala | 32 +- .../service/DefaultManagerService.scala | 6 +- .../service/DefaultOperateService.scala | 29 
+- .../service/EngineConnTimedLockService.scala | 12 +- .../service/ExecutorHeartbeatService.scala | 2 + .../executor/service/LockService.scala | 7 +- .../utils/AccessableExecutorUtils.scala | 28 +- .../hook/CallbackEngineConnHook.scala | 25 +- .../callback/service/EngineConnCallback.scala | 2 +- ...ala => EngineConnIdentifierCallback.scala} | 1 + .../service/EngineConnTimedCallback.scala | 20 + .../executor/ExecutorExecutionContext.scala | 17 +- .../EngineConnExecutorConfiguration.scala | 3 + .../EngineconnCoreErrorCodeSummary.java | 6 +- .../common/conf/EnvConfiguration.scala | 4 +- .../exception/EngineConnPluginErrorCode.scala | 26 + .../EngineConnPluginErrorException.scala | 7 +- .../JavaProcessEngineConnLaunchBuilder.scala | 52 +- .../resource/EngineResourceRequest.scala | 2 +- .../common/resource/UserNodeResource.scala | 22 + .../linkis-entrance/pom.xml | 6 + .../conf/EntranceSpringConfiguration.java | 13 +- .../entrance/constant/ServiceNameConsts.java | 2 + .../errorcode/EntranceErrorCodeSummary.java | 11 +- .../entrance/job/EntranceExecutionJob.java | 50 +- .../parser/AbstractEntranceParser.java | 1 - .../linkis/entrance/parser/ParserUtils.java | 2 +- .../persistence/QueryPersistenceManager.java | 3 +- .../restful/EntranceConsumerRestfulApi.java | 88 + .../restful/EntranceLabelRestfulApi.java | 75 +- .../restful/EntranceMetricRestfulApi.java | 40 +- .../entrance/restful/EntranceRestfulApi.java | 521 +++- .../scheduler/CreatorECTypeDefaultConf.java | 93 + .../server/DefaultEntranceServer.java | 16 +- .../server/EntranceFailoverJobServer.java | 180 ++ .../linkis/entrance/EntranceServer.scala | 522 +++- .../entrance/EntranceWebSocketService.scala | 12 - .../entrance/conf/EntranceConfiguration.scala | 77 +- .../execute/DefaultEntranceExecutor.scala | 14 +- .../entrance/execute/EntranceExecutor.scala | 8 - .../execute/EntranceExecutorManager.scala | 10 + .../linkis/entrance/execute/EntranceJob.scala | 5 +- .../impl/EntranceExecutorManagerImpl.scala | 2 +- 
.../execute/simple/EngineLockListener.scala | 19 +- .../execute/simple/SimpleASyncListener.scala | 59 + .../simple/SimpleEntranceExecutor.scala | 155 ++ .../execute/simple/SimpleSyncListener.scala | 83 + .../interceptor/impl/CommentInterceptor.scala | 31 +- .../impl/CustomVariableUtils.scala | 28 +- .../entrance/interceptor/impl/Explain.scala | 75 +- .../impl/StorePathEntranceInterceptor.scala | 47 +- .../impl/TemplateConfInterceptor.scala} | 25 +- .../interceptor/impl/TemplateConfUtils.scala | 278 ++ .../impl/UserCreatorIPCheckUtils.scala | 2 +- .../impl/VarSubstitutionInterceptor.scala | 18 +- .../apache/linkis/entrance/log/Cache.scala | 7 +- .../linkis/entrance/log/CacheLogReader.scala | 59 +- .../linkis/entrance/log/CacheLogWriter.scala | 22 +- .../entrance/log/ErrorCodeManager.scala | 41 +- .../entrance/log/HDFSCacheLogWriter.scala | 48 +- .../linkis/entrance/log/LogManager.scala | 16 +- .../linkis/entrance/log/LogReader.scala | 7 +- .../linkis/entrance/log/LoopArray.scala | 51 +- ...tranceUserParallelOrchestratorPlugin.scala | 40 +- .../parser/CommonEntranceParser.scala | 2 +- .../persistence/EntranceResultSetEngine.scala | 28 +- .../scheduler/EntranceFIFOUserConsumer.scala | 106 + .../scheduler/EntranceGroupFactory.scala | 155 +- .../EntranceParallelConsumerManager.scala | 83 + .../scheduler/EntranceSchedulerContext.scala | 5 + .../cache/CacheOutputExecuteResponse.scala | 24 - .../scheduler/cache/ReadCacheConsumer.scala | 158 -- .../cache/ReadCacheConsumerManager.scala | 44 - .../entrance/timeout/JobTimeoutManager.scala | 126 +- .../entrance/utils/CommonLogPathUtils.scala | 37 + .../linkis/entrance/utils/EntranceUtils.scala | 108 + .../entrance/utils/JobHistoryHelper.scala | 152 +- .../interceptor/impl/SQLExplainTest.java | 55 + .../impl/TemplateConfUtilsTest.java | 128 + .../interceptor/impl/TestCommentHelper.java | 65 + .../impl/TestHDFSCacheLogWriter.java | 75 + .../interceptor/impl/TestReplaceComment.scala | 42 + .../linkis/ujes/jdbc/UJESSQLDriver.java | 
4 + .../ujes/jdbc/LinkisSQLConnection.scala | 41 +- .../linkis/ujes/jdbc/LinkisSQLStatement.scala | 48 +- .../linkis/ujes/jdbc/UJESClientFactory.scala | 53 +- .../linkis/ujes/jdbc/UJESSQLDriverMain.scala | 9 + .../linkis/ujes/jdbc/UJESSQLResultSet.scala | 67 +- .../ujes/jdbc/LinkisSQLStatementTest.java | 194 +- .../ujes/jdbc/UJESSQLResultSetTest.java | 108 + .../linkis-application-manager/pom.xml | 6 + .../loader/EngineConnPluginLoaderConf.java | 44 - .../CacheablesEngineConnPluginLoader.java | 3 +- .../DefaultEngineConnPluginLoader.java | 27 +- .../BmlEngineConnPluginResourceLoader.java | 4 +- .../loader/utils/EngineConnPluginUtils.java | 2 +- .../conf/EngineConnPluginConfiguration.java | 42 - ...bstractEngineConnBmlResourceGenerator.java | 134 - ...DefaultEngineConnBmlResourceGenerator.java | 114 - .../EngineConnBmlResourceGenerator.java | 30 - .../EngineConnLocalizeResourceImpl.java | 69 - .../DefaultEngineConnLaunchService.java | 106 - ...faultEngineConnResourceFactoryService.java | 80 - .../DefaultEngineConnResourceService.java | 351 --- .../service/EngineConnResourceService.java | 43 - .../RefreshAllEngineConnResourceRequest.java | 28 - .../impl/EnginePluginAdminServiceImpl.java | 7 +- .../manager/LinkisManagerApplication.java | 1 + .../manager/am/conf/AMConfiguration.java | 134 +- .../am/conf/ConfigurationMapCache.java | 4 +- ...DefaultEngineConnConfigurationService.java | 5 +- .../manager/am/conf/ManagerMonitorConf.java | 9 +- .../am/converter/DefaultMetricsConverter.java | 17 +- .../converter}/MetricsConverter.java | 2 +- .../am/exception/AMErrorException.java | 10 +- .../manager/am/label/AMLabelChecker.java | 2 - .../manager/am/label/AMLabelFilter.java | 4 +- .../common => am}/label/LabelChecker.java | 2 +- .../MultiUserEngineReuseLabelChooser.java | 2 - .../am/locker/DefaultEngineNodeLocker.java | 10 +- .../am/manager/DefaultEMNodeManager.java | 10 +- .../am/manager/DefaultEngineNodeManager.java | 129 +- .../manager/am/manager/EMNodeManager.java | 7 +- 
.../manager/am/manager/EngineNodeManager.java | 2 + .../am/pointer/AbstractNodePointer.java | 18 +- .../am/pointer/DefaultEMNodPointer.java | 42 +- .../am/pointer/DefaultEngineNodPointer.java | 10 +- .../am/pointer/DefaultNodePointerBuilder.java | 3 - .../common => am}/pointer/EMNodPointer.java | 2 +- .../pointer/EngineNodePointer.java | 2 +- .../common => am}/pointer/NodePointer.java | 2 +- .../pointer/NodePointerBuilder.java | 2 +- .../am/restful/ECResourceInfoRestfulApi.java | 111 +- .../manager/am/restful/EMRestfulApi.java | 170 +- .../manager/am/restful/EngineRestfulApi.java | 106 +- .../am/selector/DefaultECAvailableRule.java | 43 - .../am/selector/DefaultNodeSelector.java | 75 - .../rule/AvailableNodeSelectRule.java | 68 - .../rule/ConcurrencyNodeSelectRule.java | 37 - .../am/selector/rule/NewECMStandbyRule.java | 82 - .../selector/rule/ResourceNodeSelectRule.java | 124 - .../am/selector/rule/ScoreNodeSelectRule.java | 70 - .../selector/rule/TaskInfoNodeSelectRule.java | 73 - .../am/service/ECResourceInfoService.java | 17 +- .../ConfCacheRemoveBroadcastListener.java | 75 - .../service/em/DefaultECMOperateService.java | 37 - .../am/service/em/DefaultEMEngineService.java | 231 -- .../am/service/em/DefaultEMInfoService.java | 142 - .../service/em/DefaultEMRegisterService.java | 124 - .../em/DefaultEMUnregisterService.java | 79 - .../am/service/em/ECMOperateService.java | 27 - .../am/service/em/EMUnregisterService.java | 28 - .../service/engine/AbstractEngineService.java | 41 - .../engine/DefaultEngineAskEngineService.java | 198 -- .../DefaultEngineConnCanKillService.java | 196 -- .../engine/DefaultEngineCreateService.java | 441 --- .../engine/DefaultEngineInfoService.java | 145 - .../engine/DefaultEngineRecycleService.java | 92 - .../engine/DefaultEngineReuseService.java | 272 -- .../engine/DefaultEngineStopService.java | 316 --- .../EngineConnStatusCallbackService.java | 25 - .../am/service/engine/EngineInfoService.java | 52 - 
.../service/engine/EngineOperateService.java | 28 - .../am/service/engine/EngineStopService.java | 61 - .../service/heartbeat/AMHeartbeatService.java | 86 - ...efaultEngineConnStatusCallbackService.java | 39 +- .../impl/ECResourceInfoServiceImpl.java | 86 +- .../service/monitor/NodeHeartbeatMonitor.java | 372 --- .../linkis/manager/am/util/EMUtils.java | 123 + .../linkis/manager/am/utils/AMUtils.java | 347 --- .../manager/am/utils/DefaultRetryHandler.java | 122 - .../linkis/manager/am/vo/AMEngineNodeVo.java | 7 +- .../linkis/manager/am/vo/CanCreateECRes.java | 99 + .../apache/linkis/manager/am/vo/ConfigVo.java | 63 +- .../apache/linkis/manager/am/vo/EMNodeVo.java | 16 +- .../manager/label/conf/LabelManagerConf.java | 29 - .../label/score/DefaultNodeLabelScorer.java | 205 -- .../label/service/NodeLabelRemoveService.java | 25 - .../label/service/NodeLabelService.java | 86 - .../impl/DefaultNodeLabelAddService.java | 69 - .../impl/DefaultNodeLabelRemoveService.java | 82 - .../service/impl/DefaultNodeLabelService.java | 497 ---- .../impl/DefaultResourceLabelService.java | 161 -- .../service/impl/DefaultUserLabelService.java | 167 -- .../manager/rm/conf/ResourceStatus.java | 4 +- .../manager/rm/domain/RMLabelContainer.java | 52 +- .../rm/entity/LabelResourceMapping.java | 68 - .../manager/rm/exception/RMErrorCode.java | 30 +- .../KubernetesResourceRequester.java | 209 -- .../KubernetesResourceIdentifierParser.java | 38 - .../impl/ExternalResourceServiceImpl.java | 20 +- .../rm/external/yarn/YarnQueueInfo.java | 86 + .../external/yarn/YarnResourceRequester.java | 95 +- .../manager/rm/message/RMMessageService.java | 73 - .../manager/rm/restful/RMMonitorRest.java | 880 ------ .../rm/service/RequestResourceService.java | 473 ---- .../rm/service/ResourceLockService.java | 95 - .../manager/rm/service/impl/ChangeType.java | 40 - .../impl/DefaultReqResourceService.java | 37 - .../service/impl/DefaultResourceManager.java | 886 ------ ...DriverAndKubernetesReqResourceService.java 
| 133 - .../impl/DriverAndYarnReqResourceService.java | 151 -- .../impl/LabelResourceServiceImpl.java | 102 - .../rm/service/impl/ResourceLogService.java | 217 -- .../rm/service/impl/UserResourceService.java | 128 - .../linkis/manager/rm/utils/RMUtils.java | 288 -- .../rm/utils/RequestKerberosUrlUtils.java | 6 + .../manager/rm/utils/UserConfiguration.java | 230 -- .../EngineConnBmlResourceMapper.xml | 44 +- .../ExternalResourceProviderDaoImpl.xml | 4 +- .../mysql/EngineConnBmlResourceMapper.xml | 93 - .../loader/EngineConnPluginLoaderConf.scala | 44 + .../conf/EngineConnPluginConfiguration.scala | 42 + .../EngineConnLaunchInterceptor.scala} | 14 +- .../loader/EngineConnPluginsLoader.scala | 32 + ...stractEngineConnBmlResourceGenerator.scala | 123 + ...efaultEngineConnBmlResourceGenerator.scala | 109 + .../EngineConnBmlResourceGenerator.scala} | 31 +- .../DefaultEngineConnLaunchService.scala | 90 + ...aultEngineConnResourceFactoryService.scala | 64 + .../DefaultEngineConnResourceService.scala | 324 +++ .../service/EngineConnLaunchService.scala} | 15 +- .../EngineConnResourceFactoryService.scala} | 19 +- .../service/EngineConnResourceService.scala | 78 + .../manager/am/hook/AskEngineConnHook.scala | 25 + .../am/hook/AskEngineConnHookContext.scala} | 12 +- .../am/selector/DefaultNodeSelector.scala | 75 + .../manager/am/selector/ECAvailableRule.scala | 55 + .../manager/am/selector/NodeSelector.scala} | 15 +- .../rule/AvailableNodeSelectRule.scala | 60 + .../rule/ConcurrencyNodeSelectRule.scala | 24 +- .../selector/rule/HotspotExclusionRule.scala} | 56 +- .../am/selector/rule/NewECMStandbyRule.scala | 83 + .../am/selector/rule/NodeSelectRule.scala} | 9 +- .../rule/OverLoadNodeSelectRule.scala | 66 + .../rule/ResourceNodeSelectRule.scala | 101 + .../selector/rule/ScoreNodeSelectRule.scala | 55 + .../rule/TaskInfoNodeSelectRule.scala | 63 + .../manager/am/service/EMEngineService.scala} | 27 +- .../manager/am/service/EngineService.scala | 12 +- 
.../am/service/HeartbeatService.scala} | 9 +- .../ConfCacheRemoveBroadcastListener.scala | 70 + .../service/em/DefaultECMOperateService.scala | 36 + .../service/em/DefaultEMEngineService.scala | 216 ++ .../am/service/em/DefaultEMInfoService.scala | 310 +++ .../service/em/DefaultEMRegisterService.scala | 121 + .../em/DefaultEMUnregisterService.scala | 84 + .../am/service/em/ECMOperateService.scala | 27 + .../am/service/em/EMInfoService.scala} | 24 +- .../am/service/em/EMRegisterService.scala | 33 + .../am/service/em/EMUnregisterService.scala | 12 +- .../engine/AbstractEngineService.scala | 30 +- .../DefaultEngineAskEngineService.scala | 329 +++ .../DefaultEngineConnCanKillService.scala | 173 ++ .../engine/DefaultEngineCreateService.scala | 474 ++++ .../engine/DefaultEngineInfoService.scala | 161 ++ .../engine/DefaultEngineOperateService.scala | 45 +- .../engine/DefaultEngineRecycleService.scala | 82 + .../engine/DefaultEngineReuseService.scala | 270 ++ .../engine/DefaultEngineStopService.scala | 343 +++ .../engine/DefaultEngineSwitchService.scala} | 18 +- .../engine/EngineAskEngineService.scala | 40 + .../engine/EngineConnCanKillService.scala | 26 + .../service/engine/EngineCreateService.scala} | 17 +- .../am/service/engine/EngineInfoService.scala | 53 + .../am/service/engine/EngineKillService.scala | 31 + .../service/engine/EngineOperateService.scala | 23 +- .../engine/EngineRecycleService.scala} | 15 +- .../service/engine/EngineReuseService.scala} | 17 +- .../am/service/engine/EngineStopService.scala | 75 + .../service/engine/EngineSwitchService.scala} | 14 +- .../heartbeat/AMHeartbeatService.scala | 112 + .../monitor/NodeHeartbeatMonitor.scala | 493 ++++ .../linkis/manager/am/utils/AMUtils.scala | 310 +++ .../manager/label/LabelManagerUtils.scala} | 26 +- .../label/score/DefaultNodeLabelScorer.scala | 182 ++ .../score/LabelScoreServiceInstance.scala | 35 + .../label/score/NodeLabelScorer.scala} | 35 +- .../label/service/NodeLabelAddService.scala} | 12 +- 
.../service/NodeLabelRemoveService.scala | 26 + .../label/service/NodeLabelService.scala | 103 + .../label/service/ResourceLabelService.scala} | 32 +- .../label/service/UserLabelService.scala | 30 +- .../impl/DefaultNodeLabelAddService.scala | 56 + .../impl/DefaultNodeLabelRemoveService.scala | 75 + .../impl/DefaultNodeLabelService.scala | 549 ++++ .../impl/DefaultResourceLabelService.scala | 161 ++ .../impl/DefaultUserLabelService.scala | 123 + .../manager/rm/entity/LabelResourceMap.scala} | 39 +- .../rm/entity/ResourceOperationType.scala} | 10 +- .../manager/rm/message/RMMessageService.scala | 68 + .../manager/rm/restful/RMMonitorRest.scala | 843 ++++++ .../rm/service/LabelResourceService.scala} | 39 +- .../rm/service/RequestResourceService.scala | 531 ++++ .../rm/service/ResourceLockService.scala | 84 + .../manager/rm/service/ResourceManager.scala} | 68 +- .../impl/DefaultReqResourceService.scala | 21 +- .../service/impl/DefaultResourceManager.scala | 950 +++++++ .../DriverAndYarnReqResourceService.scala | 187 ++ .../impl/LabelResourceServiceImpl.scala | 106 + .../rm/service/impl/ResourceLogService.scala | 234 ++ .../rm/service/impl/UserResourceService.scala | 116 + .../utils/AcrossClusterRulesJudgeUtils.scala | 246 ++ .../linkis/manager/rm/utils/RMUtils.scala | 365 +++ .../manager/rm/utils/UserConfiguration.scala | 174 ++ .../service/common/label/LabelChecker.scala} | 16 +- .../service/common/label/LabelFilter.scala | 30 + .../common/label/ManagerLabelService.scala | 18 +- .../src/test/java/org/apache/linkis/Scan.java | 26 - .../apache/linkis/WebApplicationServer.java | 34 - .../engineplugin/server/dao/BaseDaoTest.java | 31 - .../dao/EngineConnBmlResourceDaoTest.java | 111 - ...Test.java => ECResourceInfoUtilsTest.java} | 2 +- .../src/test/resources/application.properties | 36 - .../src/test/resources/create.sql | 47 - .../src/test/resources/create_pg.sql | 48 - .../label/conf/LabelManagerConfTest.scala | 35 + .../linkis/manager/rm/utils/RMUtilsTest.scala | 43 
+ .../manager/label/conf/LabelCommonConfig.java | 7 +- .../label/constant/LabelKeyConstant.java | 6 + .../label/entity/cluster/ClusterLabel.java | 3 +- .../label/entity/engine/DriverTaskLabel.java | 47 +- .../label/entity/engine/EngineType.scala | 5 + .../manager/label/entity/engine/RunType.scala | 3 +- .../label/utils/EngineTypeLabelCreator.java | 2 + .../manager/label/utils/LabelUtil.scala | 37 +- .../manager/label/TestLabelBuilder.java | 5 +- .../factory/StdLabelBuilderFactoryTest.java | 15 + .../entity/engine/EngineTypeLabelTest.java | 65 - .../common/conf/ManagerCommonConf.java | 35 - .../manager/common/conf/RMConfiguration.java | 49 - .../manager/common/constant/AMConstant.java | 6 + .../common/entity/metrics/AMNodeMetrics.java | 11 + .../common/entity/metrics/NodeMetrics.java | 2 + .../manager/common/entity/node/AMEMNode.java | 11 + .../common/entity/node/AMEngineNode.java | 12 + .../manager/common/entity/node/AMNode.java | 4 + .../entity/persistence/PersistenceLabel.java | 17 +- .../persistence/PersistenceNodeMetrics.java | 11 + .../persistence/PersistencerEcNodeInfo.java | 4 +- .../common/entity/resource/CPUResource.java | 2 +- .../entity/resource/CommonNodeResource.java | 30 + .../resource/DriverAndYarnResource.java | 111 +- .../entity/resource/LoadInstanceResource.java | 2 +- .../entity/resource/MemoryResource.java | 2 +- .../common/entity/resource/NodeResource.java | 12 + .../common/entity/resource/Resource.java | 16 - .../common/entity/resource/ResourceType.java | 2 + .../entity/resource/SpecialResource.java | 4 +- .../ManagerCommonErrorCodeSummary.java | 3 +- .../manager/common/operator/Operator.java | 56 - .../common/operator/OperatorFactoryImpl.java | 72 - .../common/protocol/RequestManagerUnlock.java | 59 - .../common/protocol/em/ECMOperateRequest.java | 51 - .../protocol/em/ECMOperateResponse.java | 52 - .../em/EMResourceRegisterRequest.java | 79 - .../common/protocol/em/RegisterEMRequest.java | 79 - .../engine/EngineAskAsyncResponse.java | 48 - 
.../protocol/engine/EngineAsyncResponse.java | 24 - .../engine/EngineConnReleaseRequest.java | 19 + .../engine/EngineConnStatusCallback.java | 55 - .../engine/EngineConnStatusCallbackToAM.java | 69 - .../protocol/engine/EngineCreateError.java | 61 - .../engine/EngineInfoClearRequest.java | 5 + .../protocol/engine/EngineOperateRequest.java | 42 - .../engine/EngineOperateResponse.java | 53 - .../protocol/node/NodeHeartbeatMsg.java | 7 +- .../protocol/resource/ResourceProtocol.java | 22 - .../resource/ResourceUsedProtocol.java | 51 - .../protocol/resource/ResourceWithStatus.java | 103 - .../resource/ResponseTaskRunningInfo.java | 74 - .../manager/common/utils/ManagerUtils.java | 7 - .../manager/common/utils/ResourceUtils.java | 30 +- .../manager/rm/RequestResourceAndWait.java | 47 - .../manager/common/operator/Operator.scala | 44 + .../common/operator/OperatorFactory.scala | 64 + .../manager/common/protocol/EngineLock.scala} | 39 +- .../common/protocol/OperateRequest.scala | 44 +- .../common/protocol/OperateResponse.scala | 13 +- .../protocol/em/ECMOperateRequest.scala | 37 + .../protocol/em/ECMOperateResponse.scala | 32 + .../em/EMResourceRegisterRequest.scala | 67 + .../protocol/em/RegisterEMRequest.scala | 67 + .../protocol/em/RegisterEMResponse.scala | 8 +- .../protocol/engine/EngineAsyncResponse.scala | 38 + .../engine/EngineConnStatusCallback.scala | 55 + .../engine/EngineOperateRequest.scala | 19 +- .../engine/EngineOperateResponse.scala | 32 + .../protocol/label/LabelUpdateRequest.scala | 19 +- .../protocol/resource/ResourceProtocol.scala | 57 + .../resource/ResourceWithStatus.scala | 21 +- .../linkis/manager/rm/ResourceInfo.scala | 24 + .../linkis/manager/rm/ResultResource.scala | 9 +- .../manager/dao/ECResourceRecordMapper.java | 3 +- .../linkis/manager/dao/LockManagerMapper.java | 7 + .../linkis/manager/dao/NodeManagerMapper.java | 4 +- ...kisManagerPersistenceErrorCodeSummary.java | 4 +- .../persistence/NodeManagerPersistence.java | 21 +- 
.../NodeMetricManagerPersistence.java | 12 +- .../ResourceManagerPersistence.java | 6 +- .../impl/DefaultLabelManagerPersistence.java | 8 + .../impl/DefaultLockManagerPersistence.java | 30 +- .../impl/DefaultNodeManagerPersistence.java | 70 +- .../DefaultNodeMetricManagerPersistence.java | 27 +- .../impl/DefaultResourceLabelPersistence.java | 17 +- .../DefaultResourceManagerPersistence.java | 12 +- .../manager/util/PersistenceManagerConf.java | 5 - .../mapper/common/ECResourceRecordMapper.xml | 109 + .../LabelManagerMapper.xml | 54 +- .../mapper/common/NodeManagerMapper.xml | 49 +- .../mapper/common/NodeMetricManagerMapper.xml | 9 +- .../ResourceManagerMapper.xml | 23 +- .../mapper/mysql/ECResourceRecordMapper.xml | 129 - .../mapper/mysql/LabelManagerMapper.xml | 585 ---- .../mapper/mysql/ResourceManagerMapper.xml | 148 - .../postgresql/ECResourceRecordMapper.xml | 127 - .../dao/ECResourceRecordMapperTest.java | 134 - .../manager/dao/LabelManagerMapperTest.java | 293 -- .../manager/dao/NodeManagerMapperTest.java | 1 - .../dao/ResourceManagerMapperTest.java | 45 +- .../src/test/resources/application.properties | 17 +- .../src/test/resources/create.sql | 55 +- .../src/test/resources/create_pg.sql | 189 -- linkis-dist/bin/checkEnv.sh | 55 +- linkis-dist/bin/install.sh | 57 +- linkis-dist/deploy-config/linkis-env.sh | 9 +- linkis-dist/docker/ldh.Dockerfile | 7 + linkis-dist/docker/linkis.Dockerfile | 7 + .../linkis/templates/configmap-init-sql.yaml | 107 +- .../templates/configmap-linkis-config.yaml | 100 +- linkis-dist/package/admin/clear_ec_record.sh | 51 + .../package/admin/clear_history_task.sh | 49 + .../package/admin/configuration_helper.sh | 89 + .../admin/linkis_task_res_log_clear.sh | 54 + .../package/conf/application-engineconn.yml | 13 +- .../package/conf/application-eureka.yml | 10 + .../package/conf/application-linkis.yml | 9 + .../conf/linkis-cg-entrance.properties | 4 +- .../conf/linkis-et-monitor-file.properties | 143 + 
.../package/conf/linkis-et-monitor.properties | 84 + .../package/conf/linkis-mg-gateway.properties | 4 +- linkis-dist/package/conf/linkis.properties | 4 +- .../conf/nacos/application-engineconn.yml | 38 + .../package/conf/nacos/application-linkis.yml | 59 + linkis-dist/package/db/linkis_ddl.sql | 138 +- linkis-dist/package/db/linkis_ddl_pg.sql | 88 +- linkis-dist/package/db/linkis_dml.sql | 32 +- linkis-dist/package/db/linkis_dml_pg.sql | 2 +- .../db/module/linkis_configuration.sql | 38 +- .../db/module/linkis_configuration_dml.sql | 5 - linkis-dist/package/db/module/linkis_udf.sql | 9 +- linkis-dist/package/db/udf/udf_sys.sql | 813 ++++++ .../upgrade/1.5.0_schema/mysql/linkis_ddl.sql | 70 +- .../upgrade/1.6.0_schema/mysql/linkis_ddl.sql | 52 + .../upgrade/1.6.0_schema/mysql/linkis_dml.sql | 19 + linkis-dist/package/sbin/common.sh | 2 + .../package/sbin/ext/linkis-et-monitor | 43 + linkis-dist/package/sbin/linkis-start-all.sh | 18 +- linkis-dist/package/sbin/linkis-stop-all.sh | 8 +- linkis-dist/release-docs/LICENSE | 21 +- .../licenses/LICENSE-akka-protobuf_2.11.txt | 212 -- .../licenses/LICENSE-akka-slf4j_2.11.txt | 212 -- .../licenses/LICENSE-akka-stream_2.11.txt | 212 -- ...akka-actor_2.11.txt => LICENSE-client.txt} | 16 +- .../licenses/LICENSE-jts-core.txt | 277 ++ .../src/main/assembly/distribution.xml | 15 + linkis-engineconn-plugins/doris/pom.xml | 110 + .../doris/src/main/assembly/distribution.xml | 71 + .../doris/DorisEngineConnPlugin.java} | 26 +- .../DorisProcessEngineConnLaunchBuilder.java} | 4 +- .../doris/conf/DorisConfiguration.java | 89 + .../doris/conf/DorisEngineConf.java} | 6 +- .../doris/constant/DorisConstant.java | 73 + .../errorcode/DorisErrorCodeSummary.java | 59 + .../doris/exception/DorisException.java} | 9 +- .../exception/DorisParameterException.java} | 9 +- .../DorisStreamLoadFileException.java} | 9 +- .../doris/executor/DorisDatasourceParser.java | 117 + .../executor/DorisEngineConnExecutor.java | 727 +++++ 
.../engineplugin/doris/util/DorisUtils.java | 107 + .../resources/linkis-engineconn.properties | 18 +- .../doris/src/main/resources/log4j2.xml | 95 + .../factory/DorisEngineConnFactory.scala | 44 + ...cSearchProcessEngineConnLaunchBuilder.java | 2 +- .../ElasticSearchEngineConnExecutor.java | 4 +- .../impl/ElasticSearchExecutorImpl.java | 2 +- .../executor/client/ResponseHandler.scala | 11 +- .../client/impl/ResponseHandlerImpl.scala | 6 +- .../deployment/ClusterDescriptorAdapter.java | 11 +- .../sql/operation/OperationFactoryImpl.java | 1 + .../sql/operation/impl/DDLOperation.java | 3 + .../flink/client/sql/parser/SqlCommand.java | 2 + .../sql/parser/SqlCommandParserImpl.java | 3 + .../flink/client/utils/FlinkUdfUtils.java | 123 + .../errorcode/FlinkErrorCodeSummary.java | 132 + .../exception/ExecutorInitException.java | 45 + .../exception/FlinkInitFailedException.java | 36 + .../exception/JobExecutionException.java | 36 + .../exception/SqlExecutionException.java | 36 + .../flink/exception/SqlParseException.java | 36 + .../resources/linkis-engineconn.properties | 2 +- .../flink/config/FlinkEnvConfiguration.scala | 13 +- .../flink/executor/FlinkExecutor.scala | 2 +- .../FlinkManagerConcurrentExecutor.scala | 103 + .../FlinkSQLComputationExecutor.scala | 7 +- .../factory/FlinkEngineConnFactory.scala | 63 +- .../flink/hook/FlinkJarUdfEngineHook.scala | 107 + .../launch/FlinkEngineConnLaunchBuilder.scala | 40 +- .../flink/operator/KillOperator.scala | 32 +- .../flink/operator/ListOperator.scala | 17 +- .../flink/operator/StatusOperator.scala | 22 +- .../operator/TriggerSavepointOperator.scala | 25 +- .../FlinkRestClientManager.scala | 21 +- .../flink/util/FlinkValueFormatUtil.scala | 57 + .../flink/LinkisFlinkUdfExample.java} | 10 +- .../hbase/hbase-core/pom.xml | 192 ++ .../exception/ExecutorInitException.java | 0 .../hbase/shell/HBaseShellSession.java | 3 + .../resources/linkis-engineconn.properties | 2 +- .../hbase/hbase-shims-1.2.0/pom.xml | 36 +- 
.../hbase/hbase-shims-1.4.3/pom.xml | 44 +- .../hbase/hbase-shims-2.2.6/pom.xml | 55 +- .../hbase/hbase-shims-2.5.3/pom.xml | 40 +- linkis-engineconn-plugins/hive/pom.xml | 39 + .../hive/src/main/assembly/distribution.xml | 1 - .../hive/src/main/resources/log4j2.xml | 4 +- .../hive/creation/HiveEngineConnFactory.scala | 2 +- .../HiveEngineConcurrentConnExecutor.scala | 11 +- .../executor/HiveEngineConnExecutor.scala | 99 +- .../client/thrift/ImpalaThriftClient.java | 2 +- .../impala/conf/ImpalaConfiguration.scala | 2 +- .../executor/ImpalaEngineConnExecutor.scala | 57 +- .../io_file/src/main/resources/log4j2.xml | 6 +- .../io/executor/IoEngineConnExecutor.scala | 97 +- .../io/service/FsProxyService.scala | 18 +- .../engineplugin/io/utils/IOHelp.scala | 34 +- .../src/test/resources/testIoResult.dolphin | 0 .../io/executor/IoEngineConnExecutorTest.java | 73 - .../engineplugin/jdbc/ConnectionManager.java | 31 +- .../jdbc/constant/JDBCEngineConnConstant.java | 5 + .../jdbc/errorcode/JDBCErrorCodeSummary.java | 4 +- .../JDBCProcessEngineConnLaunchBuilder.scala | 2 +- .../jdbc/conf/JDBCConfiguration.scala | 6 + .../executor/JDBCEngineConnExecutor.scala | 121 +- .../jdbc/executor/JDBCHelper.java | 38 +- .../executor/JDBCMultiDatasourceParser.scala | 22 +- .../nebula/src/main/assembly/distribution.xml | 2 +- .../executor/NebulaEngineConnExecutor.java | 8 +- .../OpenLooKengProcessECLaunchBuilder.java | 2 +- .../OpenLooKengEngineConnExecutor.java | 2 +- .../pipeline/src/main/resources/log4j2.xml | 3 + .../executor/PipelineEngineConnExecutor.scala | 3 - linkis-engineconn-plugins/pom.xml | 2 + .../PrestoProcessEngineConnLaunchBuilder.java | 2 +- .../executor/PrestoEngineConnExecutor.java | 2 +- .../python/PythonEngineConnPlugin.java | 80 - .../conf/PythonEngineConfiguration.java | 51 - .../LinkisPythonErrorCodeSummary.java | 2 +- .../executor/PythonEngineConnExecutor.java | 189 -- .../python/hook/PythonVersionEngineHook.java | 78 - .../engineplugin/python/utils/Kind.java | 92 
- .../python/src/main/resources/log4j2.xml | 3 + .../src/main/resources/python/python.py | 4 +- .../python/PythonEngineConnPlugin.scala | 75 + .../conf/PythonEngineConfiguration.scala | 49 + .../exception/NoSupportEngineException.scala} | 15 +- .../PythonSessionNullException.scala | 35 + .../executor/PythonEngineConnExecutor.scala | 172 ++ .../python/executor/PythonSession.scala | 43 +- .../factory/PythonEngineConnFactory.scala | 2 +- .../python/hook/PythonVersionEngineHook.scala | 67 + ...ythonProcessEngineConnLaunchBuilder.scala} | 6 +- .../engineplugin/python/utils/Kind.scala | 104 + .../engineplugin/python/utils/State.scala | 74 + .../TestNoSupportEngineException.java | 47 - .../TestPythonEngineConnExecutor.java | 82 - .../factory/TestPythonEngineConnFactory.java | 44 - .../python/TestPythonEngineConnPlugin.scala} | 14 +- .../conf/TestPythonEngineConfiguration.scala} | 32 +- .../TestNoSupportEngineException.scala | 95 + .../TestPythonEngineConnExecutor.scala | 82 + .../factory/TestPythonEngineConnFactory.scala | 42 + .../engineplugin/python/utils/TestKind.scala} | 27 +- .../engineplugin/python/utils/TestState.scala | 32 +- .../repl/executor/ReplEngineConnExecutor.java | 30 + .../executor/javarepl/JavaReplCompiler.java | 2 +- .../repl/src/main/resources/log4j2.xml | 2 +- .../repl/src/main/resources/repl-ec.md | 37 + .../shell/ShellEngineConnPlugin.java | 81 - .../common/ShellEngineConnPluginConst.java | 24 - .../shell/conf/ShellEngineConnConf.java | 30 - .../exception/ShellCodeErrorException.java | 28 - .../shell/executor/ReaderThread.java | 97 - .../ShellEngineConnConcurrentExecutor.java | 151 -- .../executor/ShellEngineConnExecutor.java | 339 --- .../shell/executor/YarnAppIdExtractor.java | 53 - .../conf/linkis-engineconn.properties | 3 +- .../shell/src/main/resources/conf/log4j2.xml | 5 +- .../shell/ShellEngineConnPlugin.scala | 76 + .../ShellProcessEngineConnLaunchBuilder.scala | 22 + .../shell/common/ShellEnginePluginConst.scala | 23 + 
.../shell/conf/ShellEngineConnConf.scala | 30 + .../exception/NoCorrectUserException.scala | 27 + .../shell/executor/ReaderThread.scala | 105 + .../shell/executor/ShellECTaskInfo.scala | 20 + .../ShellEngineConnConcurrentExecutor.scala | 378 +++ .../executor/ShellEngineConnExecutor.scala | 337 +++ .../shell/executor/YarnAppIdExtractor.scala | 81 + .../factory/ShellEngineConnFactory.scala | 17 +- .../executor/TestShellEngineConnExecutor.java | 62 - .../shell/TestShellEngineConnPlugin.scala} | 20 +- .../TestShellEngineConnPluginConst.scala} | 17 +- .../TestNoCorrectUserException.scala} | 20 +- .../TestShellEngineConnExecutor.scala | 62 + linkis-engineconn-plugins/spark/pom.xml | 15 + .../spark/scala-2.12/pom.xml | 12 +- .../spark/datacalc/TestDorisCala.scala} | 44 +- .../spark/DirectPushRestfulApi.java | 70 + ...esApplicationClusterDescriptorAdapter.java | 10 +- .../spark/datacalc/sink/DorisSinkConfig.java | 91 + .../datacalc/source/DorisSourceConfig.java | 74 + .../spark/datacalc/util/PluginUtil.java | 3 +- .../executor/SecureRandomStringUtils.java | 221 ++ .../resources/linkis-engineconn.properties | 2 +- .../src/main/resources/python/mix_pyspark.py | 32 +- .../engineplugin/spark/common/SparkKind.scala | 22 - .../spark/config/SparkConfiguration.scala | 3 + .../spark/datacalc/sink/DorisSink.scala | 57 + .../{SolrSource.scala => DorisSource.scala} | 17 +- .../spark/executor/SQLSession.scala | 10 +- .../executor/SparkEngineConnExecutor.scala | 42 +- .../spark/executor/SparkPythonExecutor.scala | 39 +- .../spark/executor/SparkScalaExecutor.scala | 79 +- .../spark/executor/SparkSqlExecutor.scala | 53 +- .../executor/SparkSubmitOnceExecutor.scala | 2 +- .../factory/SparkEngineConnFactory.scala | 4 + .../spark/imexport/ExportData.scala | 13 +- .../spark/imexport/LoadData.scala | 65 +- ...SubmitProcessEngineConnLaunchBuilder.scala | 18 +- .../spark/mdq/MDQPostExecutionHook.scala | 14 +- .../spark/mdq/MDQPreExecutionHook.scala | 17 +- 
.../engineplugin/spark/utils/ArrowUtils.scala | 101 + .../spark/utils/DirectPushCache.scala | 65 + .../spark/utils/EngineUtils.scala | 4 +- .../datasources/csv/DolphinToSpark.scala | 26 +- .../spark/src/test/resources/etltest.dolphin | 4 - .../spark/datacalc/TestExcelCala.scala | 135 - .../spark/datacalc/TestRedisCalc.scala | 200 -- .../spark/datacalc/TestRocketmqCala.scala | 71 - .../spark/executor/TestArrowUtil.scala | 78 + linkis-engineconn-plugins/sqoop/pom.xml | 2 +- .../sqoop/src/main/assembly/distribution.xml | 4 - .../sqoop/client/config/ParamsMapping.java | 6 + .../sqoop/src/main/resources/log4j2.xml | 18 +- .../sqoop/client/RemoteClientHolder.scala | 97 + .../DataSourceRpcErrorException.scala | 23 +- .../executor/SqoopOnceCodeExecutor.scala | 10 +- .../sqoop/params/ConnectParamsResolver.scala | 82 + .../SqoopDataSourceParamsResolver.scala | 143 + .../trino/conf/TrinoConfiguration.java | 90 - .../executor/TrinoEngineConnExecutor.java | 561 ---- .../password/CommandPasswordCallback.java | 4 +- .../password/StaticPasswordCallback.java | 2 +- .../engineplugin/trino/utils/TrinoCode.java | 47 - .../resources/linkis-engineconn.properties | 1 - .../trino/src/main/resources/log4j2.xml | 3 + .../trino/TrinoEngineConnPlugin.scala | 66 + ...TrinoProcessEngineConnLaunchBuilder.scala} | 24 +- .../trino/conf/TrinoConfiguration.scala | 73 + .../trino/conf/TrinoEngineConfig.scala | 48 + .../trino/exception/TrinoException.scala | 31 +- .../executor/TrinoEngineConnExecutor.scala | 563 ++++ .../engineplugin/trino/utils/TrinoCode.scala | 65 + .../trino/utils/TrinoSQLHook.scala} | 18 +- linkis-extensions/linkis-et-monitor/pom.xml | 121 + .../src/main/assembly/distribution.xml | 293 ++ .../monitor/LinksMonitorApplication.java | 32 + .../monitor/bml/cleaner/dao/VersionDao.java | 61 + .../entity/CleanedResourceVersion.java | 209 ++ .../bml/cleaner/entity/ResourceVersion.java | 206 ++ .../bml/cleaner/service/CleanerService.java | 10 +- .../bml/cleaner/service/VersionService.java | 
26 +- .../service/impl/CleanerServiceImpl.java | 168 ++ .../service/impl/VersionServiceImpl.java | 63 + .../bml/cleaner/vo/CleanResourceVo.java | 29 +- .../config/ApplicationConfiguration.java | 48 + .../linkis/monitor/config/ListenerConfig.java | 35 +- .../linkis/monitor/config/MonitorConfig.java | 75 + .../monitor/config/ScheduledConfig.java | 39 + .../linkis/monitor/entity/EngineEntity.java | 37 +- .../linkis/monitor/entity/EntranceEntity.java | 53 +- .../linkis/monitor/entity/IndexEntity.java | 85 + .../instance/dao/InsLabelRelationDao.java | 21 +- .../monitor/instance/dao/InstanceInfoDao.java | 27 +- .../instance/dao/InstanceLabelDao.java | 28 +- .../instance/entity/InsPersistenceLabel.java | 98 + .../entity/InsPersistenceLabelValue.java | 47 +- .../monitor/instance/entity/InstanceInfo.java | 92 + .../linkis/monitor/jobhistory/QueryUtils.java | 31 + .../jobhistory/dao/JobHistoryMapper.java | 67 + .../monitor/jobhistory/entity/JobHistory.java | 303 +++ .../exception/AnomalyScannerException.java | 37 + .../exception/DirtyDataCleanException.java | 37 + .../linkis/monitor/scheduled/BmlClear.java | 61 + .../monitor/scheduled/EcRecordClear.java | 51 + .../scheduled/EntranceTaskMonitor.java | 210 ++ .../monitor/scheduled/JobHistoryClear.java | 52 + .../monitor/scheduled/JobHistoryMonitor.java | 218 ++ .../monitor/scheduled/ResourceClear.java | 49 + .../monitor/scheduled/ResourceMonitor.java | 205 ++ .../monitor/scheduled/TaskArchiveClear.java | 53 + .../monitor/scheduled/TaskLogClear.java | 51 + .../monitor/scheduled/UserModeMonitor.java | 177 ++ .../linkis/monitor/until/CacheUtils.java | 35 + .../linkis/monitor/until/HttpsUntils.java | 173 ++ .../linkis/monitor/until/JobMonitorUtils.java | 88 + .../linkis/monitor/until/ThreadUtils.java | 67 + .../mapper/common/InsLabelRelationMapper.xml | 67 + .../mapper/common/InstanceInfoMapper.xml | 53 + .../mapper/common/InstanceLabelMapper.xml | 57 + .../mapper/common/JobHistoryMapper.xml | 174 ++ 
...kisJobHistoryScanSpringConfiguration.scala | 48 + .../monitor/client/MonitorHTTPClient.scala | 117 + .../client/MonitorHTTPClientClientImpl.scala | 43 +- .../client/MonitorResourceClient.scala | 112 + .../client/MonitorResourceClientImpl.scala | 39 + .../linkis/monitor/constants/Constants.scala | 101 + .../monitor/constants/ScanOperatorEnum.scala | 23 + .../apache/linkis/monitor/core/ob/Event.scala | 11 +- .../linkis/monitor/core/ob/Observer.scala | 26 + .../monitor/core/ob/SingleObserverEvent.java | 42 +- .../core/pac/AbstractDataFetcher.scala | 13 +- .../monitor/core/pac/AbstractScanRule.scala | 51 + .../monitor/core/pac/BaseScannedData.scala | 26 + .../linkis/monitor/core/pac/DataFetcher.scala | 40 + .../linkis/monitor/core/pac/ScanBuffer.scala | 42 + .../linkis/monitor/core/pac/ScanRule.scala | 50 + .../linkis/monitor/core/pac/ScannedData.scala | 26 + .../core/scanner/AbstractScanner.scala | 164 ++ .../monitor/core/scanner/AnomalyScanner.scala | 95 + .../monitor/core/scanner/DefaultScanner.scala | 13 +- .../monitor/factory/MapperFactory.scala | 61 + .../jobhistory/JobHistoryDataFetcher.scala | 120 + .../errorcode/JobHistoryErrCodeHitEvent.scala | 22 + .../errorcode/JobHistoryErrCodeRule.scala | 85 + .../JobHistoryErrorCodeAlertSender.scala | 96 + .../jobhistory/index/JobIndexHitEvent.scala | 22 + .../jobhistory/index/JobIndexRule.scala | 82 + .../jobhistory/index/JobIndexSender.scala | 25 + .../jobtime/JobTimeExceedAlertSender.scala | 112 + .../jobtime/JobTimeExceedHitEvent.scala | 22 + .../jobtime/JobTimeExceedRule.scala | 104 + .../labels/JobHistoryLabelsAlertSender.scala | 71 + .../labels/JobHistoryLabelsHitEvent.scala | 22 + .../labels/JobHistoryLabelsRule.scala | 116 + .../runtime/CommonJobRunTimeRule.scala | 86 + .../runtime/CommonRunTimeAlertSender.scala | 98 + .../runtime/CommonRunTimeHitEvent.scala | 22 + .../JobHistoryRunTimeAlertSender.scala | 72 + .../runtime/JobHistoryRunTimeHitEvent.scala | 22 + .../runtime/JobHistoryRunTimeRule.scala | 88 + 
.../monitor/request/EmsListAction.scala | 74 + .../monitor/request/EntranceTaskAction.scala | 77 + .../request/MonitorResourceAction.scala | 22 + .../linkis/monitor/request/UserAction.scala | 26 + .../monitor/response/EntranceTaskResult.scala | 36 + .../response/MonitorResourceResult.scala | 18 +- .../linkis/monitor/utils/ScanUtils.java | 71 + .../monitor/utils/alert/AlertDesc.scala | 25 + .../monitor/utils/alert/AlertSender.scala | 19 +- .../utils/alert/PooledAlertSender.scala | 111 + .../utils/alert/ims/ImsAlertDesc.scala | 176 ++ .../utils/alert/ims/ImsAlertLevel.scala | 28 + .../alert/ims/ImsAlertPropFileData.scala | 31 + .../monitor/utils/alert/ims/ImsAlertWay.scala | 26 + .../monitor/utils/alert/ims/ImsRequest.scala | 36 + .../utils/alert/ims/MonitorAlertUtils.scala | 181 ++ .../alert/ims/PooledImsAlertSender.scala | 104 + .../utils/alert/ims/PooledImsAlertUtils.scala | 110 + .../linkis/monitor/utils/log/LogUtils.scala | 8 +- .../utils/alert/PooledImsAlertSenderTest.java | 70 + .../utils/alert/PooledImsAlertUtilsTest.java | 49 + .../src/test/resources/log4j2-console.xml | 46 + .../storage/io/client/DefaultIOClient.scala | 4 +- .../io/iteraceptor/IOMethodInterceptor.scala | 14 +- .../IOMethodInterceptorCreatorImpl.scala | 4 +- .../storage/io/utils/IOClientUtils.scala | 3 +- linkis-extensions/pom.xml | 1 + .../DefaultCodeExecTaskExecutorManager.scala | 7 +- .../monitor/EngineConnMonitor.scala | 3 +- .../physical/CodeLogicalUnitExecTask.scala | 10 +- .../ComputationTaskExecutionReceiver.scala | 10 +- .../ecm/ComputationEngineConnManager.scala | 54 +- .../ecm/cache/EngineAsyncResponseCache.scala | 16 +- .../orchestrator/ecm/conf/ECMPluginConf.scala | 2 + .../impl/ComputationEngineConnExecutor.scala | 2 +- .../DefaultEngineAsyncResponseService.scala | 14 +- .../conf/OrchestratorConfiguration.scala | 2 +- .../execution/AbstractExecution.scala | 26 - .../execution/impl/DefaultTaskManager.scala | 47 +- .../execution/impl/ExecutionImpl.scala | 27 +- 
.../execution/ExecutionTaskEvent.scala | 13 +- linkis-public-enhancements/distribution.xml | 4 +- .../linkis-bml-server/pom.xml | 2 +- .../src/main/assembly/distribution.xml | 0 .../linkis/bml/LinkisBMLApplication.java | 0 .../apache/linkis/bml/common/Constant.java | 0 .../linkis/bml/common/ExecutorManager.java | 0 .../linkis/bml/common/HdfsResourceHelper.java | 0 .../bml/common/LocalResourceHelper.java | 0 .../linkis/bml/common/OperationEnum.java | 0 .../linkis/bml/common/ResourceHelper.java | 0 .../bml/common/ResourceHelperFactory.java | 0 .../linkis/bml/common/ScheduledTask.java | 0 .../apache/linkis/bml/dao/BmlProjectDao.java | 0 .../apache/linkis/bml/dao/DownloadDao.java | 0 .../apache/linkis/bml/dao/ResourceDao.java | 0 .../org/apache/linkis/bml/dao/TaskDao.java | 0 .../org/apache/linkis/bml/dao/VersionDao.java | 0 .../apache/linkis/bml/entity/BmlProject.java | 0 .../linkis/bml/entity/DownloadModel.java | 0 .../apache/linkis/bml/entity/Resource.java | 0 .../linkis/bml/entity/ResourceTask.java | 0 .../linkis/bml/entity/ResourceVersion.java | 0 .../org/apache/linkis/bml/entity/Version.java | 0 .../errorcode/BmlServerErrorCodeSummary.java | 0 .../linkis/bml/restful/BmlProjectRestful.java | 0 .../linkis/bml/restful/BmlRestfulApi.java | 0 .../linkis/bml/restful/RestfulUtils.java | 0 .../linkis/bml/service/BmlProjectService.java | 0 .../apache/linkis/bml/service/BmlService.java | 0 .../bml/service/BmlShareResourceService.java | 0 .../linkis/bml/service/DownloadService.java | 0 .../linkis/bml/service/ResourceService.java | 0 .../linkis/bml/service/TaskService.java | 0 .../linkis/bml/service/VersionService.java | 0 .../service/impl/BmlProjectServiceImpl.java | 0 .../bml/service/impl/BmlServiceImpl.java | 0 .../impl/BmlShareResourceServiceImpl.java | 0 .../bml/service/impl/DownloadServiceImpl.java | 0 .../bml/service/impl/ResourceServiceImpl.java | 0 .../bml/service/impl/TaskServiceImpl.java | 11 +- .../bml/service/impl/VersionServiceImpl.java | 0 
.../linkis/bml/threading/Scheduler.java | 0 .../org/apache/linkis/bml/threading/Task.java | 0 .../linkis/bml/threading/TaskState.java | 0 .../linkis/bml/util/HttpRequestHelper.java | 0 .../org/apache/linkis/bml/util/MD5Utils.java | 0 .../apache/linkis/bml/vo/ResourceBasicVO.java | 0 .../org/apache/linkis/bml/vo/ResourceVO.java | 0 .../linkis/bml/vo/ResourceVersionsVO.java | 0 .../mapper/common}/BmlProjectMapper.xml | 73 +- .../mapper/common/DownloadMapper.xml | 6 +- .../mapper/common}/ResourceMapper.xml | 32 +- .../resources/mapper/common}/TaskMapper.xml | 8 +- .../mapper/common}/VersionMapper.xml | 85 +- .../bml/common/BmlAuthorityException.scala | 0 .../common/BmlPermissionDeniedException.scala | 0 .../bml/common/BmlQueryFailException.scala | 0 .../common/BmlResourceExpiredException.scala | 0 .../common/BmlServerParaErrorException.scala | 0 .../bml/conf/BmlServerConfiguration.scala | 0 .../bml/protocol/BmlServerProtocol.scala | 0 .../apache/linkis/bml/rpc/BmlReceiver.scala | 0 .../test/java/org/apache/linkis/bml/Scan.java | 0 .../linkis/bml/WebApplicationServer.java | 0 .../bml/common/HdfsResourceHelperTest.java | 0 .../bml/common/LocalResourceHelperTest.java | 0 .../bml/common/VersionServiceImplTest.java | 0 .../apache/linkis/bml/dao/BaseDaoTest.java | 0 .../linkis/bml/dao/BmlProjectDaoTest.java | 5 - .../linkis/bml/dao/DownloadDaoTest.java | 0 .../linkis/bml/dao/ResourceDaoTest.java | 0 .../apache/linkis/bml/dao/TaskDaoTest.java | 0 .../apache/linkis/bml/dao/VersionDaoTest.java | 0 .../bml/service/BmlProjectServiceTest.java | 0 .../bml/service/DownloadServiceImplTest.java | 0 .../bml/service/ResourceServiceImplTest.java | 0 .../bml/service/TaskServiceImplTest.java | 0 .../bml/service/VersionServiceImplTest.java | 0 .../src/test/resources/application.properties | 17 +- .../src/test/resources/create.sql | 0 .../mapper/mysql/BmlProjectMapper.xml | 78 - .../resources/mapper/mysql/VersionMapper.xml | 243 -- .../mapper/postgresql/ResourceMapper.xml | 110 - 
.../mapper/postgresql/TaskMapper.xml | 69 - .../src/test/resources/create_pg.sql | 129 - linkis-public-enhancements/linkis-bml/pom.xml | 34 - .../linkis-configuration/pom.xml | 7 + .../conf/AcrossClusterRuleKeys.java | 53 + .../dao/AcrossClusterRuleMapper.java | 65 + .../dao/ConfigKeyLimitForUserMapper.java} | 29 +- .../configuration/dao/ConfigMapper.java | 40 +- .../configuration/dao/DepartmentMapper.java | 12 +- .../dao/DepartmentTenantMapper.java | 24 +- .../linkis/configuration/dao/LabelMapper.java | 5 + .../dao/TemplateConfigKeyMapper.java | 49 + .../entity/AcrossClusterRule.java | 149 ++ .../configuration/entity/ConfigKey.java | 99 + .../entity/ConfigKeyLimitForUser.java | 200 ++ .../entity/ConfigKeyLimitVo.java | 50 +- .../configuration/entity/ConfigKeyValue.java | 12 + .../configuration/entity/ConfigUserValue.java | 154 ++ .../entity/DepartmentTenantVo.java | 155 ++ .../configuration/entity/DepartmentVo.java | 185 ++ .../entity/TemplateConfigKey.java | 200 ++ .../entity/TemplateConfigKeyVO.java | 16 +- .../linkis/configuration/entity/TenantVo.java | 13 + .../linkis/configuration/entity/UserIpVo.java | 2 + .../enumeration/BoundaryTypeEnum.java | 27 +- .../LinkisConfigurationErrorCodeSummary.java | 6 +- .../api/AcrossClusterRuleRestfulApi.java | 636 +++++ .../restful/api/ConfigurationRestfulApi.java | 382 ++- .../restful/api/TemplateRestfulApi.java | 280 ++ .../api/TenantConfigrationRestfulApi.java | 139 +- .../service/AcrossClusterRuleService.java | 60 + .../service/ConfigKeyService.java | 15 + .../service/DepartmentService.java | 11 +- .../service/TemplateConfigKeyService.java | 51 + .../service/TenantConfigService.java | 16 + .../configuration/service/TenantService.java | 4 + .../impl/AcrossClusterRuleServiceImpl.java | 179 ++ .../service/impl/ConfigKeyServiceImpl.java | 48 +- .../service/impl/DepartmentServiceImpl.java | 56 + .../impl/TemplateConfigKeyServiceImpl.java | 501 ++++ .../service/impl/TenantConfigServiceImpl.java | 166 +- 
.../service/impl/TenantServiceImpl.java | 33 +- .../util/{HttpsUtil.java => ClientUtil.java} | 10 +- .../configuration/util/CommonUtils.java | 55 + .../mapper/common/AcrossClusterRuleMapper.xml | 205 ++ .../common/ConfigKeyLimitForUserMapper.xml | 145 + .../mapper/{mysql => common}/ConfigMapper.xml | 243 +- .../mapper/common/DepartmentMapper.xml} | 34 +- .../DepartmentTenantMapper.xml} | 68 +- .../resources/mapper/common/LabelMapper.xml | 33 +- .../mapper/common/TemplateConfigKeyMapper.xml | 161 ++ .../mapper/{mysql => common}/UserIpMapper.xml | 12 +- .../{mysql => common}/UserTenantMapper.xml | 23 +- .../mapper/postgresql/ConfigMapper.xml | 265 -- .../mapper/postgresql/UserIpMapper.xml | 91 - .../configuration/conf/Configuration.scala | 11 +- .../configuration/constant/Constants.scala | 2 +- .../service/CategoryService.scala | 7 +- .../service/ConfigurationService.scala | 119 +- .../util/LabelParameterParser.scala | 6 +- .../dao/ConfigKeyLimitForUserMapperTest.java | 63 + .../configuration/dao/ConfigMapperTest.java | 12 +- .../dao/TemplateConfigKeyMapperTest.java | 96 + .../configuration/dao/UserIpMapperTest.java | 91 - .../dao/UserTenantMapperTest.java | 84 - .../api/ConfigurationRestfulApiTest.java | 8 +- .../src/test/resources/application.properties | 16 +- .../src/test/resources/create.sql | 99 +- .../src/test/resources/create_pg.sql | 148 - .../src/test/resources/data.sql | 51 + .../postgresql/contextHistoryMapper.xml | 83 - .../postgresql/contextIDListenerMapper.xml | 50 - .../mapper/postgresql/contextIDMapper.xml | 110 - .../mapper/postgresql/contextMapMapper.xml | 172 -- .../persistence/dao/ContextMapMapperTest.java | 266 -- .../src/test/resources/create_pg.sql | 90 - .../linkis-context-service/pom.xml | 37 - .../linkis-cs-server/pom.xml | 7 +- .../src/main/assembly/distribution.xml | 0 .../org/apache/linkis/cs/ContextSearch.java | 0 .../linkis/cs/DefaultContextSearch.java | 0 .../cs/condition/AbstractCommonCondition.java | 0 
.../linkis/cs/condition/AtomicCondition.java | 0 .../cs/condition/BinaryLogicCondition.java | 0 .../apache/linkis/cs/condition/Condition.java | 0 .../linkis/cs/condition/ConditionType.java | 0 .../cs/condition/UnaryLogicCondition.java | 0 .../construction/AndConditionParser.java | 0 .../construction/ConditionBuilder.java | 0 .../construction/ConditionBuilderImpl.java | 0 .../construction/ConditionParser.java | 0 .../construction/ContainsConditionParser.java | 0 .../ContextScopeConditionParser.java | 0 .../ContextTypeConditionParser.java | 0 .../ContextValueTypeConditionParser.java | 19 +- .../construction/NearestConditionParser.java | 0 .../construction/NotConditionParser.java | 0 .../construction/OrConditionParser.java | 0 .../construction/RegexConditionParser.java | 0 .../cs/condition/impl/AndCondition.java | 0 .../cs/condition/impl/ContainsCondition.java | 0 .../condition/impl/ContextScopeCondition.java | 0 .../condition/impl/ContextTypeCondition.java | 0 .../impl/ContextValueTypeCondition.java | 0 .../cs/condition/impl/NearestCondition.java | 0 .../cs/condition/impl/NotCondition.java | 0 .../linkis/cs/condition/impl/OrCondition.java | 0 .../cs/condition/impl/RegexCondition.java | 0 .../linkis/cs/conf/CSConfiguration.java | 12 +- .../cs/contextcache/ContextCacheService.java | 0 .../DefaultContextCacheService.java | 0 .../cs/contextcache/cache/ContextCache.java | 0 .../cache/DefaultContextAddListener.java | 0 .../cache/DefaultContextCache.java | 0 .../cache/csid/ContextIDValue.java | 0 .../cache/csid/ContextIDValueGenerator.java | 0 .../impl/ContextIDValueGeneratorImpl.java | 0 .../cache/csid/impl/ContextIDValueImpl.java | 0 .../cache/cskey/ContextKeyValueContext.java | 0 .../cache/cskey/ContextValueMapSet.java | 0 .../cskey/impl/ContextValueMapSetImpl.java | 0 .../impl/DefaultContextKeyValueContext.java | 0 .../cache/guava/ContextIDRemoveListener.java | 0 .../cs/contextcache/cleaner/AUTOCleaner.java | 0 .../contextcache/conf/ContextCacheConf.java | 0 
.../index/ContextInvertedIndex.java | 0 .../index/ContextInvertedIndexSet.java | 0 .../index/ContextInvertedIndexSetImpl.java | 0 .../index/DefaultContextInvertedIndex.java | 0 .../metric/ContextCacheMetric.java | 0 .../contextcache/metric/ContextIDMetric.java | 0 .../metric/DefaultContextCacheMetric.java | 0 .../metric/DefaultContextIDMetric.java | 0 .../cs/contextcache/metric/Metrtic.java | 0 .../cs/contextcache/metric/ObjectInfo.java | 0 .../cs/contextcache/metric/SizeEstimator.java | 0 .../parser/ContextKeyValueParser.java | 0 .../parser/DefaultContextKeyValueParser.java | 0 .../contextcache/utils/ContextCacheUtils.java | 0 .../LinkisCsServerErrorCodeSummary.java | 0 .../ContextSearchFailedException.java | 0 .../execution/AbstractConditionExecution.java | 0 .../cs/execution/ConditionExecution.java | 0 .../fetcher/AbstractContextCacheFetcher.java | 0 .../fetcher/ContextCacheFetcher.java | 0 .../ContextTypeContextSearchFetcher.java | 0 .../fetcher/IterateContextCacheFetcher.java | 0 .../execution/impl/AndConditionExecution.java | 0 .../impl/BinaryLogicConditionExecution.java | 0 .../impl/ContainsConditionExecution.java | 0 .../impl/ContextScopeConditionExecution.java | 0 .../impl/ContextTypeConditionExecution.java | 0 .../ContextValueTypeConditionExecution.java | 0 .../impl/NearestConditionExecution.java | 0 .../execution/impl/NotConditionExecution.java | 0 .../execution/impl/OrConditionExecution.java | 0 .../impl/RegexConditionExecution.java | 0 .../impl/UnaryLogicConditionExecution.java | 0 .../matcher/AbstractContextSearchMatcher.java | 0 .../matcher/AndLogicContextSearchMatcher.java | 0 .../BinaryLogicContextSearchMatcher.java | 0 .../matcher/ConditionMatcherResolver.java | 0 .../matcher/ContainsContextSearchMatcher.java | 0 .../ContextScopeContextSearchMatcher.java | 0 .../matcher/ContextSearchMatcher.java | 0 .../ContextTypeContextSearchMatcher.java | 0 .../ContextValueTypeContextSearchMatcher.java | 0 .../NearestLogicContextSearchMatcher.java | 0 
.../matcher/NotLogicContextSearchMatcher.java | 0 .../matcher/OrLogicContextSearchMatcher.java | 0 .../matcher/RegexContextSearchMatcher.java | 0 .../matcher/SkipContextSearchMather.java | 0 .../UnaryLogicContextSearchMatcher.java | 0 .../ruler/AbstractContextSearchRuler.java | 0 .../ruler/CommonListContextSearchRuler.java | 0 .../execution/ruler/ContextSearchRuler.java | 0 .../ruler/NearestContextSearchRuler.java | 0 .../AbstractContextHAManager.java | 0 .../cs/highavailable/ContextHAManager.java | 0 .../DefaultContextHAManager.java | 0 .../conf/ContextHighAvailableConf.java | 0 .../highavailable/exception/CSErrorCode.java | 0 .../ha/BackupInstanceGenerator.java | 0 .../cs/highavailable/ha/ContextHAChecker.java | 0 .../ha/ContextHAIDGenerator.java | 0 .../ha/impl/BackupInstanceGeneratorImpl.java | 0 .../ha/impl/ContextHACheckerImpl.java | 0 .../ha/impl/ContextHAIDGeneratorImpl.java | 0 .../HAContextPersistenceManagerImpl.java | 0 .../proxy/MethodInterceptorImpl.java | 0 .../cs/optimize/ConditionOptimizer.java | 0 .../cs/optimize/OptimizedCondition.java | 0 .../cost/ConditionCostCalculator.java | 0 .../linkis/cs/optimize/dfs/BinaryTree.java | 0 .../cs/optimize/dfs/MinCostBinaryTree.java | 0 .../apache/linkis/cs/optimize/dfs/Node.java | 0 .../impl/CostBasedConditionOptimizer.java | 0 .../persistence/ContextPersistenceBeans.java | 0 .../ContextPersistenceManager.java | 0 .../ContextPersistenceManagerImpl.java | 0 .../cs/persistence/annotation/Ignore.java | 0 .../cs/persistence/annotation/Tuning.java | 0 .../aop/PersistenceTuningAspect.java | 0 .../cs/persistence/conf/PersistenceConf.java | 0 .../persistence/dao/ContextHistoryMapper.java | 0 .../dao/ContextIDListenerMapper.java | 0 .../cs/persistence/dao/ContextIDMapper.java | 0 .../dao/ContextKeyListenerMapper.java | 0 .../cs/persistence/dao/ContextMapMapper.java | 0 .../persistence/entity/ExtraFieldClass.java | 0 .../entity/PersistenceContextHistory.java | 0 .../entity/PersistenceContextID.java | 0 
.../entity/PersistenceContextIDListener.java | 0 .../entity/PersistenceContextKey.java | 0 .../entity/PersistenceContextKeyListener.java | 0 .../entity/PersistenceContextKeyValue.java | 0 .../entity/PersistenceContextValue.java | 0 .../exception/ThrowingFunction.java | 0 .../ContextHistoryPersistence.java | 0 .../ContextIDListenerPersistence.java | 0 .../persistence/ContextIDPersistence.java | 0 .../ContextKeyListenerPersistence.java | 0 .../persistence/ContextMapPersistence.java | 0 .../ContextMetricsPersistence.java | 0 .../KeywordContextHistoryPersistence.java | 0 .../persistence/TransactionManager.java | 0 .../impl/ContextHistoryPersistenceImpl.java | 0 .../ContextIDListenerPersistenceImpl.java | 0 .../impl/ContextIDPersistenceImpl.java | 0 .../ContextKeyListenerPersistenceImpl.java | 0 .../impl/ContextMapPersistenceImpl.java | 0 .../impl/ContextMetricsPersistenceImpl.java | 0 .../KeywordContextHistoryPersistenceImpl.java | 0 .../impl/TransactionManagerImpl.java | 0 .../cs/persistence/util/PersistenceUtils.java | 0 .../linkis/cs/server/LinkisCSApplication.java | 0 .../cs/server/conf/ContextServerConf.java | 3 + .../cs/server/enumeration/ServiceMethod.java | 0 .../cs/server/enumeration/ServiceType.java | 0 .../server/label/CSInstanceLabelClient.java | 104 + .../server/parser/DefaultKeywordParser.java | 0 .../cs/server/parser/KeywordMethodEntity.java | 0 .../cs/server/parser/KeywordParser.java | 0 .../protocol/AbstractHttpRequestProtocol.java | 0 .../protocol/ContextHistoryProtocol.java | 0 .../cs/server/protocol/ContextIDProtocol.java | 0 .../protocol/ContextListenerProtocol.java | 0 .../cs/server/protocol/ContextProtocol.java | 0 .../cs/server/protocol/HttpProtocol.java | 0 .../server/protocol/HttpRequestProtocol.java | 0 .../server/protocol/HttpResponseProtocol.java | 0 .../server/protocol/RestResponseProtocol.java | 0 .../restful/ContextHistoryRestfulApi.java | 0 .../server/restful/ContextIDRestfulApi.java | 0 .../restful/ContextListenerRestfulApi.java | 0 
.../cs/server/restful/ContextRestfulApi.java | 0 .../cs/server/restful/CsRestfulParent.java | 0 .../cs/server/scheduler/CsScheduler.java | 0 .../server/scheduler/DefaultCsScheduler.java | 0 .../cs/server/scheduler/HttpAnswerJob.java | 0 .../scheduler/HttpAnswerJobBuilder.java | 0 .../linkis/cs/server/scheduler/HttpJob.java | 0 .../cs/server/scheduler/HttpJobBuilder.java | 0 .../cs/server/scheduler/HttpPriorityJob.java | 0 .../cs/server/scheduler/RestJobBuilder.java | 0 .../scheduler/impl/CsExecuteRequest.java | 0 .../impl/CsExecutorExecutionManager.java | 0 .../server/scheduler/impl/CsJobListener.java | 0 .../scheduler/impl/CsSchedulerBean.java | 0 .../server/scheduler/impl/CsSchedulerJob.java | 0 .../impl/JobToExecuteRequestConsumer.java | 0 .../cs/server/service/AbstractService.java | 0 .../server/service/ContextHistoryService.java | 0 .../cs/server/service/ContextIDService.java | 0 .../service/ContextListenerService.java | 0 .../cs/server/service/ContextService.java | 0 .../linkis/cs/server/service/Service.java | 0 .../impl/ContextHistoryServiceImpl.java | 0 .../service/impl/ContextIDServiceImpl.java | 0 .../impl/ContextListenerServiceImpl.java | 0 .../service/impl/ContextServiceImpl.java | 0 .../apache/linkis/cs/server/util/CsUtils.java | 0 .../src/main/resources/cs_ddl.sql | 0 .../mapper/common}/contextHistoryMapper.xml | 22 +- .../common}/contextIDListenerMapper.xml | 11 +- .../mapper/common}/contextIDMapper.xml | 4 +- .../common/contextKeyListenerMapper.xml | 4 +- .../mapper/common}/contextMapMapper.xml | 6 +- .../InstanceAliasConverter.scala | 0 .../instancealias/InstanceAliasManager.scala | 0 .../impl/DefaultInstanceAliasConverter.scala | 0 .../impl/InstanceAliasManagerImpl.scala | 0 .../RouteLabelInstanceAliasConverter.scala | 0 .../cs/server/scheduler/impl/CsExecutor.scala | 0 .../java/org/apache/linkis/cs/AndTest.java | 0 .../org/apache/linkis/cs/ContainsTest.java | 0 .../apache/linkis/cs/ContextScopeTest.java | 0 
.../apache/linkis/cs/ContextSearchTest.java | 0 .../org/apache/linkis/cs/ContextTypeTest.java | 0 .../java/org/apache/linkis/cs/RegexTest.java | 0 .../test/java/org/apache/linkis/cs/Scan.java | 0 .../linkis/cs/WebApplicationServer.java | 0 .../contextcache/test/csid/TestContextID.java | 0 .../test/keyword/TestContextKey.java | 0 .../test/keyword/TestContextKeyValue.java | 0 .../keyword/TestContextKeyValueParser.java | 0 .../test/keyword/TestContextValue.java | 0 .../test/service/TestContextCacheService.java | 0 .../apache/linkis/cs/csid/TestContextID.java | 0 .../LinkisCsServerErrorCodeSummaryTest.java | 0 .../ContextSearchFailedExceptionTest.java | 0 .../test/TestContextHAManager.java | 0 .../cs/highavailable/test/haid/TestHAID.java | 0 .../test/persist/TestPersistence.java | 0 .../linkis/cs/keyword/TestContextKey.java | 0 .../cs/keyword/TestContextKeyValue.java | 0 .../linkis/cs/keyword/TestContextValue.java | 0 .../apache/linkis/cs/parser/ApiJsonTest.java | 0 .../cs/persistence/AContextHistory.java | 0 .../linkis/cs/persistence/AContextID.java | 0 .../cs/persistence/AContextIDListener.java | 0 .../linkis/cs/persistence/AContextKey.java | 0 .../cs/persistence/AContextKeyListener.java | 0 .../cs/persistence/AContextKeyValue.java | 0 .../linkis/cs/persistence/AContextValue.java | 0 .../cs/persistence/ContextHistoryTest.java | 0 .../cs/persistence/ContextIDListenerTest.java | 0 .../linkis/cs/persistence/ContextIDTest.java | 0 .../persistence/ContextKeyListenerTest.java | 0 .../linkis/cs/persistence/ContextMapTest.java | 0 .../cs/persistence/ExtraFieldClassTest.java | 0 .../linkis/cs/persistence/MapTypeAdapter.java | 0 .../linkis/cs/persistence/ProxyMethodA.java | 0 .../linkis/cs/persistence/ProxyTest.java | 0 .../apache/linkis/cs/persistence/Scan.java | 0 .../persistence/conf/PersistenceConfTest.java | 0 .../cs/persistence/dao/BaseDaoTest.java | 0 .../dao/ContextHistoryMapperTest.java | 6 +- .../dao/ContextIDListenerMapperTest.java | 0 
.../persistence/dao/ContextIDMapperTest.java | 12 +- .../dao/ContextKeyListenerMapperTest.java | 0 .../utils/PersistenceUtilsTest.java | 0 .../linkis/cs/server/CsRestfulParent.java | 0 .../org/apache/linkis/cs/server/Scan.java | 0 .../linkis/cs/server/SchedulerTest.java | 0 .../cs/server/conf/ContextServerConfTest.java | 2 + .../src/test/resources/application.properties | 16 +- .../src/test/resources/application.yml | 0 .../src/test/resources/create.sql | 18 +- .../src/test/resources/linkis.properties | 0 .../src/test/resources/log4j2.xml | 0 .../core/restful/RestfulApiHelper.java | 10 +- .../src/test/resources/application.properties | 5 +- .../service/elasticsearch/pom.xml | 0 .../src/main/assembly/distribution.xml | 0 .../query/service/ElasticConnection.java | 8 +- .../query/service/ElasticParamsMapper.java | 0 .../metadata/query/service/EsMetaService.java | 0 .../service/hdfs/pom.xml | 0 .../hdfs/src/main/assembly/distribution.xml | 0 .../query/service/HdfsConnection.java | 0 .../query/service/HdfsMetaService.java | 0 .../query/service/HdfsParamsMapper.java | 0 .../service/conf/ConfigurationUtils.java | 0 .../service/hive/pom.xml | 0 .../hive/src/main/assembly/distribution.xml | 0 .../query/service/HiveConnection.java | 0 .../query/service/HiveMetaService.java | 0 .../query/service/HiveParamsMapper.java | 0 .../service/jdbc/pom.xml | 0 .../jdbc/src/main/assembly/distribution.xml | 0 .../query/service/AbstractSqlConnection.java | 0 .../query/service/ClickhouseMetaService.java | 0 .../query/service/Db2MetaService.java | 0 .../metadata/query/service/DmMetaService.java | 0 .../query/service/GreenplumMetaService.java | 0 .../query/service/KingbaseMetaService.java | 0 .../query/service/MysqlMetaService.java | 0 .../query/service/OracleMetaService.java | 3 +- .../query/service/PostgresqlMetaService.java | 0 .../query/service/SqlserverMetaService.java | 0 .../service/clickhouse/SqlConnection.java | 0 .../query/service/conf/SqlParamsMapper.java | 0 
.../query/service/db2/SqlConnection.java | 8 + .../query/service/dm/SqlConnection.java | 0 .../service/greenplum/SqlConnection.java | 0 .../query/service/kingbase/SqlConnection.java | 0 .../query/service/mysql/SqlConnection.java | 5 +- .../query/service/oracle/SqlConnection.java | 0 .../query/service/postgres/SqlConnection.java | 0 .../service/sqlserver/SqlConnection.java | 0 .../service/kafka/pom.xml | 0 .../kafka/src/main/assembly/distribution.xml | 0 .../query/service/KafkaConnection.java | 0 .../query/service/KafkaMetaService.java | 0 .../query/service/KafkaParamsMapper.java | 0 .../service/mongodb/pom.xml | 0 .../src/main/assembly/distribution.xml | 0 .../query/service/MongoDbConnection.java | 6 +- .../query/service/MongoDbParamsMapper.java | 0 .../query/service/MongodbMetaService.java | 0 .../linkis-datasource/linkis-metadata/pom.xml | 2 - .../src/test/resources/application.properties | 5 +- .../linkis-datasource/pom.xml | 13 +- .../linkis-instance-label-server/pom.xml | 2 +- .../label/InsLabelAutoConfiguration.java | 6 +- .../label/async/AsyncConsumerQueue.java | 0 .../async/GenericAsyncConsumerQueue.java | 0 .../cache/InsLabelCacheConfiguration.java | 0 .../instance/label/conf/InsLabelConf.java | 0 .../label/dao/InsLabelRelationDao.java | 0 .../instance/label/dao/InstanceInfoDao.java | 0 .../instance/label/dao/InstanceLabelDao.java | 0 .../label/entity/InsPersistenceLabel.java | 0 .../entity/InsPersistenceLabelValue.java | 0 .../instance/label/entity/InstanceInfo.java | 0 .../LinkisInstanceLabelErrorCodeSummary.java | 0 .../exception/InstanceErrorException.java | 0 .../label/restful/InstanceRestful.java | 55 +- .../label/service/InsLabelAccessService.java | 0 .../label/service/InsLabelService.java | 0 .../label/service/InsLabelServiceAdapter.java | 0 .../label/service/annotation/AdapterMode.java | 0 .../service/impl/DefaultInsLabelService.java | 6 - .../impl/DefaultInsLabelServiceAdapter.java | 0 .../service/impl/SpringInsLabelService.java | 2 - 
.../instance/label/utils/EntityParser.java | 0 .../label/vo/InsPersistenceLabelSearchVo.java | 0 .../instance/label/vo/InstanceInfoVo.java | 0 .../mapper/common}/InsLabelRelationMapper.xml | 82 +- .../mapper/common}/InstanceInfoMapper.xml | 22 +- .../mapper/common}/InstanceLabelMapper.xml | 40 +- .../label/service/InsLabelRpcService.scala | 0 .../rpc/DefaultInsLabelRpcService.scala | 0 .../apache/linkis/instance/label/Scan.java | 0 .../instance/label/WebApplicationServer.java | 0 .../instance/label/dao/BaseDaoTest.java | 0 .../label/dao/InsLabelRelationDaoTest.java | 10 - .../label/dao/InstanceLabelDaoTest.java | 5 - .../impl/DefaultInsLabelServiceTest.java | 0 .../label/utils/EntityParserTest.java | 0 .../src/test/resources/application.properties | 0 .../src/test/resources/create.sql | 0 .../postgresql/InsLabelRelationMapper.xml | 202 -- .../mapper/postgresql/InstanceInfoMapper.xml | 55 - .../mapper/postgresql/InstanceLabelMapper.xml | 114 - .../label/dao/InstanceInfoDaoTest.java | 91 - .../src/test/resources/create_pg.sql | 65 - .../cache/impl/DefaultQueryCacheManager.java | 7 +- .../jobhistory/dao/JobHistoryMapper.java | 81 +- .../jobhistory/dao/JobStatisticsMapper.java | 63 + .../jobhistory/entity/JobStatistics.java | 76 + .../linkis/jobhistory/entity/QueryTaskVO.java | 46 + .../restful/api/QueryRestfulApi.java | 579 +++- .../restful/api/StatisticsRestfulApi.java | 195 ++ .../jobhistory/util/JobhistoryUtils.java | 169 ++ .../mapper/common/JobDetailMapper.xml | 22 +- .../mapper/common/JobStatisticsMapper.xml | 52 + .../mapper/mysql/JobHistoryMapper.xml | 180 +- .../mapper/mysql/JobStatisticsMapper.xml | 86 + .../mapper/postgresql/JobHistoryMapper.xml | 69 +- .../mapper/postgresql/JobStatisticsMapper.xml | 86 + .../conf/JobhistoryConfiguration.scala | 5 + .../conversions/TaskConversions.scala | 63 +- .../service/JobHistoryQueryService.java | 10 +- .../service/JobStatisticsQueryService.java | 24 +- .../impl/JobHistoryQueryServiceImpl.scala | 95 +- 
.../impl/JobStatisticsQueryServiceImpl.scala | 108 + .../linkis/jobhistory/util/QueryUtils.scala | 68 +- .../jobhistory/dao/JobDetailMapperTest.java | 95 - .../jobhistory/dao/JobHistoryMapperTest.java | 13 +- .../restful/api/QueryRestfulApiTest.java | 179 -- .../service/JobHistoryQueryServiceTest.java | 12 +- .../src/test/resources/application.properties | 17 +- .../src/test/resources/create_pg.sql | 62 - .../errorcode/client/ClientConfiguration.java | 2 +- .../handler/LinkisErrorCodeHandler.java | 2 +- .../linkis/bml/conf/BmlConfiguration.scala | 2 +- .../cs/client/utils/ContextClientConf.scala | 2 +- .../config/DatasourceClientConfig.scala | 2 +- .../filesystem/conf/WorkspaceClientConf.scala | 2 +- .../BmlClientErrorCodeSummaryTest.java | 45 - .../builder/ContextClientFactoryTest.java | 35 - .../builder/HttpContextClientConfigTest.java | 33 - .../CsClientErrorCodeSummaryTest.java | 59 - .../apache/linkis/cs/client/test/Test.java | 134 - .../test/bean/ClientTestContextKey.java | 67 - .../listener/CommonContextKeyListener.java | 40 - .../test/no_context_search/TestClear.java | 60 - .../cs/client/test/restful/RestfulTest.java | 146 - .../cs/client/test/service/TestInfo.java | 99 - .../cs/client/test/service/TestRemove.java | 113 - .../test/service/TestSearchService.java | 113 - .../test_multiuser/TestChangeContext.java | 110 - .../test_multiuser/TestCreateContext.java | 122 - .../client/utils/ContextClientConfTest.java | 36 - .../client/utils/ContextServiceUtilsTest.java | 103 - .../cs/client/utils/SerializeHelperTest.java | 60 - .../client/TestDataSourceClient.scala | 123 - .../datasource/client/TestHiveClient.scala | 144 - .../datasource/client/TestMysqlClient.scala | 149 -- .../config/DatasourceClientConfigTest.java | 49 - .../DataSourceClientBuilderExceptionTest.java | 34 - .../imp/LinkisDataSourceRemoteClientTest.java | 37 - .../client/ClientConfigurationTest.java | 46 - .../client/ErrorCodeClientBuilderTest.java | 42 - .../client/LinkisErrorCodeClientTest.java 
| 44 - .../client/handler/ErrorCodeHandlerTest.java | 37 - .../handler/LinkisErrorCodeHandlerTest.java | 48 - .../manager/LinkisErrorCodeManagerTest.java | 45 - .../LinkisErrorCodeSynchronizerTest.java | 46 - .../client/action/ErrorCodeActionTest.scala | 41 - .../filesystem/WorkspaceClientImplTest.scala | 36 - .../action/OpenScriptFromBMLActionTest.scala | 35 - .../conf/WorkspaceClientConfTest.scala | 45 - .../linkis/udf/entity/PythonModuleInfoVO.java | 209 ++ .../org/apache/linkis/udf/entity/UDFInfo.java | 10 + .../apache/linkis/udf/entity/UDFManager.java | 20 + .../api/rpc/RequestPythonModuleProtocol.scala | 28 + .../rpc/ResponsePythonModuleProtocol.scala | 32 +- .../entity/enumeration/ContextScopeTest.java | 40 - .../entity/enumeration/ContextTypeTest.java | 48 - .../common/entity/enumeration/DBTypeTest.java | 40 - .../entity/enumeration/WorkTypeTest.java | 40 - .../protocol/ContextHistoryTypeTest.java | 36 - .../cs/common/protocol/ContextIDTypeTest.java | 43 - .../common/protocol/ContextKeyTypeTest.java | 37 - .../protocol/ContextKeyValueTypeTest.java | 37 - .../common/protocol/ContextValueTypeTest.java | 37 - .../ContextSerializationHelperTest.java | 199 -- ...CombinedNodeIDContextIDSerializerTest.java | 59 - .../CommonContextKeySerializerTest.java | 60 - .../LinkisBMLResourceSerializerTest.java | 63 - .../data/CSResultDataSerializerTest.java | 60 - .../data/LinkisJobDataSerializerTest.java | 61 - .../value/metadata/CSTableSerializerTest.java | 65 - .../object/CSFlowInfosSerializerTest.java | 57 - .../test/ContextSerializationHelperTest.java | 187 -- .../cs/common/utils/CSCommonUtilsTest.java | 67 - .../utils/CSHighAvailableUtilsTest.java | 64 - .../cs/listener/test/TestContextID.java | 35 - .../cs/listener/test/TestContextKeyValue.java | 49 - .../errorcode/common/CommonConfTest.java | 33 - .../query/common/MdmConfigurationTest.java | 37 - .../common/cache/CacheConfigurationTest.java | 40 - .../common/cache/ConnCacheManagerTest.java | 48 - 
.../MetaMethodInvokeExceptionTest.java | 34 - .../exception/MetaRuntimeExceptionTest.java | 34 - .../udf/excepiton/UDFExceptionTest.java | 33 - .../linkis/udf/utils/ConstantVarTest.java | 48 - .../server/conf/UdfTreeConf.java | 26 + .../server/dao/UdfBaseInfoMapper.java} | 18 +- .../server/domain/UdfBaseInfoEntity.java | 205 ++ .../server/domain/UdfManagerEntity.java | 23 + .../server/domain/UdfTreeEntity.java | 12 + .../server/response/EngineLabelResponse.java | 2 +- .../ConfigurationTemplateRestfulApi.java | 15 +- .../restful/DatasourceAccessRestfulApi.java | 10 +- .../restful/DatasourceEnvRestfulApi.java | 7 +- .../restful/DatasourceTypeKeyRestfulApi.java | 8 +- .../restful/DatasourceTypeRestfulApi.java | 9 +- .../server/restful/ErrorCodeRestfulApi.java | 8 +- .../restful/GatewayAuthTokenRestfulApi.java | 75 +- .../RmExternalResourceProviderRestfulApi.java | 9 +- .../server/restful/UdfManagerRestfulApi.java | 26 +- .../server/restful/UdfTreeRestfulApi.java | 52 +- .../service/GatewayAuthTokenService.java | 2 + .../server/service/UdfBaseInfoService.java | 28 + .../impl/GatewayAuthTokenServiceImpl.java | 8 + .../service/impl/UdfBaseInfoServicelmpl.java | 30 + .../server/utils/UdfTreeUtils.java | 93 + .../conf/WorkSpaceConfiguration.java | 15 +- .../constant/WorkSpaceConstants.java | 8 + .../exception/WorkspaceExceptionManager.java | 18 + .../restful/api/BMLFsRestfulApi.java | 15 +- .../filesystem/restful/api/FsRestfulApi.java | 601 ++++- .../linkis/filesystem/util/WorkspaceUtil.java | 4 + .../mapper/common/CgManagerLabelMapper.xml | 6 +- .../ConfigurationConfigKeyMapper.xml | 22 +- .../common/ConfigurationConfigValueMapper.xml | 11 +- .../ConfigurationKeyEngineRelationMapper.xml | 5 +- .../DatasourceAccessMapper.xml | 8 +- .../{mysql => common}/DatasourceEnvMapper.xml | 10 +- .../DatasourceTypeKeyMapper.xml | 10 +- .../DatasourceTypeMapper.xml | 12 +- .../EngineConnPluginBmlResourcesMapper.xml | 6 +- .../GatewayAuthTokenMapper.xml | 10 +- .../PsErrorCodeMapper.xml 
| 10 +- .../RmExternalResourceProviderMapper.xml | 12 +- .../mapper/common/UdfBaseInfoMapper.xml | 8 +- .../{mysql => common}/UdfManagerMapper.xml | 6 +- .../{mysql => common}/UdfTreeMapper.xml | 12 +- .../mapper/{mysql => common}/VarMapper.xml | 8 +- .../mapper/mysql/PsErrorCodeMapper.xml | 47 - .../ConfigurationConfigKeyMapper.xml | 68 - .../postgresql/DatasourceAccessMapper.xml | 49 - .../mapper/postgresql/DatasourceEnvMapper.xml | 51 - .../postgresql/DatasourceTypeKeyMapper.xml | 64 - .../postgresql/DatasourceTypeMapper.xml | 50 - .../postgresql/GatewayAuthTokenMapper.xml | 51 - .../RmExternalResourceProviderMapper.xml | 47 - .../mapper/postgresql/UdfTreeMapper.xml | 51 - .../resources/mapper/postgresql/VarMapper.xml | 61 - .../linkis/filesystem/service/FsService.scala | 20 + .../filesystem/validator/PathValidator.scala | 10 +- .../linkis/basedatamanager/server/Scan.java | 26 - .../server/WebApplicationServer.java | 34 - .../dao/ConfigurationConfigKeyMapperTest.java | 86 - .../ConfigurationConfigValueMapperTest.java | 65 - ...figurationKeyEngineRelationMapperTest.java | 53 - .../dao/DatasourceAccessMapperTest.java | 48 - .../server/dao/DatasourceEnvMapperTest.java | 55 - .../dao/DatasourceTypeKeyMapperTest.java | 39 - .../server/dao/DatasourceTypeMapperTest.java | 39 - ...ngineConnPluginBmlResourcesMapperTest.java | 56 - .../dao/GatewayAuthTokenMapperTest.java | 55 - .../server/dao/PsErrorCodeMapperTest.java | 50 - .../RmExternalResourceProviderMapperTest.java | 52 - .../server/dao/UdfTreeMapperTest.java | 54 - .../ConfigurationTemplateRestfulApiTest.java | 143 - .../DatasourceAccessRestfulApiTest.java | 149 -- .../restful/DatasourceEnvRestfulApiTest.java | 138 - .../DatasourceTypeKeyRestfulApiTest.java | 158 -- .../restful/DatasourceTypeRestfulApiTest.java | 159 -- .../restful/ErrorCodeRestfulApiTest.java | 154 -- .../GatewayAuthTokenRestfulApiTest.java | 161 -- .../server/restful/MvcUtils.java | 126 - ...xternalResourceProviderRestfulApiTest.java | 154 -- 
.../restful/UdfManagerRestfulApiTest.java | 151 -- .../server/restful/UdfTreeRestfulApiTest.java | 159 -- .../service/DatasourceAccessServiceTest.java | 63 - .../service/DatasourceEnvServiceTest.java | 65 - .../service/DatasourceTypeKeyServiceTest.java | 65 - .../service/DatasourceTypeServiceTest.java | 64 - .../server/service/ErrorCodeServiceTest.java | 63 - .../service/GatewayAuthTokenServiceTest.java | 63 - ...RmExternalResourceProviderServiceTest.java | 60 - .../server/service/UdfManagerServiceTest.java | 59 - .../server/service/UdfTreeServiceTest.java | 60 - .../apache/linkis/errorcode/server/Scan.java | 26 - .../server/WebApplicationServer.java | 34 - .../errorcode/server/dao/BaseDaoTest.java | 31 - .../restful/api/FsRestfulApiTest.java | 2 +- .../java/org/apache/linkis/variable/Scan.java | 26 - .../linkis/variable/WebApplicationServer.java | 34 - .../linkis/variable/dao/BaseDaoTest.java | 31 - .../linkis/variable/dao/VarMapperTest.java | 124 - .../linkis/variable/restful/MvcUtils.java | 115 - .../restful/api/VariableRestfulApiTest.java | 112 - .../variable/service/VariableServiceTest.java | 85 - .../src/test/resources/application.properties | 55 +- .../resources/basedata_manager_create.sql | 434 --- .../resources/basedata_manager_create_pg.sql | 252 -- .../src/test/resources/error_code_create.sql | 34 - .../test/resources/error_code_create_pg.sql | 32 - .../src/test/resources/info.text | 19 - .../src/test/resources/variable_create.sql | 61 - .../src/test/resources/variable_create_pg.sql | 59 - .../label/client/InstanceLabelClient.scala | 25 +- .../lock/CommonLockSpringConfiguration.java | 2 +- .../common/lock/dao/CommonLockMapper.java | 6 +- .../common/lock/entity/CommonLock.java | 9 + .../lock/service/CommonLockService.java | 2 + .../impl/DefaultCommonLockService.java | 24 +- .../mapper/common/CommonLockMapper.xml | 13 +- .../common/lock/dao/CommonLockMapperTest.java | 58 +- .../src/test/resources/application.properties | 15 +- 
.../src/test/resources/create.sql | 6 +- .../src/test/resources/data.sql | 22 + .../linkis-udf-service/pom.xml | 2 +- .../apache/linkis/udf/api/UDFRestfulApi.java | 280 ++ .../udf/dao/PythonModuleInfoMapper.java} | 36 +- .../org/apache/linkis/udf/dao/UDFDao.java | 4 +- .../org/apache/linkis/udf/dao/UDFTreeDao.java | 0 .../apache/linkis/udf/dao/UDFVersionDao.java | 0 .../linkis/udf/entity/PythonModuleInfo.java | 158 ++ .../udf/service/PythonModuleInfoService.java | 29 +- .../apache/linkis/udf/service/UDFService.java | 2 +- .../linkis/udf/service/UDFTreeService.java | 0 .../impl/PythonModuleInfoServiceImpl.java | 64 + .../udf/service/impl/UDFServiceImpl.java | 8 +- .../udf/service/impl/UDFTreeServiceImpl.java | 0 .../mapper/common/PythonModuleInfoMapper.xml | 93 + .../main/resources/mapper/common}/UDFDao.xml | 164 +- .../resources/mapper/common/UDFTreeDao.xml | 56 +- .../mapper/common}/UDFVersionDao.xml | 34 +- .../linkis/udf/api/rpc/UdfReceiver.scala | 102 + .../udf/api/rpc/UdfReceiverChooser.scala | 9 +- .../linkis/udf/utils/UdfConfiguration.scala | 0 .../test/java/org/apache/linkis/udf/Scan.java | 0 .../linkis/udf/WebApplicationServer.java | 0 .../udf/api/PythonModuleRestfulApiTest.java | 132 + .../linkis/udf/api/UDFRestfulApiTest.java | 0 .../apache/linkis/udf/dao/BaseDaoTest.java | 0 .../udf/dao/PythonModuleInfoMapperTest.java | 113 + .../org/apache/linkis/udf/dao/UDFDaoTest.java | 0 .../apache/linkis/udf/dao/UDFTreeDaoTest.java | 0 .../linkis/udf/dao/UDFVersionDaoTest.java | 0 .../service/PythonModuleInfoServiceTest.java | 129 + .../linkis/udf/service/UDFServiceTest.java | 0 .../udf/service/UDFTreeServiceTest.java | 0 .../src/test/resources/application.properties | 17 +- .../src/test/resources/create.sql | 93 + .../src/test/resources/data.sql} | 74 +- .../src/test/resources/linkis.properties | 0 .../udf/utils/UdfConfigurationTest.scala | 0 .../main/resources/mapper/mysql/UDFDao.xml | 434 --- .../resources/mapper/mysql/UDFVersionDao.xml | 130 - 
.../linkis/udf/api/rpc/UdfReceiver.scala | 56 - .../src/test/resources/create_pg.sql | 143 - linkis-public-enhancements/pom.xml | 13 +- .../linkis-eureka/pom.xml | 22 + .../conf/TokenConfiguration.scala | 4 +- .../service/CachedTokenService.scala | 2 +- .../authentication/dao/TokenDaoTest.java | 3 +- .../src/test/resources/application.properties | 5 +- .../linkis-gateway-core/pom.xml | 10 + .../gateway/config/GatewayConfiguration.scala | 15 + .../config/GatewaySpringConfiguration.scala | 15 +- .../gateway/security/SecurityFilter.scala | 75 +- .../linkis/gateway/security/UserRestful.scala | 16 +- .../StaticAuthenticationStrategy.scala | 20 +- .../dws/config/DWSClientConfig.scala | 9 +- .../response/DWSAuthenticationResult.scala | 5 + .../linkis-gateway-server-support/pom.xml | 7 + .../gateway/dss/parser/DSSGatewayParser.scala | 5 + .../ujes/parser/ECMRequestGatewayParser.scala | 74 + .../parser/EntranceRequestGatewayParser.scala | 71 +- .../route/AbstractLabelGatewayRouter.scala | 31 +- .../linkis-spring-cloud-gateway/pom.xml | 4 - .../http/GatewayAuthorizationFilter.java | 23 +- .../http/IpPriorityLoadBalancer.java | 108 + ...LinkisLoadBalancerClientConfiguration.java | 35 + .../http/SpringCloudGatewayConstant.java | 23 + .../SpringCloudGatewayConfiguration.scala | 81 +- .../http/SpringCloudGatewayHttpRequest.scala | 4 +- linkis-web-next/package-lock.json | 121 +- linkis-web-next/package.json | 8 +- linkis-web-next/public/log/noLog.svg | 28 + .../{dateReport.svg => globalVariable.svg} | 0 linkis-web-next/src/App.vue | 2 - .../src/components/editor/editor.vue | 228 -- .../src/components/editor/index.less | 61 - .../src/components/editor/index.vue | 96 + .../src/components/editor/keyword/hql.ts | 2384 ----------------- .../src/components/editor/keyword/python.ts | 384 --- .../src/components/editor/keyword/sas.ts | 172 -- .../src/components/editor/keyword/sh.ts | 570 ---- .../src/components/editor/languages/hql.ts | 1587 ----------- 
.../src/components/editor/languages/out.ts | 1570 ----------- .../src/components/editor/languages/sas.ts | 66 - .../src/components/editor/languages/sh.ts | 220 -- .../src/components/editor/linkis_dml.sql | 50 - .../src/components/editor/monaco-loader.ts | 23 +- .../editor/sqlFormatter/core/Formatter.ts | 348 --- .../editor/sqlFormatter/core/Indentation.ts | 87 - .../editor/sqlFormatter/core/InlineBlock.ts | 120 - .../editor/sqlFormatter/core/Tokenizer.ts | 508 ---- .../sqlFormatter/languages/Db2Formatter.ts | 601 ----- .../sqlFormatter/languages/N1qlFormatter.ts | 276 -- .../sqlFormatter/languages/PlSqlFormatter.ts | 463 ---- .../languages/StandardSqlFormatter.ts | 379 --- .../editor/sqlFormatter/sqlFormatter.ts | 51 - linkis-web-next/src/components/editor/util.ts | 14 +- .../src/components/sidebar/index.less | 37 +- .../src/components/sidebar/index.vue | 81 +- .../src/dss/assets/styles/app.less | 92 - .../src/dss/assets/styles/normalize.less | 387 --- linkis-web-next/src/dss/dssRouter.ts | 19 - linkis-web-next/src/dss/view/app.vue | 28 - .../src/dss/view/commonIframe/index.vue | 101 - linkis-web-next/src/dss/view/layout.vue | 24 - .../src/dss/view/logPage/index.vue | 125 - linkis-web-next/src/dss/view/login/index.vue | 183 +- linkis-web-next/src/helper/db.ts | 51 +- linkis-web-next/src/helper/storage.ts | 12 +- linkis-web-next/src/layout.vue | 332 +-- linkis-web-next/src/locales/en.ts | 257 +- linkis-web-next/src/locales/zh.ts | 128 +- linkis-web-next/src/main.ts | 3 +- .../src/pages/ECMManagement/index.vue | 288 ++ .../src/pages/ECMManagement/modal.vue | 173 ++ .../pages/globalHistoryManagement/count.vue | 88 +- .../pages/globalHistoryManagement/drawer.vue | 248 -- .../globalHistoryManagement/drawer/index.vue | 188 ++ .../drawer/taskDetails.vue | 253 ++ .../drawer/taskLogs.vue | 261 ++ .../drawer/taskResults.vue | 184 ++ .../pages/globalHistoryManagement/filter.vue | 242 +- .../pages/globalHistoryManagement/index.less | 18 - 
.../pages/globalHistoryManagement/index.vue | 360 ++- .../pages/globalHistoryManagement/table.vue | 362 --- .../globalHistoryManagement/tooltipText.vue | 20 +- .../src/pages/globalVariables/index.vue | 149 ++ linkis-web-next/src/pages/login/index.vue | 201 -- .../pages/microServiceManagement/index.vue | 208 ++ .../pages/microServiceManagement/modal.vue | 149 ++ .../src/pages/parameterConfig/card.vue | 240 +- .../src/pages/parameterConfig/drawer.vue | 826 +++--- .../src/pages/parameterConfig/handleChange.ts | 24 - .../src/pages/parameterConfig/ide/index.vue | 103 - .../src/pages/parameterConfig/index.less | 45 +- .../src/pages/parameterConfig/index.vue | 204 +- .../src/pages/parameterConfig/list.vue | 129 - .../src/pages/parameterConfig/modal.vue | 159 +- .../parameterConfig/tableauServer/index.vue | 20 - .../pages/resource/history/drawer/index.vue | 153 ++ .../src/pages/resource/history/drawer/log.vue | 253 ++ .../src/pages/resource/history/index.vue | 237 +- linkis-web-next/src/pages/resource/index.vue | 22 +- .../src/pages/resource/manage/index.vue | 237 +- linkis-web-next/src/router/index.ts | 129 +- linkis-web-next/src/service/api.ts | 82 +- linkis-web-next/src/service/apiCache.ts | 25 +- linkis-web-next/src/style/style.less | 593 +--- linkis-web-next/src/util/currentModules.ts | 50 - linkis-web-next/src/util/index.ts | 49 +- linkis-web/.env | 2 +- linkis-web/package.json | 7 +- .../licenses/LICENSE-hint.css.txt | 21 + .../apps/linkis/assets/styles/console.scss | 21 + .../apps/linkis/assets/styles/hint.min.css | 21 + .../linkis/components/variable/index.scss | 5 + .../apps/linkis/components/variable/index.vue | 10 +- .../src/apps/linkis/i18n/common/en.json | 127 +- .../src/apps/linkis/i18n/common/zh.json | 89 +- .../src/apps/linkis/module/ECM/engineConn.vue | 15 +- .../src/apps/linkis/module/ECM/index.vue | 10 +- linkis-web/src/apps/linkis/module/ECM/log.vue | 67 +- .../module/EnginePluginManagement/index.scss | 18 +- .../module/EnginePluginManagement/index.vue 
| 66 +- .../linkis/module/acrossClusterRule/index.js | 10 +- .../module/acrossClusterRule/index.scss | 64 +- .../linkis/module/acrossClusterRule/index.vue | 586 ++++ .../apps/linkis/module/codeQuery/index.scss | 5 + .../apps/linkis/module/codeQuery/index.vue | 34 +- .../linkis/module/configManagement/index.js | 10 +- .../linkis/module/configManagement/index.scss | 63 + .../linkis/module/configManagement/index.vue | 549 ++++ .../apps/linkis/module/datasource/index.vue | 2 +- .../linkis/module/datasourceTypeKey/index.vue | 2 +- .../gatewayAuthToken/EditForm/index.vue | 1 - .../module/globalHistoryManagement/index.scss | 35 +- .../module/globalHistoryManagement/index.vue | 237 +- .../globalHistoryManagement/viewHistory.vue | 57 +- .../src/apps/linkis/module/header/index.vue | 4 +- .../linkis/module/ipListManagement/index.vue | 11 +- .../module/resourceManagement/engineConn.vue | 29 +- .../module/resourceManagement/index.vue | 53 +- .../linkis/module/resourceManagement/log.vue | 52 +- .../module/resourceManagement/search.vue | 12 +- .../EditForm/index.vue | 11 +- .../apps/linkis/module/setting/setting.vue | 60 +- .../module/statisticsDashboard/index.js | 8 + .../module/statisticsDashboard/index.scss | 157 ++ .../module/statisticsDashboard/index.vue | 786 ++++++ .../statisticsDashboard.vue | 441 +++ .../module/tenantTagManagement/index.vue | 11 +- .../src/apps/linkis/module/udfTree/index.scss | 1 - .../apps/linkis/module/userConfig/index.js | 8 +- .../apps/linkis/module/userConfig/index.scss | 45 +- .../apps/linkis/module/userConfig/index.vue | 524 ++++ linkis-web/src/apps/linkis/router.js | 46 +- .../src/apps/linkis/view/linkis/index.vue | 70 +- linkis-web/src/common/i18n/en.json | 2 +- linkis-web/src/common/i18n/zh.json | 2 +- .../components/consoleComponent/result.vue | 7 +- .../consoleComponent/resultSetList.vue | 4 +- .../components/consoleComponent/toolbar.vue | 79 +- linkis-web/src/components/table/table.css | 19 +- linkis-web/src/components/table/table.vue | 76 
+- .../historyTable/historyTable.vue | 7 +- linkis-web/src/dss/module/footer/index.vue | 2 +- linkis-web/src/dss/module/header/index.vue | 4 +- .../src/dss/module/resourceSimple/engine.vue | 245 +- .../src/dss/module/resourceSimple/index.scss | 199 +- .../src/dss/module/resourceSimple/job.vue | 2 +- .../src/dss/module/resourceSimple/queue.vue | 21 +- pom.xml | 62 +- tool/dependencies/known-dependencies.txt | 652 ++--- .../regenerate_konwn_dependencies_txt.sh | 2 +- 2119 files changed, 66044 insertions(+), 59569 deletions(-) create mode 100644 linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/JobHistoryInfo.java rename {linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util => linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils}/LinkisUtils.java (64%) create mode 100644 linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java rename linkis-commons/{linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultMetaData.java => linkis-module/src/main/java/org/apache/linkis/proxy/ProxyUserService.java} (84%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java => linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java (68%) create mode 100644 linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java create mode 100644 linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java create mode 100644 linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java create mode 100644 linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/UserWithCreator.scala create mode 100644 
linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala delete mode 100644 linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala delete mode 100644 linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala delete mode 100644 linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala delete mode 100644 linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java rename 
linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineUnlock.java => linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java (63%) create mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java rename linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.java => linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java (65%) delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java delete mode 100644 
linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableMetaData.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java delete mode 100644 
linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java delete mode 100644 
linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala rename linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientFactoryTest.scala => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineMetaData.scala (69%) rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/resultset/table/TableRecord.java => scala/org/apache/linkis/storage/LineRecord.scala} (67%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala (91%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala 
create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsReader.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/script/ScriptMetaData.java => scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala} (59%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelXlsReader.java rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResultResource.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelReader.scala (91%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/excel/StorageMultiExcelWriter.java => scala/org/apache/linkis/storage/excel/StorageMultiExcelWriter.scala} (58%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/io/IOClientFactory.java => scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala} (53%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/resultset/ResultRecord.java => scala/org/apache/linkis/storage/resultset/ResultMetaData.scala} (85%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala create mode 100644 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/html/HtmlResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala (56%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala (70%) rename 
linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala (71%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/LineMetaData.java => scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala} (60%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala rename linkis-commons/{linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala => linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala} (57%) create mode 100644 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala rename linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala (59%) rename linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala (59%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ShellScriptParser.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/reader/StorageScriptFsReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/TextFileSource.scala create mode 
100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageHelper.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala delete mode 100644 linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/LineMetaDataTest.java delete mode 100644 linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java delete mode 100644 linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java delete mode 100644 linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin delete mode 100644 linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin rename linkis-commons/{linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala => linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala} (73%) create mode 100644 linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala create mode 100644 linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala rename linkis-computation-governance/{linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java => linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala} (51%) create mode 100644 
linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala create mode 100644 linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala create mode 100644 linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala create mode 100644 linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala create mode 100644 linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala rename linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala => linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala (69%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/IExcelRowDeal.java => linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java (76%) create mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java rename linkis-computation-governance/{linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java => linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java} (55%) rename linkis-computation-governance/{linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/ManagerLabelService.java => 
linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java} (68%) delete mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java rename linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/{GovernanceCommonConf.scala => GovernaceCommonConf.scala} (81%) create mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala create mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala create mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala create mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java delete mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java delete mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java rename linkis-computation-governance/{linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java => linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java} (53%) create mode 100644 
linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala create mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala rename linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java => linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala (91%) rename linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/IRServiceGroupProtocol.scala => linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala (71%) delete mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java delete mode 100644 linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala rename linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextValue.java => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java (61%) create mode 100644 
linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java => linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java (50%) rename linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java => linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java (68%) delete mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala rename linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/ErrorCodeMapperTest.java => linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala (58%) 
rename linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/{EngineConnPidCallback.scala => EngineConnIdentifierCallback.scala} (99%) create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java create mode 100644 linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java create mode 100644 linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IORecord.java => linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/EngineLockListener.scala (68%) create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala rename linkis-computation-governance/{linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/OperateRequest.java => 
linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala} (59%) create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala delete mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala delete mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala delete mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala create mode 100644 linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java create mode 100644 linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java create mode 100644 linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java create mode 100644 linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java create mode 100644 linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestReplaceComment.scala delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common/metrics => am/converter}/MetricsConverter.java (97%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/label/LabelChecker.java (95%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/EMNodPointer.java (96%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/EngineNodePointer.java (96%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/NodePointer.java (96%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/NodePointerBuilder.java (94%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/DefaultECAvailableRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/DefaultNodeSelector.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/AvailableNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/ConcurrencyNodeSelectRule.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/NewECMStandbyRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/ResourceNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/ScoreNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/TaskInfoNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/cache/ConfCacheRemoveBroadcastListener.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultECMOperateService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMInfoService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMRegisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/ECMOperateService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/EMUnregisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/AbstractEngineService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineConnCanKillService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineInfoService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineRecycleService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineInfoService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineOperateService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineStopService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/monitor/NodeHeartbeatMonitor.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/EMUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/utils/AMUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/utils/DefaultRetryHandler.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/CanCreateECRes.java rename linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextKey.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/ConfigVo.java (51%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/conf/LabelManagerConf.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/DefaultNodeLabelScorer.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/NodeLabelRemoveService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/NodeLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelAddService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultResourceLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/LabelResourceMapping.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/external/kubernetes/KubernetesResourceRequester.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/external/parser/KubernetesResourceIdentifierParser.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/external/yarn/YarnQueueInfo.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/message/RMMessageService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/restful/RMMonitorRest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/RequestResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/ResourceLockService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/ChangeType.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DefaultReqResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DriverAndKubernetesReqResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/LabelResourceServiceImpl.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/ResourceLogService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/UserResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/RMUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/UserConfiguration.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/resources/mapper/{postgresql => common}/EngineConnBmlResourceMapper.xml (69%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/resources/mapper/mysql/EngineConnBmlResourceMapper.xml create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/interceptor/EngineConnLaunchInterceptor.java => scala/org/apache/linkis/engineplugin/server/interceptor/EngineConnLaunchInterceptor.scala} (76%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoader.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/utils/RetryHandler.java => scala/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.scala} (58%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/service/EngineConnLaunchService.java => scala/org/apache/linkis/engineplugin/server/service/EngineConnLaunchService.scala} (73%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/service/EngineConnResourceFactoryService.java => scala/org/apache/linkis/engineplugin/server/service/EngineConnResourceFactoryService.scala} (67%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHook.scala rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.java => scala/org/apache/linkis/manager/am/hook/AskEngineConnHookContext.scala} (78%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/DefaultNodeSelector.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/ECAvailableRule.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/NodeSelector.java => scala/org/apache/linkis/manager/am/selector/NodeSelector.scala} (74%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/AvailableNodeSelectRule.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/CSVFsWriter.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/ConcurrencyNodeSelectRule.scala (60%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/rule/HotspotExclusionRule.java => scala/org/apache/linkis/manager/am/selector/rule/HotspotExclusionRule.scala} (53%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/NewECMStandbyRule.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/rule/NodeSelectRule.java => scala/org/apache/linkis/manager/am/selector/rule/NodeSelectRule.scala} (81%) create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/OverLoadNodeSelectRule.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/ResourceNodeSelectRule.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/ScoreNodeSelectRule.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/TaskInfoNodeSelectRule.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/EMEngineService.java => scala/org/apache/linkis/manager/am/service/EMEngineService.scala} (52%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Parser.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/EngineService.scala (79%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/HeartbeatService.java => scala/org/apache/linkis/manager/am/service/HeartbeatService.scala} (85%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/cache/ConfCacheRemoveBroadcastListener.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultECMOperateService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMInfoService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMRegisterService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/ECMOperateService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/em/EMInfoService.java => scala/org/apache/linkis/manager/am/service/em/EMInfoService.scala} (63%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/EMRegisterService.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClient.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/EMUnregisterService.scala (70%) rename linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/SolrSourceConfig.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/AbstractEngineService.scala (57%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnCanKillService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineInfoService.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Column.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineOperateService.scala (55%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineRecycleService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/DefaultEngineSwitchService.java => scala/org/apache/linkis/manager/am/service/engine/DefaultEngineSwitchService.scala} (68%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnCanKillService.scala rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineCreateService.java => scala/org/apache/linkis/manager/am/service/engine/EngineCreateService.scala} (69%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineInfoService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineKillService.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelAnalysisException.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineOperateService.scala (67%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineRecycleService.java => scala/org/apache/linkis/manager/am/service/engine/EngineRecycleService.scala} (72%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineReuseService.java => scala/org/apache/linkis/manager/am/service/engine/EngineReuseService.scala} (69%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineSwitchService.java => scala/org/apache/linkis/manager/am/service/engine/EngineSwitchService.scala} (72%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/monitor/NodeHeartbeatMonitor.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/utils/AMUtils.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/LabelManagerUtils.java => scala/org/apache/linkis/manager/label/LabelManagerUtils.scala} (61%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/score/DefaultNodeLabelScorer.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/score/NodeLabelScorer.java => scala/org/apache/linkis/manager/label/score/NodeLabelScorer.scala} (60%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/service/NodeLabelAddService.java => scala/org/apache/linkis/manager/label/service/NodeLabelAddService.scala} (75%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/NodeLabelRemoveService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/NodeLabelService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/service/ResourceLabelService.java => scala/org/apache/linkis/manager/label/service/ResourceLabelService.scala} (62%) rename 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/ExpireTypeTest.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/UserLabelService.scala (61%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelAddService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultResourceLabelService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/rm/external/kubernetes/KubernetesResourceIdentifier.java => scala/org/apache/linkis/manager/rm/entity/LabelResourceMap.scala} (51%) rename linkis-computation-governance/linkis-manager/{linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/OperateResponse.java => linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/entity/ResourceOperationType.scala} (81%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/message/RMMessageService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/rm/service/LabelResourceService.java => scala/org/apache/linkis/manager/rm/service/LabelResourceService.scala} (53%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/RequestResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/ResourceLockService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/rm/service/ResourceManager.java => scala/org/apache/linkis/manager/rm/service/ResourceManager.scala} (54%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOMetaData.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultReqResourceService.scala (66%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/LabelResourceServiceImpl.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/UserResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/AcrossClusterRulesJudgeUtils.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/RMUtils.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/UserConfiguration.scala rename linkis-computation-governance/linkis-manager/{linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactory.java => linkis-application-manager/src/main/scala/org/apache/linkis/manager/service/common/label/LabelChecker.scala} (69%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/service/common/label/LabelFilter.scala rename linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCLoadBalancer.scala => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/service/common/label/ManagerLabelService.scala (72%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/Scan.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/WebApplicationServer.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/engineplugin/server/dao/BaseDaoTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/engineplugin/server/dao/EngineConnBmlResourceDaoTest.java rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/am/util/{ECResourceInfoLinkisUtilsTest.java => ECResourceInfoUtilsTest.java} (98%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/application.properties delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/create.sql delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/create_pg.sql create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/label/conf/LabelManagerConfTest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/rm/utils/RMUtilsTest.scala rename linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ShellECTaskInfo.java => linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/DriverTaskLabel.java (52%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabelTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/conf/ManagerCommonConf.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/Operator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactoryImpl.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestManagerUnlock.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/ECMOperateRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/ECMOperateResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/EMResourceRegisterRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/RegisterEMRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAskAsyncResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAsyncResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallbackToAM.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateError.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineOperateRequest.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineOperateResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResourceUsedProtocol.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResourceWithStatus.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResponseTaskRunningInfo.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/RequestResourceAndWait.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/operator/Operator.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/operator/OperatorFactory.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/{java/org/apache/linkis/manager/common/protocol/RequestEngineLock.java => scala/org/apache/linkis/manager/common/protocol/EngineLock.scala} (56%) rename linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/create_pg.sql => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/OperateRequest.scala (51%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptRecord.java => 
linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/OperateResponse.scala (80%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/ECMOperateRequest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/ECMOperateResponse.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/EMResourceRegisterRequest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/RegisterEMRequest.scala rename linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/RegisterEMResponse.scala (78%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineAsyncResponse.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.scala rename linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestEngineStatus.scala => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineOperateRequest.scala (67%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineOperateResponse.scala rename 
linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestUserEngineKill.scala => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.scala (69%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.scala rename linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/listener/execution/ExecutionTaskStatusListener.scala => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceWithStatus.scala (70%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/rm/ResourceInfo.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorCreator.java => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/rm/ResultResource.scala (79%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml rename linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/{postgresql => common}/LabelManagerMapper.xml (92%) rename linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/{postgresql => common}/ResourceManagerMapper.xml (85%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/mysql/ECResourceRecordMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/mysql/LabelManagerMapper.xml delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/mysql/ResourceManagerMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/postgresql/ECResourceRecordMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ECResourceRecordMapperTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/LabelManagerMapperTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/create_pg.sql create mode 100644 linkis-dist/package/admin/clear_ec_record.sh create mode 100644 linkis-dist/package/admin/clear_history_task.sh create mode 100644 linkis-dist/package/admin/configuration_helper.sh create mode 100644 linkis-dist/package/admin/linkis_task_res_log_clear.sh create mode 100644 linkis-dist/package/conf/linkis-et-monitor-file.properties create mode 100644 linkis-dist/package/conf/linkis-et-monitor.properties create mode 100644 linkis-dist/package/conf/nacos/application-engineconn.yml create mode 100644 linkis-dist/package/conf/nacos/application-linkis.yml create mode 100644 linkis-dist/package/db/udf/udf_sys.sql create mode 100644 linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql create mode 100644 linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql create mode 100644 linkis-dist/package/sbin/ext/linkis-et-monitor delete mode 100644 linkis-dist/release-docs/licenses/LICENSE-akka-protobuf_2.11.txt delete mode 100644 linkis-dist/release-docs/licenses/LICENSE-akka-slf4j_2.11.txt delete mode 100644 linkis-dist/release-docs/licenses/LICENSE-akka-stream_2.11.txt rename linkis-dist/release-docs/licenses/{LICENSE-akka-actor_2.11.txt => LICENSE-client.txt} (96%) create mode 100644 
linkis-dist/release-docs/licenses/LICENSE-jts-core.txt create mode 100644 linkis-engineconn-plugins/doris/pom.xml create mode 100644 linkis-engineconn-plugins/doris/src/main/assembly/distribution.xml rename linkis-engineconn-plugins/{trino/src/main/java/org/apache/linkis/engineplugin/trino/TrinoEngineConnPlugin.java => doris/src/main/java/org/apache/linkis/engineplugin/doris/DorisEngineConnPlugin.java} (74%) rename linkis-engineconn-plugins/{shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/builder/ShellProcessEngineConnLaunchBuilder.java => doris/src/main/java/org/apache/linkis/engineplugin/doris/builder/DorisProcessEngineConnLaunchBuilder.java} (88%) create mode 100644 linkis-engineconn-plugins/doris/src/main/java/org/apache/linkis/engineplugin/doris/conf/DorisConfiguration.java rename linkis-engineconn-plugins/{trino/src/main/java/org/apache/linkis/engineplugin/trino/conf/TrinoEngineConfig.java => doris/src/main/java/org/apache/linkis/engineplugin/doris/conf/DorisEngineConf.java} (94%) create mode 100644 linkis-engineconn-plugins/doris/src/main/java/org/apache/linkis/engineplugin/doris/constant/DorisConstant.java create mode 100644 linkis-engineconn-plugins/doris/src/main/java/org/apache/linkis/engineplugin/doris/errorcode/DorisErrorCodeSummary.java rename linkis-engineconn-plugins/{trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoClientException.java => doris/src/main/java/org/apache/linkis/engineplugin/doris/exception/DorisException.java} (81%) rename linkis-engineconn-plugins/{python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/PythonExecuteError.java => doris/src/main/java/org/apache/linkis/engineplugin/doris/exception/DorisParameterException.java} (80%) rename linkis-engineconn-plugins/{trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoModifySchemaException.java => doris/src/main/java/org/apache/linkis/engineplugin/doris/exception/DorisStreamLoadFileException.java} (79%) 
create mode 100644 linkis-engineconn-plugins/doris/src/main/java/org/apache/linkis/engineplugin/doris/executor/DorisDatasourceParser.java create mode 100644 linkis-engineconn-plugins/doris/src/main/java/org/apache/linkis/engineplugin/doris/executor/DorisEngineConnExecutor.java create mode 100644 linkis-engineconn-plugins/doris/src/main/java/org/apache/linkis/engineplugin/doris/util/DorisUtils.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/linkis.properties => linkis-engineconn-plugins/doris/src/main/resources/linkis-engineconn.properties (68%) create mode 100644 linkis-engineconn-plugins/doris/src/main/resources/log4j2.xml create mode 100644 linkis-engineconn-plugins/doris/src/main/scala/org/apache/linkis/engineplugin/doris/factory/DorisEngineConnFactory.scala create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/client/utils/FlinkUdfUtils.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/errorcode/FlinkErrorCodeSummary.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/ExecutorInitException.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/FlinkInitFailedException.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/JobExecutionException.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/SqlExecutionException.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/SqlParseException.java create mode 100644 
linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkManagerConcurrentExecutor.scala create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/hook/FlinkJarUdfEngineHook.scala rename linkis-engineconn-plugins/{trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoGrantmaException.java => flink/flink-core/src/test/java/org/apache/linkis/engineplugin/flink/LinkisFlinkUdfExample.java} (75%) rename linkis-engineconn-plugins/hbase/hbase-core/src/main/{scala => java}/org/apache/linkis/manager/engineplugin/hbase/exception/ExecutorInitException.java (100%) delete mode 100644 linkis-engineconn-plugins/io_file/src/test/resources/testIoResult.dolphin delete mode 100644 linkis-engineconn-plugins/io_file/src/test/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutorTest.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/PythonEngineConnPlugin.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/conf/PythonEngineConfiguration.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/hook/PythonVersionEngineHook.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/utils/Kind.java create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/PythonEngineConnPlugin.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/conf/PythonEngineConfiguration.scala rename 
linkis-engineconn-plugins/python/src/main/{java/org/apache/linkis/manager/engineplugin/python/exception/PythonSessionStartFailedExeception.java => scala/org/apache/linkis/manager/engineplugin/python/exception/NoSupportEngineException.scala} (68%) create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/exception/PythonSessionNullException.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/hook/PythonVersionEngineHook.scala rename linkis-engineconn-plugins/python/src/main/{java/org/apache/linkis/manager/engineplugin/python/launch/PythonProcessEngineConnLaunchBuilder.java => scala/org/apache/linkis/manager/engineplugin/python/launch/PythonProcessEngineConnLaunchBuilder.scala} (81%) create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/utils/Kind.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/utils/State.scala delete mode 100644 linkis-engineconn-plugins/python/src/test/java/org/apache/linkis/manager/engineplugin/python/exception/TestNoSupportEngineException.java delete mode 100644 linkis-engineconn-plugins/python/src/test/java/org/apache/linkis/manager/engineplugin/python/executor/TestPythonEngineConnExecutor.java delete mode 100644 linkis-engineconn-plugins/python/src/test/java/org/apache/linkis/manager/engineplugin/python/factory/TestPythonEngineConnFactory.java rename linkis-engineconn-plugins/python/src/test/{java/org/apache/linkis/manager/engineplugin/python/TestPythonEngineConnPlugin.java => scala/org/apache/linkis/manager/engineplugin/python/TestPythonEngineConnPlugin.scala} (73%) rename 
linkis-engineconn-plugins/python/src/test/{java/org/apache/linkis/manager/engineplugin/python/conf/TestPythonEngineConfiguration.java => scala/org/apache/linkis/manager/engineplugin/python/conf/TestPythonEngineConfiguration.scala} (59%) create mode 100644 linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/exception/TestNoSupportEngineException.scala create mode 100644 linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/executor/TestPythonEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/factory/TestPythonEngineConnFactory.scala rename linkis-engineconn-plugins/python/src/test/{java/org/apache/linkis/manager/engineplugin/python/utils/TestKind.java => scala/org/apache/linkis/manager/engineplugin/python/utils/TestKind.scala} (66%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResponseTaskYarnResource.java => linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/utils/TestState.scala (52%) delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/ShellEngineConnPlugin.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/common/ShellEngineConnPluginConst.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/conf/ShellEngineConnConf.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/ShellCodeErrorException.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.java delete mode 100644 
linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/YarnAppIdExtractor.java create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/ShellEngineConnPlugin.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/builder/ShellProcessEngineConnLaunchBuilder.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/common/ShellEnginePluginConst.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/conf/ShellEngineConnConf.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellECTaskInfo.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/YarnAppIdExtractor.scala delete mode 100644 
linkis-engineconn-plugins/shell/src/test/java/org/apache/linkis/manager/engineplugin/shell/executor/TestShellEngineConnExecutor.java rename linkis-engineconn-plugins/shell/src/test/{java/org/apache/linkis/manager/engineplugin/shell/TestShellEngineConnPlugin.java => scala/org/apache/linkis/manager/engineplugin/shell/TestShellEngineConnPlugin.scala} (75%) rename linkis-engineconn-plugins/shell/src/test/{java/org/apache/linkis/manager/engineplugin/shell/common/TestShellEngineConnPluginConst.java => scala/org/apache/linkis/manager/engineplugin/shell/common/TestShellEngineConnPluginConst.scala} (70%) rename linkis-engineconn-plugins/shell/src/test/{java/org/apache/linkis/manager/engineplugin/shell/exception/TestNoCorrectUserException.java => scala/org/apache/linkis/manager/engineplugin/shell/exception/TestNoCorrectUserException.scala} (67%) create mode 100644 linkis-engineconn-plugins/shell/src/test/scala/org/apache/linkis/manager/engineplugin/shell/executor/TestShellEngineConnExecutor.scala rename linkis-engineconn-plugins/spark/{src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestKafkaCala.scala => scala-2.12/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestDorisCala.scala} (73%) create mode 100644 linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/DirectPushRestfulApi.java create mode 100644 linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/DorisSinkConfig.java create mode 100644 linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/DorisSourceConfig.java create mode 100644 linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/executor/SecureRandomStringUtils.java create mode 100644 linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/DorisSink.scala rename 
linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/{SolrSource.scala => DorisSource.scala} (70%) create mode 100644 linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/utils/ArrowUtils.scala create mode 100644 linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/utils/DirectPushCache.scala delete mode 100644 linkis-engineconn-plugins/spark/src/test/resources/etltest.dolphin delete mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestExcelCala.scala delete mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestRedisCalc.scala delete mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestRocketmqCala.scala create mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestArrowUtil.scala create mode 100644 linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/client/RemoteClientHolder.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/NotEnoughResource.java => linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/exception/DataSourceRpcErrorException.scala (69%) create mode 100644 linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/ConnectParamsResolver.scala create mode 100644 linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopDataSourceParamsResolver.scala delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.java delete mode 100644 
linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.java rename {linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/java/org/apache/linkis/engineconn/common => linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino}/password/CommandPasswordCallback.java (93%) rename {linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/java/org/apache/linkis/engineconn/common => linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino}/password/StaticPasswordCallback.java (95%) delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/utils/TrinoCode.java create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/TrinoEngineConnPlugin.scala rename linkis-engineconn-plugins/trino/src/main/{java/org/apache/linkis/engineplugin/trino/builder/TrinoProcessEngineConnLaunchBuilder.java => scala/org/apache/linkis/engineplugin/trino/builder/TrinoProcessEngineConnLaunchBuilder.scala} (67%) create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoEngineConfig.scala rename linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/utils/ProtocolUtilsTest.scala => linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/exception/TrinoException.scala (56%) create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/utils/TrinoCode.scala rename 
linkis-engineconn-plugins/trino/src/main/{java/org/apache/linkis/engineplugin/trino/utils/TrinoSQLHook.java => scala/org/apache/linkis/engineplugin/trino/utils/TrinoSQLHook.scala} (72%) create mode 100644 linkis-extensions/linkis-et-monitor/pom.xml create mode 100644 linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/ResourceOperationType.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java (86%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsWriter.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java (61%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java rename linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ResourceValueBean.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java (69%) create mode 100644 
linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ApplicationConfiguration.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineOperateService.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java (51%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/RegisterEMResponse.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java (59%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSet.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java (51%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResourceInfo.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java (69%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelReportRequest.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java (58%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.java => 
linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java (65%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineLock.java => linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java (52%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EntranceTaskMonitor.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java create mode 100644 
linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskArchiveClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/JobMonitorUtils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml create mode 100644 linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml create mode 100644 linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml create mode 100644 linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala rename linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ProtocolUtils.scala => 
linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala (51%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineService.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala (78%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineRecord.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java (60%) rename linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoStateInvalidException.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala (72%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala create mode 100644 
linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/GetEngineConnResourceRequest.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala (76%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/index/JobIndexHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/index/JobIndexRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/index/JobIndexSender.scala create mode 100644 
linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala create mode 100644 
linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/ECAvailableRule.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala (69%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResource.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala (77%) create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala create mode 100644 
linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/EngineLock.java => linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala (82%) create mode 100644 linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java create mode 100644 linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java create mode 100644 linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/pom.xml (98%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/LinkisBMLApplication.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/Constant.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/ExecutorManager.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/HdfsResourceHelper.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/LocalResourceHelper.java (100%) rename linkis-public-enhancements/{linkis-bml => 
}/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/OperationEnum.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/ResourceHelper.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/ResourceHelperFactory.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/common/ScheduledTask.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/BmlProjectDao.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/DownloadDao.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/ResourceDao.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/TaskDao.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/VersionDao.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/entity/BmlProject.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/entity/DownloadModel.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/entity/Resource.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/entity/ResourceTask.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/entity/ResourceVersion.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/entity/Version.java (100%) rename 
linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/errorcode/BmlServerErrorCodeSummary.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/restful/BmlProjectRestful.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/restful/BmlRestfulApi.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/restful/RestfulUtils.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/BmlProjectService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/BmlService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/BmlShareResourceService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/DownloadService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/ResourceService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/TaskService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/VersionService.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/BmlProjectServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/BmlServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-bml => 
}/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/BmlShareResourceServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/DownloadServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/ResourceServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/TaskServiceImpl.java (98%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/service/impl/VersionServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/threading/Scheduler.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/threading/Task.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/threading/TaskState.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/util/HttpRequestHelper.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/util/MD5Utils.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/vo/ResourceBasicVO.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/vo/ResourceVO.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/java/org/apache/linkis/bml/vo/ResourceVersionsVO.java (100%) rename linkis-public-enhancements/{linkis-bml/linkis-bml-server/src/main/resources/mapper/postgresql => linkis-bml-server/src/main/resources/mapper/common}/BmlProjectMapper.xml (63%) rename 
linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/resources/mapper/common/DownloadMapper.xml (78%) rename linkis-public-enhancements/{linkis-bml/linkis-bml-server/src/main/resources/mapper/mysql => linkis-bml-server/src/main/resources/mapper/common}/ResourceMapper.xml (82%) rename linkis-public-enhancements/{linkis-bml/linkis-bml-server/src/main/resources/mapper/mysql => linkis-bml-server/src/main/resources/mapper/common}/TaskMapper.xml (91%) rename linkis-public-enhancements/{linkis-bml/linkis-bml-server/src/main/resources/mapper/postgresql => linkis-bml-server/src/main/resources/mapper/common}/VersionMapper.xml (74%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/common/BmlAuthorityException.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/common/BmlPermissionDeniedException.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/common/BmlQueryFailException.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/common/BmlResourceExpiredException.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/common/BmlServerParaErrorException.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/conf/BmlServerConfiguration.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/protocol/BmlServerProtocol.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/main/scala/org/apache/linkis/bml/rpc/BmlReceiver.scala (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/Scan.java (100%) rename 
linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/WebApplicationServer.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/common/HdfsResourceHelperTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/common/LocalResourceHelperTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/common/VersionServiceImplTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BaseDaoTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java (92%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/DownloadDaoTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/TaskDaoTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/service/BmlProjectServiceTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/service/DownloadServiceImplTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/service/ResourceServiceImplTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/service/TaskServiceImplTest.java (100%) rename 
linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/java/org/apache/linkis/bml/service/VersionServiceImplTest.java (100%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/resources/application.properties (71%) rename linkis-public-enhancements/{linkis-bml => }/linkis-bml-server/src/test/resources/create.sql (100%) delete mode 100644 linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/resources/mapper/mysql/BmlProjectMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/resources/mapper/mysql/VersionMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/resources/mapper/postgresql/ResourceMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/resources/mapper/postgresql/TaskMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/resources/create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-bml/pom.xml create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java rename linkis-public-enhancements/{linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/CgManagerLabelMapperTest.java => linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java} (52%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelFilter.java => linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/DepartmentMapper.java (73%) rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/UserLabelService.java => linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/DepartmentTenantMapper.java (55%) create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Variable.java => linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java (55%) create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/DepartmentTenantVo.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/DepartmentVo.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshEngineConnResourceRequest.java => linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java (77%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/AvailableResource.java => 
linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java (71%) create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateRestfulApi.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/EMRegisterService.java => linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/DepartmentService.java (71%) create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/DepartmentServiceImpl.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java rename linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/{HttpsUtil.java => ClientUtil.java} (93%) create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/AcrossClusterRuleMapper.xml create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigKeyLimitForUserMapper.xml rename 
linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/ConfigMapper.xml (52%) rename linkis-public-enhancements/{linkis-pes-publicservice/src/main/resources/mapper/postgresql/UdfManagerMapper.xml => linkis-configuration/src/main/resources/mapper/common/DepartmentMapper.xml} (50%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{postgresql/UserTenantMapper.xml => common/DepartmentTenantMapper.xml} (53%) create mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/TemplateConfigKeyMapper.xml rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/UserIpMapper.xml (93%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/UserTenantMapper.xml (87%) delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/UserIpMapper.xml create mode 100644 linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java create mode 100644 linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-configuration/src/test/resources/create_pg.sql create mode 100644 linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql delete mode 100644 
linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/resources/mapper/postgresql/contextHistoryMapper.xml delete mode 100644 linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/resources/mapper/postgresql/contextIDListenerMapper.xml delete mode 100644 linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/resources/mapper/postgresql/contextIDMapper.xml delete mode 100644 linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/resources/mapper/postgresql/contextMapMapper.xml delete mode 100644 linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextMapMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-context-service/pom.xml rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/pom.xml (94%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/ContextSearch.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/DefaultContextSearch.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/AbstractCommonCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/AtomicCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/BinaryLogicCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/Condition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/ConditionType.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/UnaryLogicCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/AndConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ConditionBuilder.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ConditionBuilderImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ContainsConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ContextScopeConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ContextTypeConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/ContextValueTypeConditionParser.java (67%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/NearestConditionParser.java (100%) rename 
linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/NotConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/OrConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/construction/RegexConditionParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/AndCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/ContainsCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/ContextScopeCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/ContextTypeCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/ContextValueTypeCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/NearestCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/NotCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/OrCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/impl/RegexCondition.java (100%) rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineConnCanKillService.java => linkis-public-enhancements/linkis-cs-server/src/main/java/org/apache/linkis/cs/conf/CSConfiguration.java (67%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/ContextCacheService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/DefaultContextCacheService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/ContextCache.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/DefaultContextAddListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/DefaultContextCache.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/csid/ContextIDValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/csid/ContextIDValueGenerator.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/csid/impl/ContextIDValueGeneratorImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/csid/impl/ContextIDValueImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/cskey/ContextKeyValueContext.java (100%) rename 
linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/cskey/ContextValueMapSet.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/cskey/impl/ContextValueMapSetImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/cskey/impl/DefaultContextKeyValueContext.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cache/guava/ContextIDRemoveListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/cleaner/AUTOCleaner.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/conf/ContextCacheConf.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/index/ContextInvertedIndex.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/index/ContextInvertedIndexSet.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/index/ContextInvertedIndexSetImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/index/DefaultContextInvertedIndex.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/ContextCacheMetric.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/ContextIDMetric.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/DefaultContextCacheMetric.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/DefaultContextIDMetric.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/Metrtic.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/ObjectInfo.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/metric/SizeEstimator.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/parser/ContextKeyValueParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/parser/DefaultContextKeyValueParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/contextcache/utils/ContextCacheUtils.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummary.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/exception/ContextSearchFailedException.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/AbstractConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/ConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/fetcher/AbstractContextCacheFetcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/fetcher/ContextCacheFetcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/fetcher/ContextTypeContextSearchFetcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/fetcher/IterateContextCacheFetcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/AndConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/BinaryLogicConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/ContainsConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/ContextScopeConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/ContextTypeConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/ContextValueTypeConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/NearestConditionExecution.java (100%) rename 
linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/NotConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/OrConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/RegexConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/impl/UnaryLogicConditionExecution.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/AbstractContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/AndLogicContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/BinaryLogicContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/ConditionMatcherResolver.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/ContainsContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/ContextScopeContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/ContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/ContextTypeContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/ContextValueTypeContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/NearestLogicContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/NotLogicContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/OrLogicContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/RegexContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/SkipContextSearchMather.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/matcher/UnaryLogicContextSearchMatcher.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/ruler/AbstractContextSearchRuler.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/ruler/CommonListContextSearchRuler.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/ruler/ContextSearchRuler.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/execution/ruler/NearestContextSearchRuler.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/AbstractContextHAManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ContextHAManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/DefaultContextHAManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/conf/ContextHighAvailableConf.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/exception/CSErrorCode.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ha/BackupInstanceGenerator.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ha/ContextHAChecker.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ha/ContextHAIDGenerator.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ha/impl/BackupInstanceGeneratorImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ha/impl/ContextHACheckerImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/ha/impl/ContextHAIDGeneratorImpl.java (100%) rename 
linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/pluggable/HAContextPersistenceManagerImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/highavailable/proxy/MethodInterceptorImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/ConditionOptimizer.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/OptimizedCondition.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/cost/ConditionCostCalculator.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/dfs/BinaryTree.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/dfs/MinCostBinaryTree.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/dfs/Node.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/optimize/impl/CostBasedConditionOptimizer.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/ContextPersistenceBeans.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/ContextPersistenceManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/ContextPersistenceManagerImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service 
=> }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/annotation/Ignore.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/annotation/Tuning.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/aop/PersistenceTuningAspect.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/conf/PersistenceConf.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/ContextHistoryMapper.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/ContextIDListenerMapper.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/ContextIDMapper.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/ContextKeyListenerMapper.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/ContextMapMapper.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/ExtraFieldClass.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextHistory.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextID.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextIDListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextKey.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextKeyListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextKeyValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/entity/PersistenceContextValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/exception/ThrowingFunction.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/ContextHistoryPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/ContextIDListenerPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/ContextIDPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/ContextKeyListenerPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/ContextMapPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/ContextMetricsPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/KeywordContextHistoryPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/TransactionManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/ContextHistoryPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/ContextIDListenerPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/ContextIDPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/ContextKeyListenerPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/ContextMapPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/ContextMetricsPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/KeywordContextHistoryPersistenceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/persistence/impl/TransactionManagerImpl.java (100%) rename 
linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/util/PersistenceUtils.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/LinkisCSApplication.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java (93%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/enumeration/ServiceMethod.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/enumeration/ServiceType.java (100%) create mode 100644 linkis-public-enhancements/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/parser/DefaultKeywordParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/parser/KeywordMethodEntity.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/parser/KeywordParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/AbstractHttpRequestProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/ContextHistoryProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/ContextIDProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/ContextListenerProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/ContextProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/HttpProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/HttpRequestProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/HttpResponseProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/protocol/RestResponseProtocol.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/restful/ContextHistoryRestfulApi.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/restful/ContextIDRestfulApi.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/restful/ContextListenerRestfulApi.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/restful/ContextRestfulApi.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/restful/CsRestfulParent.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/CsScheduler.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/DefaultCsScheduler.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/HttpAnswerJob.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/HttpAnswerJobBuilder.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/HttpJob.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/HttpJobBuilder.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/HttpPriorityJob.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/RestJobBuilder.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/impl/CsExecuteRequest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/impl/CsExecutorExecutionManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/impl/CsJobListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/impl/CsSchedulerBean.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/impl/CsSchedulerJob.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/scheduler/impl/JobToExecuteRequestConsumer.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/AbstractService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/ContextHistoryService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/ContextIDService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/ContextListenerService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/ContextService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/Service.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/impl/ContextHistoryServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/impl/ContextIDServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/impl/ContextListenerServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/service/impl/ContextServiceImpl.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/util/CsUtils.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/resources/cs_ddl.sql (100%) rename linkis-public-enhancements/{linkis-context-service/linkis-cs-server/src/main/resources/mapper/mysql => linkis-cs-server/src/main/resources/mapper/common}/contextHistoryMapper.xml (86%) rename linkis-public-enhancements/{linkis-context-service/linkis-cs-server/src/main/resources/mapper/mysql => linkis-cs-server/src/main/resources/mapper/common}/contextIDListenerMapper.xml (83%) rename linkis-public-enhancements/{linkis-context-service/linkis-cs-server/src/main/resources/mapper/mysql => linkis-cs-server/src/main/resources/mapper/common}/contextIDMapper.xml (98%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/resources/mapper/common/contextKeyListenerMapper.xml (90%) rename linkis-public-enhancements/{linkis-context-service/linkis-cs-server/src/main/resources/mapper/mysql => linkis-cs-server/src/main/resources/mapper/common}/contextMapMapper.xml (98%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/InstanceAliasConverter.scala (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/InstanceAliasManager.scala (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/DefaultInstanceAliasConverter.scala (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/InstanceAliasManagerImpl.scala (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/RouteLabelInstanceAliasConverter.scala (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/main/scala/org/apache/linkis/cs/server/scheduler/impl/CsExecutor.scala (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/AndTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/ContainsTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/ContextScopeTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/ContextSearchTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/ContextTypeTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/RegexTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/Scan.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/WebApplicationServer.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/contextcache/test/csid/TestContextID.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/contextcache/test/keyword/TestContextKey.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/contextcache/test/keyword/TestContextKeyValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/contextcache/test/keyword/TestContextKeyValueParser.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/test/java/org/apache/linkis/cs/contextcache/test/keyword/TestContextValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/contextcache/test/service/TestContextCacheService.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/csid/TestContextID.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummaryTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/exception/ContextSearchFailedExceptionTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/highavailable/test/TestContextHAManager.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/highavailable/test/haid/TestHAID.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/highavailable/test/persist/TestPersistence.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/keyword/TestContextKey.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/keyword/TestContextKeyValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/keyword/TestContextValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/parser/ApiJsonTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextHistory.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextID.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextIDListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextKey.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextKeyListener.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextKeyValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/AContextValue.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ContextHistoryTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ContextIDListenerTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ContextIDTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ContextKeyListenerTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ContextMapTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ExtraFieldClassTest.java (100%) rename 
linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/MapTypeAdapter.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ProxyMethodA.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/ProxyTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/Scan.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/conf/PersistenceConfTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/BaseDaoTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextHistoryMapperTest.java (94%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDListenerMapperTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDMapperTest.java (89%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextKeyListenerMapperTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/utils/PersistenceUtilsTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/CsRestfulParent.java (100%) rename linkis-public-enhancements/{linkis-context-service => 
}/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/Scan.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/SchedulerTest.java (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java (93%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/resources/application.properties (77%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/resources/application.yml (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/resources/create.sql (81%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/resources/linkis.properties (100%) rename linkis-public-enhancements/{linkis-context-service => }/linkis-cs-server/src/test/resources/log4j2.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/elasticsearch/pom.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/elasticsearch/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java (96%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticParamsMapper.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java (100%) rename 
linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hdfs/pom.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hdfs/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hive/pom.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hive/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveParamsMapper.java (100%) 
rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/pom.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/AbstractSqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/ClickhouseMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java (98%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => 
linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/clickhouse/SqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/conf/SqlParamsMapper.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java (90%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/dm/SqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/greenplum/SqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/kingbase/SqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/mysql/SqlConnection.java (96%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/oracle/SqlConnection.java (100%) rename 
linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/postgres/SqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/sqlserver/SqlConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/kafka/pom.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/kafka/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaConnection.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaParamsMapper.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/mongodb/pom.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/mongodb/src/main/assembly/distribution.xml (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbConnection.java (97%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => 
linkis-datasource-manager}/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbParamsMapper.java (100%) rename linkis-public-enhancements/linkis-datasource/{linkis-metadata-query => linkis-datasource-manager}/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongodbMetaService.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/pom.xml (97%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/InsLabelAutoConfiguration.java (94%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/async/AsyncConsumerQueue.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/async/GenericAsyncConsumerQueue.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/cache/InsLabelCacheConfiguration.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/conf/InsLabelConf.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/dao/InsLabelRelationDao.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/dao/InstanceInfoDao.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/dao/InstanceLabelDao.java (100%) rename linkis-public-enhancements/{linkis-instance-label => 
}/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InsPersistenceLabel.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InsPersistenceLabelValue.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InstanceInfo.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/errorcode/LinkisInstanceLabelErrorCodeSummary.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/exception/InstanceErrorException.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java (77%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/InsLabelAccessService.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/InsLabelService.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/InsLabelServiceAdapter.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/annotation/AdapterMode.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java (98%) rename linkis-public-enhancements/{linkis-instance-label => 
}/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelServiceAdapter.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/SpringInsLabelService.java (97%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/utils/EntityParser.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/vo/InsPersistenceLabelSearchVo.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/vo/InstanceInfoVo.java (100%) rename linkis-public-enhancements/{linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql => linkis-instance-label-server/src/main/resources/mapper/common}/InsLabelRelationMapper.xml (74%) rename linkis-public-enhancements/{linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql => linkis-instance-label-server/src/main/resources/mapper/common}/InstanceInfoMapper.xml (74%) rename linkis-public-enhancements/{linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql => linkis-instance-label-server/src/main/resources/mapper/common}/InstanceLabelMapper.xml (70%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/scala/org/apache/linkis/instance/label/service/InsLabelRpcService.scala (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/main/scala/org/apache/linkis/instance/label/service/rpc/DefaultInsLabelRpcService.scala (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/Scan.java 
(100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/WebApplicationServer.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/dao/BaseDaoTest.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/dao/InsLabelRelationDaoTest.java (90%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/dao/InstanceLabelDaoTest.java (94%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelServiceTest.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/utils/EntityParserTest.java (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/resources/application.properties (100%) rename linkis-public-enhancements/{linkis-instance-label => }/linkis-instance-label-server/src/test/resources/create.sql (100%) delete mode 100644 linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml delete mode 100644 linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InstanceInfoMapper.xml delete mode 100644 linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InstanceLabelMapper.xml delete mode 100644 linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/dao/InstanceInfoDaoTest.java delete mode 100644 
linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/test/resources/create_pg.sql create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobStatisticsMapper.java create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/entity/JobStatistics.java create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/StatisticsRestfulApi.java create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/util/JobhistoryUtils.java create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/common/JobStatisticsMapper.xml create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobStatisticsMapper.xml create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/postgresql/JobStatisticsMapper.xml rename linkis-web-next/src/components/editor/highRiskGrammar.ts => linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/JobStatisticsQueryService.java (65%) create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobStatisticsQueryServiceImpl.scala delete mode 100644 linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobDetailMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-jobhistory/src/test/resources/create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummaryTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/builder/ContextClientFactoryTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/builder/HttpContextClientConfigTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/errorcode/CsClientErrorCodeSummaryTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/Test.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextKey.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/listener/CommonContextKeyListener.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/no_context_search/TestClear.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/RestfulTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/service/TestInfo.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/service/TestRemove.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/service/TestSearchService.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/test_multiuser/TestChangeContext.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/test_multiuser/TestCreateContext.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/utils/ContextClientConfTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/utils/ContextServiceUtilsTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/utils/SerializeHelperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/TestDataSourceClient.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/TestHiveClient.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/TestMysqlClient.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/config/DatasourceClientConfigTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/exception/DataSourceClientBuilderExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/imp/LinkisDataSourceRemoteClientTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/ClientConfigurationTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/ErrorCodeClientBuilderTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/LinkisErrorCodeClientTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/handler/ErrorCodeHandlerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandlerTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/manager/LinkisErrorCodeManagerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/synchronizer/LinkisErrorCodeSynchronizerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/errorcode/client/action/ErrorCodeActionTest.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/action/OpenScriptFromBMLActionTest.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/conf/WorkspaceClientConfTest.scala create mode 100644 linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/PythonModuleInfoVO.java create mode 100644 linkis-public-enhancements/linkis-pes-common/src/main/scala/org/apache/linkis/udf/api/rpc/RequestPythonModuleProtocol.scala rename linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/engine/EngineInfo.java => linkis-public-enhancements/linkis-pes-common/src/main/scala/org/apache/linkis/udf/api/rpc/ResponsePythonModuleProtocol.scala (60%) delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/ContextScopeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/ContextTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/DBTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/WorkTypeTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextHistoryTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextIDTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextKeyTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextKeyValueTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextValueTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/helper/ContextSerializationHelperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/context/CombinedNodeIDContextIDSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/context/CommonContextKeySerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/LinkisBMLResourceSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/data/CSResultDataSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/data/LinkisJobDataSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/metadata/CSTableSerializerTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/object/CSFlowInfosSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/test/ContextSerializationHelperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/utils/CSCommonUtilsTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/utils/CSHighAvailableUtilsTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextID.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextKeyValue.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/errorcode/common/CommonConfTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/MdmConfigurationTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/cache/CacheConfigurationTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/cache/ConnCacheManagerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaMethodInvokeExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaRuntimeExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/udf/excepiton/UDFExceptionTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/udf/utils/ConstantVarTest.java create mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java rename linkis-public-enhancements/linkis-pes-publicservice/src/{test/java/org/apache/linkis/basedatamanager/server/dao/BaseDaoTest.java => main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java} (65%) create mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java create mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java create mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServicelmpl.java create mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/ConfigurationConfigKeyMapper.xml (68%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceAccessMapper.xml (91%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceEnvMapper.xml (90%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceTypeKeyMapper.xml (92%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceTypeMapper.xml (86%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/GatewayAuthTokenMapper.xml (90%) rename 
linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{postgresql => common}/PsErrorCodeMapper.xml (86%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/RmExternalResourceProviderMapper.xml (86%) rename linkis-web-next/src/pages/parameterConfig/scripts/index.vue => linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/common/UdfBaseInfoMapper.xml (76%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/UdfManagerMapper.xml (93%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/UdfTreeMapper.xml (88%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/VarMapper.xml (91%) delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/mysql/PsErrorCodeMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/ConfigurationConfigKeyMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceAccessMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceEnvMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceTypeKeyMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceTypeMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/GatewayAuthTokenMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/RmExternalResourceProviderMapper.xml delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/UdfTreeMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/VarMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/Scan.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/WebApplicationServer.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/ConfigurationConfigKeyMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/ConfigurationConfigValueMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/ConfigurationKeyEngineRelationMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceAccessMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceEnvMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceTypeKeyMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceTypeMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/EngineConnPluginBmlResourcesMapperTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/GatewayAuthTokenMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/PsErrorCodeMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/RmExternalResourceProviderMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/UdfTreeMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/ConfigurationTemplateRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceAccessRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceEnvRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeKeyRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/MvcUtils.java delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceAccessServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceEnvServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceTypeKeyServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceTypeServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/ErrorCodeServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/GatewayAuthTokenServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/RmExternalResourceProviderServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/UdfManagerServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/UdfTreeServiceTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/Scan.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/WebApplicationServer.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/BaseDaoTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/Scan.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/WebApplicationServer.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/dao/BaseDaoTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/dao/VarMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/restful/MvcUtils.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/restful/api/VariableRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/service/VariableServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/error_code_create.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/error_code_create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/info.text delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/variable_create.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/variable_create_pg.sql create mode 100644 linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/data.sql rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/pom.xml (98%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/api/UDFRestfulApi.java (77%) rename linkis-public-enhancements/{linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/UdfManagerMapperTest.java => linkis-udf-service/src/main/java/org/apache/linkis/udf/dao/PythonModuleInfoMapper.java} (50%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/dao/UDFDao.java (97%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/dao/UDFTreeDao.java (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/dao/UDFVersionDao.java (100%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/entity/PythonModuleInfo.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/rm/external/dao/ExternalResourceProviderDaoTest.java => linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/PythonModuleInfoService.java (54%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/UDFService.java (97%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/UDFTreeService.java (100%) create mode 100644 
linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/PythonModuleInfoServiceImpl.java rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java (99%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFTreeServiceImpl.java (100%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/resources/mapper/common/PythonModuleInfoMapper.xml rename linkis-public-enhancements/{linkis-udf/linkis-udf-service/src/main/resources/mapper/postgresql => linkis-udf-service/src/main/resources/mapper/common}/UDFDao.xml (76%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/resources/mapper/common/UDFTreeDao.xml (73%) rename linkis-public-enhancements/{linkis-udf/linkis-udf-service/src/main/resources/mapper/postgresql => linkis-udf-service/src/main/resources/mapper/common}/UDFVersionDao.xml (84%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/scala/org/apache/linkis/udf/api/rpc/UdfReceiver.scala rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/scala/org/apache/linkis/udf/api/rpc/UdfReceiverChooser.scala (83%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/main/scala/org/apache/linkis/udf/utils/UdfConfiguration.scala (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/Scan.java (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/WebApplicationServer.java (100%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/java/org/apache/linkis/udf/api/PythonModuleRestfulApiTest.java rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/api/UDFRestfulApiTest.java (100%) rename 
linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/BaseDaoTest.java (100%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/PythonModuleInfoMapperTest.java rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFDaoTest.java (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFTreeDaoTest.java (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFVersionDaoTest.java (100%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/PythonModuleInfoServiceTest.java rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFServiceTest.java (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFTreeServiceTest.java (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/resources/application.properties (76%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/resources/create.sql rename linkis-public-enhancements/{linkis-udf/linkis-udf-service/src/test/resources/create.sql => linkis-udf-service/src/test/resources/data.sql} (62%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/resources/linkis.properties (100%) rename linkis-public-enhancements/{linkis-udf => }/linkis-udf-service/src/test/scala/org/apache/linkis/udf/utils/UdfConfigurationTest.scala (100%) delete mode 100644 linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml delete mode 100644 linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFVersionDao.xml delete mode 100644 
linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/scala/org/apache/linkis/udf/api/rpc/UdfReceiver.scala delete mode 100644 linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create_pg.sql create mode 100644 linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/ECMRequestGatewayParser.scala create mode 100644 linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/IpPriorityLoadBalancer.java create mode 100644 linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/LinkisLoadBalancerClientConfiguration.java create mode 100644 linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/SpringCloudGatewayConstant.java create mode 100644 linkis-web-next/public/log/noLog.svg rename linkis-web-next/public/sidebar/{dateReport.svg => globalVariable.svg} (100%) delete mode 100644 linkis-web-next/src/components/editor/editor.vue delete mode 100644 linkis-web-next/src/components/editor/index.less create mode 100644 linkis-web-next/src/components/editor/index.vue delete mode 100644 linkis-web-next/src/components/editor/keyword/hql.ts delete mode 100644 linkis-web-next/src/components/editor/keyword/python.ts delete mode 100644 linkis-web-next/src/components/editor/keyword/sas.ts delete mode 100644 linkis-web-next/src/components/editor/keyword/sh.ts delete mode 100644 linkis-web-next/src/components/editor/languages/hql.ts delete mode 100644 linkis-web-next/src/components/editor/languages/out.ts delete mode 100644 linkis-web-next/src/components/editor/languages/sas.ts delete mode 100644 linkis-web-next/src/components/editor/languages/sh.ts delete mode 100644 linkis-web-next/src/components/editor/linkis_dml.sql delete mode 
100644 linkis-web-next/src/components/editor/sqlFormatter/core/Formatter.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/core/Indentation.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/core/InlineBlock.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/core/Tokenizer.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/languages/Db2Formatter.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/languages/N1qlFormatter.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/languages/PlSqlFormatter.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/languages/StandardSqlFormatter.ts delete mode 100644 linkis-web-next/src/components/editor/sqlFormatter/sqlFormatter.ts delete mode 100644 linkis-web-next/src/dss/assets/styles/app.less delete mode 100644 linkis-web-next/src/dss/assets/styles/normalize.less delete mode 100644 linkis-web-next/src/dss/view/app.vue delete mode 100644 linkis-web-next/src/dss/view/commonIframe/index.vue delete mode 100644 linkis-web-next/src/dss/view/layout.vue delete mode 100644 linkis-web-next/src/dss/view/logPage/index.vue create mode 100644 linkis-web-next/src/pages/ECMManagement/index.vue create mode 100644 linkis-web-next/src/pages/ECMManagement/modal.vue delete mode 100644 linkis-web-next/src/pages/globalHistoryManagement/drawer.vue create mode 100644 linkis-web-next/src/pages/globalHistoryManagement/drawer/index.vue create mode 100644 linkis-web-next/src/pages/globalHistoryManagement/drawer/taskDetails.vue create mode 100644 linkis-web-next/src/pages/globalHistoryManagement/drawer/taskLogs.vue create mode 100644 linkis-web-next/src/pages/globalHistoryManagement/drawer/taskResults.vue delete mode 100644 linkis-web-next/src/pages/globalHistoryManagement/table.vue create mode 100644 linkis-web-next/src/pages/globalVariables/index.vue delete mode 100644 
linkis-web-next/src/pages/login/index.vue create mode 100644 linkis-web-next/src/pages/microServiceManagement/index.vue create mode 100644 linkis-web-next/src/pages/microServiceManagement/modal.vue delete mode 100644 linkis-web-next/src/pages/parameterConfig/handleChange.ts delete mode 100644 linkis-web-next/src/pages/parameterConfig/ide/index.vue delete mode 100644 linkis-web-next/src/pages/parameterConfig/list.vue delete mode 100644 linkis-web-next/src/pages/parameterConfig/tableauServer/index.vue create mode 100644 linkis-web-next/src/pages/resource/history/drawer/index.vue create mode 100644 linkis-web-next/src/pages/resource/history/drawer/log.vue delete mode 100644 linkis-web-next/src/util/currentModules.ts create mode 100644 linkis-web/release-docs/licenses/LICENSE-hint.css.txt create mode 100644 linkis-web/src/apps/linkis/assets/styles/hint.min.css rename linkis-web-next/src/pages/resource/index.less => linkis-web/src/apps/linkis/module/acrossClusterRule/index.js (87%) rename linkis-web-next/src/components/editor/sqlFormatter/core/Params.ts => linkis-web/src/apps/linkis/module/acrossClusterRule/index.scss (53%) create mode 100644 linkis-web/src/apps/linkis/module/acrossClusterRule/index.vue rename linkis-web-next/src/pages/resource/history/index.less => linkis-web/src/apps/linkis/module/configManagement/index.js (87%) create mode 100644 linkis-web/src/apps/linkis/module/configManagement/index.scss create mode 100644 linkis-web/src/apps/linkis/module/configManagement/index.vue rename linkis-web-next/src/pages/resource/manage/index.less => linkis-web/src/apps/linkis/module/statisticsDashboard/index.js (85%) create mode 100644 linkis-web/src/apps/linkis/module/statisticsDashboard/index.scss create mode 100644 linkis-web/src/apps/linkis/module/statisticsDashboard/index.vue create mode 100644 linkis-web/src/apps/linkis/module/statisticsDashboard/statisticsDashboard.vue rename linkis-web-next/src/components/editor/index.ts => 
linkis-web/src/apps/linkis/module/userConfig/index.js (89%) rename linkis-web-next/src/components/editor/sqlFormatter/core/tokenTypes.ts => linkis-web/src/apps/linkis/module/userConfig/index.scss (64%) create mode 100644 linkis-web/src/apps/linkis/module/userConfig/index.vue diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index f38af2d393..531a1ff88a 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -12,7 +12,7 @@ and session management. ### Related issues/PRs -Related issues: #590 +Related issues: close #590 close #591 Related pr:#591 diff --git a/.github/workflows/check-license.yml b/.github/workflows/check-license.yml index 3c79607dc3..2a6cf67f23 100644 --- a/.github/workflows/check-license.yml +++ b/.github/workflows/check-license.yml @@ -36,7 +36,7 @@ jobs: echo "rat_file=$rat_file" if [[ -n "$rat_file" ]];then echo "check error!" && cat $rat_file && exit 123;else echo "check success!" ;fi - name: Upload the report - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: license-check-report path: "**/target/rat.txt" diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml index ce0b8ceec9..5c97ec8229 100644 --- a/.github/workflows/integration-test.yml +++ b/.github/workflows/integration-test.yml @@ -45,7 +45,7 @@ jobs: TAG: ${{ github.sha }} SKIP_TEST: true HUB: ghcr.io/apache/linkis - LINKIS_VERSION: 1.5.0-SNAPSHOT + LINKIS_VERSION: 1.6.0 steps: - name: Free up disk space run: | diff --git a/.github/workflows/publish-docker.yaml b/.github/workflows/publish-docker.yaml index ec07c2eff3..73e8d840b5 100644 --- a/.github/workflows/publish-docker.yaml +++ b/.github/workflows/publish-docker.yaml @@ -34,7 +34,7 @@ jobs: TAG: ${{ github.sha }} SKIP_TEST: true HUB: ghcr.io/apache/linkis - LINKIS_VERSION: 1.5.0-SNAPSHOT + LINKIS_VERSION: 1.6.0 steps: - name: Checkout uses: actions/checkout@v2 diff --git 
a/.github/workflows/publish-snapshot.yml b/.github/workflows/publish-snapshot.yml index 8bb65c90f4..93e34ab9c2 100644 --- a/.github/workflows/publish-snapshot.yml +++ b/.github/workflows/publish-snapshot.yml @@ -29,7 +29,7 @@ jobs: fail-fast: false matrix: branch: - - dev-1.4.0 + - dev-1.6.0 steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/LICENSE b/LICENSE index c1c8f6f847..e2ef2e8970 100644 --- a/LICENSE +++ b/LICENSE @@ -241,6 +241,7 @@ The following file are provided under the Apache 2.0 License. linkis-engineconn-plugins/hbase/hbase-shims-1.4.3/src/main/resources/hbase-ruby/* linkis-engineconn-plugins/hbase/hbase-shims-2.2.6/src/main/resources/hbase-ruby/* linkis-engineconn-plugins/hbase/hbase-shims-2.5.3/src/main/resources/hbase-ruby/* + linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/executor/SecureRandomStringUtils.java The files: .mvn/wrapper/MavenWrapperDownloader.java diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/FatalException.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/FatalException.java index 4847d76d63..26a6992c05 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/FatalException.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/FatalException.java @@ -17,7 +17,7 @@ package org.apache.linkis.common.exception; -public class FatalException extends LinkisRuntimeException { +public class FatalException extends LinkisException { private ExceptionLevel level = ExceptionLevel.FATAL; public FatalException(int errCode, String desc) { diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java index e434bd72ee..f4eaa6697b 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java +++ 
b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java @@ -44,6 +44,8 @@ public interface Fs extends Closeable { boolean canRead(FsPath dest) throws IOException; + boolean canRead(FsPath dest, String user) throws IOException; + boolean canWrite(FsPath dest) throws IOException; boolean exists(FsPath dest) throws IOException; diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/JobHistoryInfo.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/JobHistoryInfo.java new file mode 100644 index 0000000000..2e72ea9949 --- /dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/JobHistoryInfo.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.utils; + +import java.util.Date; + +public class JobHistoryInfo { + private String jobReqId; + private String submitUser; + private String executeUser; + private String source; + private String labels; + private String params; + private String progress; + private String status; + private String logPath; + private Integer errorCode; + private String errorDesc; + private Date createdTime; + private Date updatedTime; + private String instances; + private String metrics; + private String engineType; + private String executionCode; + + public String getJobReqId() { + return jobReqId; + } + + public void setJobReqId(String jobReqId) { + this.jobReqId = jobReqId; + } + + public String getSubmitUser() { + return submitUser; + } + + public void setSubmitUser(String submitUser) { + this.submitUser = submitUser; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + + public String getParams() { + return params; + } + + public void setParams(String params) { + this.params = params; + } + + public String getProgress() { + return progress; + } + + public void setProgress(String progress) { + this.progress = progress; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public Integer getErrorCode() { + return errorCode; + } + + public void setErrorCode(Integer errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void 
setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public Date getCreatedTime() { + return createdTime; + } + + public void setCreatedTime(Date createdTime) { + this.createdTime = createdTime; + } + + public Date getUpdatedTime() { + return updatedTime; + } + + public void setUpdatedTime(Date updatedTime) { + this.updatedTime = updatedTime; + } + + public String getInstances() { + return instances; + } + + public void setInstances(String instances) { + this.instances = instances; + } + + public String getMetrics() { + return metrics; + } + + public void setMetrics(String metrics) { + this.metrics = metrics; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getExecutionCode() { + return executionCode; + } + + public void setExecutionCode(String executionCode) { + this.executionCode = executionCode; + } + + public JobHistoryInfo( + String jobReqId, + String submitUser, + String executeUser, + String source, + String labels, + String params, + String progress, + String status, + String logPath, + Integer errorCode, + String errorDesc, + Date createdTime, + Date updatedTime, + String instances, + String metrics, + String engineType, + String executionCode) { + this.jobReqId = jobReqId; + this.submitUser = submitUser; + this.executeUser = executeUser; + this.source = source; + this.labels = labels; + this.params = params; + this.progress = progress; + this.status = status; + this.logPath = logPath; + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.createdTime = createdTime; + this.updatedTime = updatedTime; + this.instances = instances; + this.metrics = metrics; + this.engineType = engineType; + this.executionCode = executionCode; + } + + public JobHistoryInfo() {} +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/LinkisUtils.java 
b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/LinkisUtils.java similarity index 64% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/LinkisUtils.java rename to linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/LinkisUtils.java index ec9f0d669e..353f80f1da 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/LinkisUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/LinkisUtils.java @@ -15,17 +15,14 @@ * limitations under the License. */ -package org.apache.linkis.manager.am.util; +package org.apache.linkis.common.utils; import org.apache.linkis.common.exception.ErrorException; import org.apache.linkis.common.exception.FatalException; import org.apache.linkis.common.exception.WarnException; -import java.time.Duration; -import java.util.concurrent.*; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.Callable; import java.util.function.Function; -import java.util.function.Supplier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,14 +30,6 @@ public class LinkisUtils { private static final Logger logger = LoggerFactory.getLogger(LinkisUtils.class); - public static final ScheduledThreadPoolExecutor defaultScheduler = - new ScheduledThreadPoolExecutor(20, threadFactory("Linkis-Default-Scheduler-Thread-", true)); - - { - defaultScheduler.setMaximumPoolSize(20); - defaultScheduler.setKeepAliveTime(5, TimeUnit.MINUTES); - } - public static T tryCatch(Callable tryOp, Function catchOp) { T result = null; try { @@ -170,78 +159,6 @@ public static T tryAndWarnMsg(Callable tryOp, String message, Logger log) }); } - /** - * Checks if event has occurred during some time period. This performs an exponential backoff to - * limit the poll calls. 
- * - * @param checkForEvent event to check, until it is true - * @param atMost most wait time - * @return - * @throws java.util.concurrent.TimeoutException throws this exception when it is timeout - * @throws java.lang.InterruptedException throws this exception when it is interrupted - */ - public static void waitUntil( - Supplier checkForEvent, Duration atMost, int radix, long maxPeriod) - throws TimeoutException, InterruptedException { - long endTime; - try { - endTime = System.currentTimeMillis() + atMost.toMillis(); - } catch (IllegalArgumentException e) { - endTime = 0L; - } - - int count = 1; - while (!checkForEvent.get()) { - long now = System.currentTimeMillis(); - if (endTime == 0 || now < endTime) { - long sleepTime = Math.max(Math.min(radix * count, maxPeriod), 100); - TimeUnit.MILLISECONDS.sleep(sleepTime); - count++; - } else { - throw new TimeoutException(); - } - } - } - - public static void waitUntil(Supplier checkForEvent, Duration atMost) - throws TimeoutException, InterruptedException { - waitUntil(checkForEvent, atMost, 100, 2000); - } - - public static ExecutorService newFixedThreadPool( - int threadNum, String threadName, boolean isDaemon) { - ThreadFactory threadFactory = threadFactory(threadName, isDaemon); - return Executors.newFixedThreadPool(threadNum, threadFactory); - } - - public static ThreadPoolExecutor newCachedThreadPool( - Integer threadNum, String threadName, Boolean isDaemon) { - ThreadPoolExecutor threadPool = - new ThreadPoolExecutor( - threadNum, - threadNum, - 120L, - TimeUnit.SECONDS, - new LinkedBlockingQueue(10 * threadNum), - threadFactory(threadName, isDaemon)); - threadPool.allowCoreThreadTimeOut(true); - return threadPool; - } - - private static ThreadFactory threadFactory(String threadName, boolean isDaemon) { - return new ThreadFactory() { - AtomicInteger num = new AtomicInteger(0); - - @Override - public Thread newThread(Runnable r) { - Thread t = new Thread(r); - t.setDaemon(isDaemon); - t.setName(threadName + 
num.getAndIncrement()); - return t; - } - }; - } - public static String getJvmUser() { return System.getProperty("user.name"); } diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java new file mode 100644 index 0000000000..1291b8bb68 --- /dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.utils; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +public class MD5Utils { + + /** + * @param plaintext + * @return + * @throws NoSuchAlgorithmException + */ + public static String encrypt(String plaintext) throws NoSuchAlgorithmException { + // 使用 MD5 算法创建 MessageDigest 对象 + MessageDigest md = MessageDigest.getInstance("MD5"); + // 更新 MessageDigest 对象中的字节数据 + md.update(plaintext.getBytes()); + // 对更新后的数据计算哈希值,存储在 byte 数组中 + byte[] digest = md.digest(); + // 将 byte 数组转换为十六进制字符串 + StringBuilder sb = new StringBuilder(); + for (byte b : digest) { + sb.append(String.format("%02x", b & 0xff)); + } + // 返回十六进制字符串 + return sb.toString(); + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java index 0278b3337e..c08d16b529 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java @@ -79,6 +79,9 @@ public abstract class SecurityUtils { private static final String JDBC_MYSQL_PROTOCOL = "jdbc:mysql"; + private static final String BLACKLIST_REGEX = + "autodeserialize|allowloadlocalinfile|allowurlinlocalinfile|allowloadlocalinfileinpath"; + /** * check mysql connection params * @@ -118,6 +121,10 @@ public static void checkJdbcConnParams( // 3. Check params. Mainly vulnerability parameters. Note the url encoding checkParams(extraParams); + + // 4. 
Check url security, especially for the possibility of malicious characters appearing on + // the host + checkUrlIsSafe(url); } /** @param url */ @@ -283,6 +290,35 @@ private static void checkParams(Map paramsMap) { } } + /** + * check url is safe + * + * @param url + */ + public static void checkUrlIsSafe(String url) { + try { + String lowercaseURL = url.toLowerCase(); + + Pattern pattern = Pattern.compile(BLACKLIST_REGEX); + Matcher matcher = pattern.matcher(lowercaseURL); + + StringBuilder foundKeywords = new StringBuilder(); + while (matcher.find()) { + if (foundKeywords.length() > 0) { + foundKeywords.append(", "); + } + foundKeywords.append(matcher.group()); + } + + if (foundKeywords.length() > 0) { + throw new LinkisSecurityException( + 35000, "url contains blacklisted characters: " + foundKeywords); + } + } catch (Exception e) { + throw new LinkisSecurityException(35000, "error occurred during url security check: " + e); + } + } + private static Map parseMysqlUrlParamsToMap(String paramsUrl) { if (StringUtils.isBlank(paramsUrl)) { return new LinkedHashMap<>(); @@ -320,4 +356,40 @@ private static boolean isNotSecurity(String key, String value, String param) { return key.toLowerCase().contains(param.toLowerCase()) || value.toLowerCase().contains(param.toLowerCase()); } + + /** + * allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false + * + * @return + */ + public static Properties getMysqlSecurityParams() { + Properties properties = new Properties(); + properties.setProperty("allowLoadLocalInfile", "false"); + properties.setProperty("autoDeserialize", "false"); + properties.setProperty("allowLocalInfile", "false"); + properties.setProperty("allowUrlInLocalInfile", "false"); + return properties; + } + + /** + * Check if the path has a relative path + * + * @param path + * @return + */ + public static boolean containsRelativePath(String path) { + if (path.startsWith("./") + || path.contains("/./") + || 
path.startsWith("../") + || path.contains("/../")) { + return true; + } + if (path.startsWith(".\\") + || path.contains("\\.\\") + || path.startsWith("..\\") + || path.contains("\\..\\")) { + return true; + } + return false; + } } diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java index 615472474d..d1cb59c397 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java @@ -17,12 +17,10 @@ package org.apache.linkis.common.utils; +import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.common.exception.VariableOperationFailedException; -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; +import java.time.*; import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.Iterator; @@ -62,9 +60,16 @@ public class VariableOperationUtils { * @return */ public static ZonedDateTime toZonedDateTime(Date date, ZoneId zoneId) { - Instant instant = date.toInstant(); - LocalDateTime localDateTime = instant.atZone(zoneId).toLocalDateTime(); - return ZonedDateTime.of(localDateTime, zoneId); + if (Configuration.VARIABLE_OPERATION_USE_NOW()) { + LocalTime currentTime = LocalTime.now(); + LocalDate localDate = date.toInstant().atZone(zoneId).toLocalDate(); + LocalDateTime localDateTime = LocalDateTime.of(localDate, currentTime); + return ZonedDateTime.of(localDateTime, zoneId); + } else { + Instant instant = date.toInstant(); + LocalDateTime localDateTime = instant.atZone(zoneId).toLocalDateTime(); + return ZonedDateTime.of(localDateTime, zoneId); + } } /** diff --git 
a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala index 8fcb4af737..f9e4718472 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala @@ -20,11 +20,17 @@ package org.apache.linkis.common class ServiceInstance { private var applicationName: String = _ private var instance: String = _ + private var registryTimestamp: Long = _ def setApplicationName(applicationName: String): Unit = this.applicationName = applicationName def getApplicationName: String = applicationName def setInstance(instance: String): Unit = this.instance = instance def getInstance: String = instance + def setRegistryTimestamp(registryTimestamp: Long): Unit = this.registryTimestamp = + registryTimestamp + + def getRegistryTimestamp: Long = registryTimestamp + override def equals(other: Any): Boolean = other match { case that: ServiceInstance => applicationName == that.applicationName && @@ -42,7 +48,9 @@ class ServiceInstance { .foldLeft(0)((a, b) => 31 * a + b) } - override def toString: String = s"ServiceInstance($applicationName, $instance)" + override def toString: String = + s"ServiceInstance($applicationName, $instance, $registryTimestamp)" + } object ServiceInstance { @@ -54,6 +62,14 @@ object ServiceInstance { serviceInstance } + def apply(applicationName: String, instance: String, registryTimestamp: Long): ServiceInstance = { + val serviceInstance = new ServiceInstance + serviceInstance.setApplicationName(applicationName) + serviceInstance.setInstance(instance) + serviceInstance.setRegistryTimestamp(registryTimestamp) + serviceInstance + } + def unapply(serviceInstance: ServiceInstance): Option[(String, String)] = if (serviceInstance != null) { Some(serviceInstance.applicationName, serviceInstance.instance) diff 
--git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala index 14febab63a..9bfa053b77 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala @@ -232,19 +232,20 @@ private[conf] object BDPConfiguration extends Logging { private[common] def formatValue[T](defaultValue: T, value: Option[String]): Option[T] = { if (value.isEmpty || value.exists(StringUtils.isEmpty)) return Option(defaultValue) + val trimValue = value.map(_.trim) val formattedValue = defaultValue match { - case _: String => value - case _: Byte => value.map(_.toByte) - case _: Short => value.map(_.toShort) - case _: Char => value.map(_.toCharArray.apply(0)) - case _: Int => value.map(_.toInt) - case _: Long => value.map(_.toLong) - case _: Float => value.map(_.toFloat) - case _: Double => value.map(_.toDouble) - case _: Boolean => value.map(_.toBoolean) - case _: TimeType => value.map(new TimeType(_)) - case _: ByteType => value.map(new ByteType(_)) - case null => value + case _: String => trimValue + case _: Byte => trimValue.map(_.toByte) + case _: Short => trimValue.map(_.toShort) + case _: Char => trimValue.map(_.toCharArray.apply(0)) + case _: Int => trimValue.map(_.toInt) + case _: Long => trimValue.map(_.toLong) + case _: Float => trimValue.map(_.toFloat) + case _: Double => trimValue.map(_.toDouble) + case _: Boolean => trimValue.map(_.toBoolean) + case _: TimeType => trimValue.map(new TimeType(_)) + case _: ByteType => trimValue.map(new ByteType(_)) + case null => trimValue } formattedValue.asInstanceOf[Option[T]] } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala 
b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala index 9443f15262..822bc2aa07 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala @@ -33,7 +33,7 @@ object Configuration extends Logging { val IS_PROMETHEUS_ENABLE = CommonVars("wds.linkis.prometheus.enable", false) - val IS_MULTIPLE_YARN_CLUSTER = CommonVars("linkis.multiple.yarn.cluster", false) + val IS_MULTIPLE_YARN_CLUSTER = CommonVars("linkis.multiple.yarn.cluster", false).getValue val PROMETHEUS_ENDPOINT = CommonVars("wds.linkis.prometheus.endpoint", "/actuator/prometheus") @@ -65,10 +65,15 @@ object Configuration extends Logging { val JOB_HISTORY_ADMIN = CommonVars("wds.linkis.jobhistory.admin", "hadoop") + val JOB_HISTORY_DEPARTMENT_ADMIN = CommonVars("wds.linkis.jobhistory.department.admin", "hadoop") + // Only the specified token has permission to call some api val GOVERNANCE_STATION_ADMIN_TOKEN_STARTWITH = "ADMIN-" - val VARIABLE_OPERATION: Boolean = CommonVars("wds.linkis.variable.operation", false).getValue + val VARIABLE_OPERATION_USE_NOW: Boolean = + CommonVars("wds.linkis.variable.operation.use.now", true).getValue + + val IS_VIEW_FS_ENV = CommonVars("wds.linkis.env.is.viewfs", true) val ERROR_MSG_TIP = CommonVars( @@ -76,6 +81,18 @@ object Configuration extends Logging { "The request interface %s is abnormal. 
You can try to troubleshoot common problems in the knowledge base document" ) + val LINKIS_TOKEN = CommonVars("wds.linkis.token", "LINKIS-AUTH") + + val GLOBAL_CONF_CHN_NAME = "全局设置" + + val GLOBAL_CONF_CHN_OLDNAME = "通用设置" + + val GLOBAL_CONF_CHN_EN_NAME = "GlobalSettings" + + val GLOBAL_CONF_SYMBOL = "*" + + val GLOBAL_CONF_LABEL = "*-*,*-*" + def isAdminToken(token: String): Boolean = { if (StringUtils.isBlank(token)) { false @@ -124,10 +141,22 @@ object Configuration extends Logging { .exists(username.equalsIgnoreCase) } + def isDepartmentAdmin(username: String): Boolean = { + val departmentAdminUsers = JOB_HISTORY_DEPARTMENT_ADMIN.getHotValue.split(",") + departmentAdminUsers.exists(username.equalsIgnoreCase) + } + def getJobHistoryAdmin(): Array[String] = { val adminUsers = GOVERNANCE_STATION_ADMIN.getHotValue.split(",") val historyAdminUsers = JOB_HISTORY_ADMIN.getHotValue.split(",") (adminUsers ++ historyAdminUsers).distinct } + def getGlobalCreator(creator: String): String = creator match { + case Configuration.GLOBAL_CONF_CHN_NAME | Configuration.GLOBAL_CONF_CHN_OLDNAME | + Configuration.GLOBAL_CONF_CHN_EN_NAME => + GLOBAL_CONF_SYMBOL + case _ => creator + } + } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala index 77c82f3883..e558e765be 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala @@ -33,7 +33,7 @@ object LogUtils { } def generateERROR(rawLog: String): String = { - getTimeFormat + " " + "ERROR" + " " + rawLog + getTimeFormat + " " + ERROR_STR + " " + rawLog } def generateWarn(rawLog: String): String = { @@ -52,4 +52,6 @@ object LogUtils { getTimeFormat + " " + "SYSTEM-WARN" + " " + rawLog } + val ERROR_STR = "ERROR" + } diff --git 
a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala index 3870fe6e58..e6e63a9779 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala @@ -21,6 +21,10 @@ import org.apache.linkis.common.conf.CommonVars import org.apache.commons.lang3.StringUtils +import java.util.Locale + +import scala.collection.mutable + object CodeAndRunTypeUtils { private val CONF_LOCK = new Object() @@ -29,7 +33,7 @@ object CodeAndRunTypeUtils { */ val CODE_TYPE_AND_RUN_TYPE_RELATION = CommonVars( "linkis.codeType.language.relation", - "sql=>sql|hql|jdbc|hive|psql|fql|tsql,python=>python|py|pyspark,java=>java,scala=>scala,shell=>sh|shell,json=>json|data_calc" + "sql=>sql|hql|jdbc|hive|psql|fql|tsql|nebula|ngql,python=>python|py|pyspark,java=>java,scala=>scala,shell=>sh|shell,json=>json|data_calc" ) val LANGUAGE_TYPE_SQL = "sql" @@ -101,14 +105,23 @@ object CodeAndRunTypeUtils { def getLanguageTypeAndCodeTypeRelationMap: Map[String, String] = { val codeTypeAndRunTypeRelationMap = getCodeTypeAndLanguageTypeRelationMap if (codeTypeAndRunTypeRelationMap.isEmpty) Map() - else codeTypeAndRunTypeRelationMap.flatMap(x => x._2.map(y => (y, x._1))) + else { +// codeTypeAndRunTypeRelationMap.flatMap(x => x._2.map(y => (y, x._1))) + val map = mutable.Map[String, String]() + codeTypeAndRunTypeRelationMap.foreach(kv => { + kv._2.foreach(v => map.put(v, kv._1)) + }) + map.toMap + } } def getLanguageTypeByCodeType(codeType: String, defaultLanguageType: String = ""): String = { if (StringUtils.isBlank(codeType)) { return "" } - getLanguageTypeAndCodeTypeRelationMap.getOrElse(codeType, defaultLanguageType) + val lowerCaseCodeType = codeType.toLowerCase(Locale.getDefault) + 
getLanguageTypeAndCodeTypeRelationMap.getOrElse(lowerCaseCodeType, defaultLanguageType) + } /** diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala index b53184eceb..e021b9a482 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala @@ -19,12 +19,17 @@ package org.apache.linkis.common.utils import org.apache.linkis.common.conf.CommonVars +import org.apache.commons.codec.binary.Hex import org.apache.commons.lang3.StringUtils import javax.naming.Context import javax.naming.ldap.InitialLdapContext +import java.nio.charset.StandardCharsets import java.util.Hashtable +import java.util.concurrent.TimeUnit + +import com.google.common.cache.{Cache, CacheBuilder, RemovalListener, RemovalNotification} object LDAPUtils extends Logging { @@ -38,7 +43,33 @@ object LDAPUtils extends Logging { val baseDN = CommonVars("wds.linkis.ldap.proxy.baseDN", "").getValue val userNameFormat = CommonVars("wds.linkis.ldap.proxy.userNameFormat", "").getValue + private val storeUser: Cache[String, String] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(60, TimeUnit.MINUTES) + .removalListener(new RemovalListener[String, String] { + + override def onRemoval(removalNotification: RemovalNotification[String, String]): Unit = { + logger.info(s"store user remove key: ${removalNotification.getKey}") + } + + }) + .build() + def login(userID: String, password: String): Unit = { + + val saltPwd = storeUser.getIfPresent(userID) + if (StringUtils.isNotBlank(saltPwd)) { + Utils.tryAndWarn { + if ( + saltPwd.equalsIgnoreCase(Hex.encodeHexString(password.getBytes(StandardCharsets.UTF_8))) + ) { + logger.info(s"user $userID login success for storeUser") + return + } + } + } + val env = new 
Hashtable[String, String]() val bindDN = if (StringUtils.isBlank(userNameFormat)) userID @@ -53,6 +84,9 @@ object LDAPUtils extends Logging { env.put(Context.SECURITY_CREDENTIALS, bindPassword) new InitialLdapContext(env, null) + Utils.tryAndWarn { + storeUser.put(userID, Hex.encodeHexString(password.getBytes(StandardCharsets.UTF_8))) + } logger.info(s"user $userID login success.") } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala index 80e3ff7e5e..deac2f2464 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala @@ -43,6 +43,8 @@ import org.slf4j.Logger object Utils extends Logging { + val DEFAULE_SCHEDULER_THREAD_NAME_PREFIX = "Linkis-Default-Scheduler-Thread-" + def tryQuietly[T](tryOp: => T): T = tryQuietly(tryOp, _ => ()) def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = { @@ -199,7 +201,7 @@ object Utils extends Logging { val defaultScheduler: ScheduledThreadPoolExecutor = { val scheduler = - new ScheduledThreadPoolExecutor(20, threadFactory("Linkis-Default-Scheduler-Thread-", true)) + new ScheduledThreadPoolExecutor(20, threadFactory(DEFAULE_SCHEDULER_THREAD_NAME_PREFIX, true)) scheduler.setMaximumPoolSize(20) scheduler.setKeepAliveTime(5, TimeUnit.MINUTES) scheduler diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala index 6c5bd7cf3c..bd2fab4930 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala @@ -43,6 +43,8 @@ object VariableUtils extends Logging { val RUN_TODAY_H = 
"run_today_h" + val RUN_TODAY_HOUR = "run_today_hour" + private val codeReg = "\\$\\{\\s*[A-Za-z][A-Za-z0-9_\\.]*\\s*[\\+\\-\\*/]?\\s*[A-Za-z0-9_\\.]*\\s*\\}".r @@ -83,6 +85,13 @@ object VariableUtils extends Logging { nameAndType(RUN_TODAY_H) = HourType(runTodayH) } } + if (variables.containsKey(RUN_TODAY_HOUR)) { + val runTodayHourStr = variables.get(RUN_TODAY_HOUR).asInstanceOf[String] + if (StringUtils.isNotBlank(runTodayHourStr)) { + val runTodayHour = new CustomHourType(runTodayHourStr, false) + nameAndType(RUN_TODAY_HOUR) = HourType(runTodayHour) + } + } initAllDateVars(run_date, nameAndType) val codeOperation = parserVar(replaceStr, nameAndType) parserDate(codeOperation, run_date) @@ -141,6 +150,13 @@ object VariableUtils extends Logging { nameAndType(RUN_TODAY_H) = HourType(runTodayH) } } + if (variables.containsKey(RUN_TODAY_HOUR)) { + val runTodayHourStr = variables.get(RUN_TODAY_HOUR).asInstanceOf[String] + if (StringUtils.isNotBlank(runTodayHourStr)) { + val runTodayHour = new CustomHourType(runTodayHourStr, false) + nameAndType(RUN_TODAY_HOUR) = HourType(runTodayHour) + } + } initAllDateVars(run_date, nameAndType) val codeOperation = parserVar(code, nameAndType) parserDate(codeType, codeOperation, run_date) @@ -148,21 +164,13 @@ object VariableUtils extends Logging { @deprecated private def parserDate(code: String, run_date: CustomDateType): String = { - if (Configuration.VARIABLE_OPERATION) { - val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) - VariableOperationUtils.replaces(zonedDateTime, code) - } else { - code - } + val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) + VariableOperationUtils.replaces(zonedDateTime, code) } private def parserDate(codeType: String, code: String, run_date: CustomDateType): String = { - if (Configuration.VARIABLE_OPERATION) { - val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) - 
VariableOperationUtils.replaces(codeType, zonedDateTime, code) - } else { - code - } + val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) + VariableOperationUtils.replaces(codeType, zonedDateTime, code) } private def initAllDateVars( @@ -265,6 +273,30 @@ object VariableUtils extends Logging { nameAndType("run_today_h_std") = HourType( new CustomHourType(nameAndType(RUN_TODAY_H).asInstanceOf[HourType].getValue, true) ) + // calculate run_today_hour base on run_date + if (nameAndType.contains("run_today_hour")) { + nameAndType("run_today_hour").asInstanceOf[HourType] + } else { + val run_today_hour = new CustomHourType(getCurHour(false, run_today.toString), false) + nameAndType("run_today_hour") = HourType(run_today_hour) + } + nameAndType("run_today_hour_std") = HourType( + new CustomHourType(nameAndType("run_today_hour").asInstanceOf[HourType].getValue, true) + ) + // calculate run_last_mon base on run_today + val run_roday_mon = new CustomMonType(getMonthDay(false, run_today.getDate), false) + nameAndType("run_last_mon_now") = MonType(new CustomMonType(run_roday_mon - 1, false, false)) + nameAndType("run_last_mon_now_std") = MonType(new CustomMonType(run_roday_mon - 1, true, false)) + // calculate run_current_mon_now base on run_today + nameAndType("run_current_mon_now") = MonType( + new CustomMonType(run_roday_mon.toString, false, false) + ) + nameAndType("run_current_mon_now_std") = MonType( + new CustomMonType(run_roday_mon.toString, true, false) + ) + // calculate run_mon_now base on run_today + nameAndType("run_mon_now") = MonType(new CustomMonType(run_roday_mon.toString, false, false)) + nameAndType("run_mon_now_std") = MonType(new CustomMonType(run_roday_mon.toString, true, false)) } /** diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java index 
b24bad2467..5d77cb323b 100644 --- a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java @@ -38,21 +38,10 @@ public class VariableOperationTest { @Test public void testSqlFormat() throws VariableOperationFailedException { - String jsonOld = - "select \n" - + "\"&{yyyy-MM}\",\n" - + "\"&{yyyy-MM-dd HHmmss}\",\n" - + "\"&yyyyMMddHH\",\n" - + "\"&{yyyy-MM-dd-HH}\""; + String jsonOld = "select \n" + "\"&{yyyy-MM}\""; String jsonNew = VariableOperationUtils.replaces(zonedDateTime, jsonOld); System.out.println(jsonNew); - assertEquals( - jsonNew, - "select \n" - + "\"2022-04\",\n" - + "\"2022-04-02 173507\",\n" - + "\"&yyyyMMddHH\",\n" - + "\"2022-04-02-17\""); + assertEquals(jsonNew, "select \n" + "\"2022-04\""); } @Test diff --git a/linkis-commons/linkis-common/src/test/resources/linkis.properties b/linkis-commons/linkis-common/src/test/resources/linkis.properties index 230ad85a4e..d6e47523f2 100644 --- a/linkis-commons/linkis-common/src/test/resources/linkis.properties +++ b/linkis-commons/linkis-common/src/test/resources/linkis.properties @@ -13,4 +13,6 @@ # limitations under the License. 
# -linkis.jobhistory.error.msg.tip=properties支持中文 \ No newline at end of file +linkis.jobhistory.error.msg.tip=properties支持中文 +linkis.test.error.conf=123 +linkis.test.error.conf2= 456 \ No newline at end of file diff --git a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala index ee1102c91c..33c8229a4b 100644 --- a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala +++ b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala @@ -27,4 +27,11 @@ class ConfigurationTest { Assertions.assertFalse(Configuration.isAdmin("HaDooop")) } + @Test private[conf] def testFormatValue(): Unit = { + val confvalue = CommonVars[Int]("linkis.test.error.conf", 456).getValue + val confvalue2 = CommonVars[Int]("linkis.test.error.conf2", 789).getValue + Assertions.assertTrue(123 == confvalue) + Assertions.assertTrue(456 == confvalue2) + } + } diff --git a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala index e7a105497c..892731e0d5 100644 --- a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala +++ b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala @@ -42,6 +42,10 @@ class VariableUtilsTest { |'${run_half_year_begin-1}' as run_half_year_begin_sub1, |'${run_half_year_begin_std}' as run_half_year_begin_std, |'${run_half_year_end}' as run_half_year_end, + |'${run_last_mon_now}' as run_last_mon_now, + |'${run_last_mon_now_std}' as run_last_mon_now_std, + |'${submit_user}' as submit_user, + |'${execute_user}' as execute_user, |'${run_today_h+12}' as run_today_h_add1""".stripMargin val run_date = new 
CustomDateType(run_date_str, false) val dateType = DateType(run_date) @@ -59,9 +63,15 @@ class VariableUtilsTest { |'20190701' as run_half_year_begin_sub1, |'2020-01-01' as run_half_year_begin_std, |'20200630' as run_half_year_end, + |'202001' as run_last_mon_now, + |'2020-01' as run_last_mon_now_std, + |'hadoop' as submit_user, + |'hadoop' as execute_user, |'${hourTypeRes}' as run_today_h_add1""".stripMargin val varMap = new util.HashMap[String, String]() varMap.put("run_date", run_date_str) + varMap.put("execute_user", "hadoop") + varMap.put("submit_user", "hadoop") assertEquals(VariableUtils.replace(sql, "sql", varMap), resSql) } diff --git a/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java b/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java index 6c5c125f6d..67fecd04be 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java +++ b/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java @@ -17,12 +17,14 @@ package org.apache.linkis.hadoop.common.utils; +import org.apache.linkis.common.utils.Utils; import org.apache.linkis.hadoop.common.conf.HadoopConf; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; +import java.util.concurrent.TimeUnit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,6 +35,10 @@ public class KerberosUtils { private static final Logger LOG = LoggerFactory.getLogger(KerberosUtils.class); + private static boolean kerberosRefreshStarted = false; + + private static final Object kerberosRefreshLock = new Object(); + private KerberosUtils() {} private static Configuration createKerberosSecurityConfiguration() { @@ -81,20 +87,20 @@ public static boolean runRefreshKerberosLogin() { public static Long getKerberosRefreshInterval() { 
long refreshInterval; - String refreshIntervalString = "86400000"; - // defined in linkis-env.sh, if not initialized then the default value is 86400000 ms (1d). - if (System.getenv("LINKIS_JDBC_KERBEROS_REFRESH_INTERVAL") != null) { - refreshIntervalString = System.getenv("LINKIS_JDBC_KERBEROS_REFRESH_INTERVAL"); + String refreshIntervalString = "43200"; + // defined in linkis-env.sh, if not initialized then the default value is 43200 s (0.5d). + if (System.getenv("LINKIS_KERBEROS_REFRESH_INTERVAL") != null) { + refreshIntervalString = System.getenv("LINKIS_KERBEROS_REFRESH_INTERVAL"); } try { refreshInterval = Long.parseLong(refreshIntervalString); } catch (NumberFormatException e) { LOG.error( - "Cannot get time in MS for the given string, " + "Cannot get time in S for the given string, " + refreshIntervalString - + " defaulting to 86400000 ", + + " defaulting to 43200 ", e); - refreshInterval = 86400000L; + refreshInterval = 43200; } return refreshInterval; } @@ -102,14 +108,13 @@ public static Long getKerberosRefreshInterval() { public static Integer kinitFailTimesThreshold() { Integer kinitFailThreshold = 5; // defined in linkis-env.sh, if not initialized then the default value is 5. 
- if (System.getenv("LINKIS_JDBC_KERBEROS_KINIT_FAIL_THRESHOLD") != null) { + if (System.getenv("LINKIS_KERBEROS_KINIT_FAIL_THRESHOLD") != null) { try { - kinitFailThreshold = - new Integer(System.getenv("LINKIS_JDBC_KERBEROS_KINIT_FAIL_THRESHOLD")); + kinitFailThreshold = new Integer(System.getenv("LINKIS_KERBEROS_KINIT_FAIL_THRESHOLD")); } catch (Exception e) { LOG.error( "Cannot get integer value from the given string, " - + System.getenv("LINKIS_JDBC_KERBEROS_KINIT_FAIL_THRESHOLD") + + System.getenv("LINKIS_KERBEROS_KINIT_FAIL_THRESHOLD") + " defaulting to " + kinitFailThreshold, e); @@ -117,4 +122,70 @@ public static Integer kinitFailTimesThreshold() { } return kinitFailThreshold; } + + public static void checkStatus() { + try { + LOG.info("isSecurityEnabled:" + UserGroupInformation.isSecurityEnabled()); + LOG.info( + "userAuthenticationMethod:" + + UserGroupInformation.getLoginUser().getAuthenticationMethod()); + UserGroupInformation loginUsr = UserGroupInformation.getLoginUser(); + UserGroupInformation curUsr = UserGroupInformation.getCurrentUser(); + LOG.info("LoginUser: " + loginUsr); + LOG.info("CurrentUser: " + curUsr); + if (curUsr == null) { + LOG.info("CurrentUser is null"); + } else { + LOG.info("CurrentUser is not null"); + } + if (loginUsr.getClass() != curUsr.getClass()) { + LOG.info("getClass() is different"); + } else { + LOG.info("getClass() is same"); + } + if (loginUsr.equals(curUsr)) { + LOG.info("subject is equal"); + } else { + LOG.info("subject is not equal"); + } + } catch (Exception e) { + LOG.error("UGI error: ", e.getMessage()); + } + } + + public static void startKerberosRefreshThread() { + + if (kerberosRefreshStarted || !HadoopConf.KERBEROS_ENABLE()) { + LOG.warn( + "kerberos refresh thread had start or not kerberos {}", HadoopConf.HDFS_ENABLE_CACHE()); + return; + } + synchronized (kerberosRefreshLock) { + if (kerberosRefreshStarted) { + LOG.warn("kerberos refresh thread had start"); + return; + } + kerberosRefreshStarted = true; 
+ LOG.info("kerberos Refresh tread started"); + Utils.defaultScheduler() + .scheduleAtFixedRate( + () -> { + try { + checkStatus(); + if (UserGroupInformation.isLoginKeytabBased()) { + LOG.info("Trying re-login from keytab"); + UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab(); + } else if (UserGroupInformation.isLoginTicketBased()) { + LOG.info("Trying re-login from ticket cache"); + UserGroupInformation.getLoginUser().reloginFromTicketCache(); + } + } catch (Exception e) { + LOG.error("Unable to re-login", e); + } + }, + getKerberosRefreshInterval(), + getKerberosRefreshInterval(), + TimeUnit.SECONDS); + } + } } diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala index b3e5cf2024..c550b3f517 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala @@ -23,7 +23,7 @@ object HadoopConf { val HADOOP_ROOT_USER = CommonVars("wds.linkis.hadoop.root.user", "hadoop") - val KERBEROS_ENABLE = CommonVars("wds.linkis.keytab.enable", false) + val KERBEROS_ENABLE = CommonVars("wds.linkis.keytab.enable", false).getValue val KERBEROS_ENABLE_MAP = CommonVars("linkis.keytab.enable.map", "cluster1=false,cluster2=true") @@ -53,8 +53,17 @@ object HadoopConf { val HADOOP_EXTERNAL_CONF_DIR_PREFIX = CommonVars("wds.linkis.hadoop.external.conf.dir.prefix", "/appcom/config/external-conf/hadoop") + /** + * Whether to close the hdfs underlying cache or turn it off if it is ture + */ + val FS_CACHE_DISABLE = + CommonVars[java.lang.Boolean]("wds.linkis.fs.hdfs.impl.disable.cache", false) + val HDFS_ENABLE_CACHE = CommonVars("wds.linkis.hadoop.hdfs.cache.enable", false).getValue + val HDFS_ENABLE_CACHE_CLOSE = + 
CommonVars("linkis.hadoop.hdfs.cache.close.enable", true).getValue + val HDFS_ENABLE_CACHE_IDLE_TIME = CommonVars("wds.linkis.hadoop.hdfs.cache.idle.time", 3 * 60 * 1000).getValue diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala index 6b4eaaeceb..f87f89393e 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala @@ -48,8 +48,7 @@ class HDFSFileSystemContainer(fs: FileSystem, user: String, label: String) { def canRemove(): Boolean = { val currentTime = System.currentTimeMillis() val idleTime = currentTime - this.lastAccessTime - idleTime > HadoopConf.HDFS_ENABLE_CACHE_MAX_TIME || (System - .currentTimeMillis() - this.lastAccessTime > HadoopConf.HDFS_ENABLE_CACHE_IDLE_TIME) && count <= 0 + idleTime > HadoopConf.HDFS_ENABLE_CACHE_MAX_TIME || ((idleTime > HadoopConf.HDFS_ENABLE_CACHE_IDLE_TIME) && count <= 0) } } diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala index f2a615e996..3ebbbc33ba 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala @@ -31,21 +31,29 @@ import org.apache.hadoop.security.UserGroupInformation import java.io.File import java.nio.file.Paths import java.security.PrivilegedExceptionAction -import java.util.concurrent.TimeUnit +import java.util.concurrent.{ConcurrentHashMap, TimeUnit} +import 
java.util.concurrent.atomic.AtomicLong import scala.collection.JavaConverters._ object HDFSUtils extends Logging { private val fileSystemCache: java.util.Map[String, HDFSFileSystemContainer] = - new java.util.HashMap[String, HDFSFileSystemContainer]() + new ConcurrentHashMap[String, HDFSFileSystemContainer]() private val LOCKER_SUFFIX = "_HDFS" private val DEFAULT_CACHE_LABEL = "default" private val JOINT = "_" - if (HadoopConf.HDFS_ENABLE_CACHE) { - logger.info("HDFS Cache enabled ") + private val count = new AtomicLong + + /** + * For FS opened with public tenants, we should not perform close action, but should close only + * when hdfsfilesystem encounters closed problem + * 对于使用公共租户开启的FS,我们不应该去执行close动作,应该由hdfsfilesystem遇到closed问题时才进行关闭 + */ + if (HadoopConf.HDFS_ENABLE_CACHE && HadoopConf.HDFS_ENABLE_CACHE_CLOSE) { + logger.info("HDFS Cache clear enabled ") Utils.defaultScheduler.scheduleAtFixedRate( new Runnable { override def run(): Unit = Utils.tryAndWarn { @@ -58,8 +66,7 @@ object HDFSUtils extends Logging { ) } .foreach { hdfsFileSystemContainer => - val locker = - hdfsFileSystemContainer.getUser + JOINT + hdfsFileSystemContainer.getLabel + LOCKER_SUFFIX + val locker = hdfsFileSystemContainer.getUser + LOCKER_SUFFIX locker.intern() synchronized { if (hdfsFileSystemContainer.canRemove()) { fileSystemCache.remove( @@ -121,43 +128,75 @@ object HDFSUtils extends Logging { ) def getHDFSRootUserFileSystem(conf: org.apache.hadoop.conf.Configuration): FileSystem = - getHDFSUserFileSystem(HADOOP_ROOT_USER.getValue, conf) - - def getHDFSUserFileSystem(userName: String): FileSystem = - getHDFSUserFileSystem(userName, getConfiguration(userName)) + getHDFSUserFileSystem(HADOOP_ROOT_USER.getValue, null, conf) + + /** + * If the cache switch is turned on, fs will be obtained from the cache first + * @param userName + * @return + */ + def getHDFSUserFileSystem(userName: String): FileSystem = { + getHDFSUserFileSystem(userName, null) + } - def getHDFSUserFileSystem( - 
userName: String, - conf: org.apache.hadoop.conf.Configuration - ): FileSystem = getHDFSUserFileSystem(userName, null, conf) + def getHDFSUserFileSystem(userName: String, label: String): FileSystem = { + + if (HadoopConf.HDFS_ENABLE_CACHE) { + val cacheLabel = if (label == null) DEFAULT_CACHE_LABEL else label + val cacheKey = userName + JOINT + cacheLabel + val locker = userName + LOCKER_SUFFIX + locker.intern().synchronized { + if (fileSystemCache.containsKey(cacheKey)) { + val hdfsFileSystemContainer = fileSystemCache.get(cacheKey) + hdfsFileSystemContainer.addAccessCount() + hdfsFileSystemContainer.updateLastAccessTime + hdfsFileSystemContainer.getFileSystem + } else { + getHDFSUserFileSystem(userName, label, getConfiguration(userName, label)) + } + } + } else { + getHDFSUserFileSystem(userName, label, getConfiguration(userName, label)) + } + } def getHDFSUserFileSystem( userName: String, label: String, conf: org.apache.hadoop.conf.Configuration - ): FileSystem = if (HadoopConf.HDFS_ENABLE_CACHE) { - val cacheLabel = if (label == null) DEFAULT_CACHE_LABEL else label - val cacheKey = userName + JOINT + cacheLabel - val locker = cacheKey + LOCKER_SUFFIX - locker.intern().synchronized { - val hdfsFileSystemContainer = if (fileSystemCache.containsKey(cacheKey)) { - fileSystemCache.get(cacheKey) - } else { - // we use cacheLabel to create HDFSFileSystemContainer, and in the rest part of HDFSUtils, we consistently - // use the same cacheLabel to operate HDFSFileSystemContainer, like close or remove. - // At the same time, we don't want to change the behavior of createFileSystem which is out of HDFSUtils, - // so we continue to use the original label to createFileSystem. 
- val newHDFSFileSystemContainer = - new HDFSFileSystemContainer(createFileSystem(userName, label, conf), userName, cacheLabel) - fileSystemCache.put(cacheKey, newHDFSFileSystemContainer) - newHDFSFileSystemContainer + ): FileSystem = { + + if (HadoopConf.FS_CACHE_DISABLE.getValue && null != conf) { + conf.set("fs.hdfs.impl.disable.cache", "true") + } + if (HadoopConf.HDFS_ENABLE_CACHE) { + val locker = userName + LOCKER_SUFFIX + val cacheLabel = if (label == null) DEFAULT_CACHE_LABEL else label + val cacheKey = userName + JOINT + cacheLabel + locker.intern().synchronized { + val hdfsFileSystemContainer = if (fileSystemCache.containsKey(cacheKey)) { + fileSystemCache.get(cacheKey) + } else { + // we use cacheLabel to create HDFSFileSystemContainer, and in the rest part of HDFSUtils, we consistently + // use the same cacheLabel to operate HDFSFileSystemContainer, like close or remove. + // At the same time, we don't want to change the behavior of createFileSystem which is out of HDFSUtils, + // so we continue to use the original label to createFileSystem. 
+ val newHDFSFileSystemContainer = + new HDFSFileSystemContainer( + createFileSystem(userName, label, conf), + userName, + cacheLabel + ) + fileSystemCache.put(cacheKey, newHDFSFileSystemContainer) + newHDFSFileSystemContainer + } + hdfsFileSystemContainer.addAccessCount() + hdfsFileSystemContainer.updateLastAccessTime + hdfsFileSystemContainer.getFileSystem } - hdfsFileSystemContainer.addAccessCount() - hdfsFileSystemContainer.updateLastAccessTime - hdfsFileSystemContainer.getFileSystem + } else { + createFileSystem(userName, label, conf) } - } else { - createFileSystem(userName, label, conf) } def createFileSystem(userName: String, conf: org.apache.hadoop.conf.Configuration): FileSystem = @@ -167,16 +206,19 @@ object HDFSUtils extends Logging { userName: String, label: String, conf: org.apache.hadoop.conf.Configuration - ): FileSystem = + ): FileSystem = { + val createCount = count.getAndIncrement() + logger.info(s"user ${userName} to create Fs, create time ${createCount}") getUserGroupInformation(userName, label) .doAs(new PrivilegedExceptionAction[FileSystem] { - // scalastyle:off FileSystemGet - def run: FileSystem = FileSystem.get(conf) - // scalastyle:on FileSystemGet + def run: FileSystem = FileSystem.newInstance(conf) }) + } def closeHDFSFIleSystem(fileSystem: FileSystem, userName: String): Unit = - closeHDFSFIleSystem(fileSystem, userName, null, false) + if (null != fileSystem && StringUtils.isNotBlank(userName)) { + closeHDFSFIleSystem(fileSystem, userName, null, false) + } def closeHDFSFIleSystem(fileSystem: FileSystem, userName: String, label: String): Unit = closeHDFSFIleSystem(fileSystem, userName, label, false) @@ -191,23 +233,28 @@ object HDFSUtils extends Logging { isForce: Boolean ): Unit = if (null != fileSystem && StringUtils.isNotBlank(userName)) { - if (HadoopConf.HDFS_ENABLE_CACHE) { + val locker = userName + LOCKER_SUFFIX + if (HadoopConf.HDFS_ENABLE_CACHE) locker.intern().synchronized { val cacheLabel = if (label == null) 
DEFAULT_CACHE_LABEL else label val cacheKey = userName + JOINT + cacheLabel val hdfsFileSystemContainer = fileSystemCache.get(cacheKey) - if (null != hdfsFileSystemContainer) { - val locker = cacheKey + LOCKER_SUFFIX + if ( + null != hdfsFileSystemContainer && fileSystem == hdfsFileSystemContainer.getFileSystem + ) { if (isForce) { - locker synchronized fileSystemCache.remove(cacheKey) + fileSystemCache.remove(hdfsFileSystemContainer.getUser) IOUtils.closeQuietly(hdfsFileSystemContainer.getFileSystem) logger.info( s"user${hdfsFileSystemContainer.getUser} to Force remove hdfsFileSystemContainer" ) } else { - locker synchronized hdfsFileSystemContainer.minusAccessCount() + hdfsFileSystemContainer.minusAccessCount() } + } else { + IOUtils.closeQuietly(fileSystem) } - } else { + } + else { IOUtils.closeQuietly(fileSystem) } } @@ -240,7 +287,7 @@ object HDFSUtils extends Logging { def isKerberosEnabled(label: String): Boolean = { if (label == null) { - KERBEROS_ENABLE.getValue + KERBEROS_ENABLE } else { kerberosValueMapParser(KERBEROS_ENABLE_MAP.getValue).get(label).contains("true") } diff --git a/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java b/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java index b84988a74a..5b29e1f482 100644 --- a/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java +++ b/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java @@ -28,7 +28,7 @@ public class KerberosUtilsTest { public void getKerberosRefreshIntervalTest() { Long refreshInterval = KerberosUtils.getKerberosRefreshInterval(); - Assertions.assertTrue(86400000L == refreshInterval.longValue()); + Assertions.assertTrue(43200L == refreshInterval.longValue()); } @Test diff --git 
a/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala b/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala index 44ca1dabcb..7c2c7b3835 100644 --- a/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala +++ b/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala @@ -26,7 +26,7 @@ class HadoopConfTest { def constTest(): Unit = { Assertions.assertEquals("hadoop", HadoopConf.HADOOP_ROOT_USER.getValue) - Assertions.assertFalse(HadoopConf.KERBEROS_ENABLE.getValue) + Assertions.assertFalse(HadoopConf.KERBEROS_ENABLE) Assertions.assertEquals("/appcom/keytab/", HadoopConf.KEYTAB_FILE.getValue) Assertions.assertEquals("127.0.0.1", HadoopConf.KEYTAB_HOST.getValue) Assertions.assertFalse(HadoopConf.KEYTAB_HOST_ENABLED.getValue) diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala index ec61ebd66d..5e42540102 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala @@ -59,12 +59,21 @@ import org.apache.http.conn.{ ConnectTimeoutException, HttpHostConnectException } +import org.apache.http.conn.ssl.{SSLConnectionSocketFactory, TrustSelfSignedStrategy} import org.apache.http.entity.{ContentType, StringEntity} import org.apache.http.entity.mime.MultipartEntityBuilder -import org.apache.http.impl.client.{BasicCookieStore, CloseableHttpClient, HttpClients} +import org.apache.http.impl.client.{ + BasicCookieStore, + CloseableHttpClient, + HttpClientBuilder, + HttpClients +} import org.apache.http.message.BasicNameValuePair +import 
org.apache.http.ssl.SSLContextBuilder import org.apache.http.util.EntityUtils +import javax.net.ssl.{HostnameVerifier, SSLContext, SSLSession} + import java.net.URI import java.nio.charset.Charset import java.util @@ -81,12 +90,26 @@ abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String protected val cookieStore = new BasicCookieStore - protected val httpClient: CloseableHttpClient = HttpClients + private val httpClientBuilder: HttpClientBuilder = HttpClients .custom() .setDefaultCookieStore(cookieStore) .setMaxConnTotal(clientConfig.getMaxConnection) .setMaxConnPerRoute(clientConfig.getMaxConnection / 2) - .build + + protected val httpClient: CloseableHttpClient = if (clientConfig.isSSL) { + val sslContext: SSLContext = + SSLContextBuilder.create.loadTrustMaterial(null, new TrustSelfSignedStrategy).build + + val sslConnectionFactory = new SSLConnectionSocketFactory( + sslContext, + new HostnameVerifier() { + override def verify(hostname: String, session: SSLSession) = true + } + ) + httpClientBuilder.setSSLSocketFactory(sslConnectionFactory).build() + } else { + httpClientBuilder.build() + } if (clientConfig.getAuthenticationStrategy != null) { clientConfig.getAuthenticationStrategy match { @@ -150,12 +173,12 @@ abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String s"invoke ${req.getURI} get status ${response.getStatusLine.getStatusCode} taken: ${costTime}." 
) if (response.getStatusLine.getStatusCode == 401) { - tryLogin(action, getRequestUrl(action), true) val msg = Utils.tryCatch(EntityUtils.toString(response.getEntity)) { t => logger.warn("failed to parse entity", t) "" } IOUtils.closeQuietly(response) + tryLogin(action, getRequestUrl(action), true) if (attempts.size() <= 1) { logger.info("The user is not logged in, default retry once") addAttempt() diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala index c45de8f466..30f04999c5 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala @@ -54,6 +54,9 @@ abstract class AbstractAuthenticationStrategy extends AuthenticationStrategy wit getKeyByUserAndURL(user, serverUrl) } + protected def getAuthenticationActionByKey(key: String): Authentication = + userNameToAuthentications.get(key) + def setClientConfig(clientConfig: ClientConfig): Unit = this.clientConfig = clientConfig def getClientConfig: ClientConfig = clientConfig @@ -61,16 +64,14 @@ abstract class AbstractAuthenticationStrategy extends AuthenticationStrategy wit def login(requestAction: Action, serverUrl: String): Authentication = { val key = getKey(requestAction, serverUrl) if (key == null) return null - if ( - userNameToAuthentications - .containsKey(key) && !isTimeout(userNameToAuthentications.get(key)) - ) { - val authenticationAction = userNameToAuthentications.get(key) + val oldAuth = getAuthenticationActionByKey(key) + if (null != oldAuth && !isTimeout(oldAuth)) { + val authenticationAction = oldAuth authenticationAction.updateLastAccessTime() authenticationAction } else { 
key.intern() synchronized { - var authentication = userNameToAuthentications.get(key) + var authentication = getAuthenticationActionByKey(key) if (authentication == null || isTimeout(authentication)) { authentication = tryLogin(requestAction, serverUrl) putSession(key, authentication) diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala index e40a10cd83..18e7dddd0c 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala @@ -25,4 +25,6 @@ trait Authentication { def updateLastAccessTime(): Unit + def getCreateTime: Long = System.currentTimeMillis() + } diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala index dbce2d32a2..dea081bd3b 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala @@ -44,6 +44,7 @@ class ClientConfig private () { private var maxConnection: Int = 20 private var retryEnabled: Boolean = _ private var retryHandler: RetryHandler = _ + private var ssl: Boolean = false protected[config] def this( serverUrl: String, @@ -59,7 +60,8 @@ class ClientConfig private () { retryEnabled: Boolean, retryHandler: RetryHandler, authTokenKey: String, - authTokenValue: String + authTokenValue: String, + isSSL: Boolean = false ) = { this() this.serverUrl = serverUrl @@ -78,6 +80,7 @@ class ClientConfig private () { this.retryHandler = retryHandler this.authTokenKey = 
authTokenKey this.authTokenValue = authTokenValue + this.ssl = isSSL authenticationStrategy match { case ab: AbstractAuthenticationStrategy => ab.setClientConfig(this) case _ => @@ -123,4 +126,6 @@ class ClientConfig private () { def getRetryHandler: RetryHandler = retryHandler + def isSSL: Boolean = ssl + } diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala index b1fc579f3c..a574b89fb5 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala @@ -40,6 +40,8 @@ class ClientConfigBuilder protected () { protected var maxConnection: Int = _ protected var retryEnabled: Boolean = true + protected var ssl: Boolean = false + protected var retryHandler: RetryHandler = { val retryHandler = new DefaultRetryHandler retryHandler.addRetryException(classOf[LinkisRetryException]) @@ -112,6 +114,11 @@ class ClientConfigBuilder protected () { this } + def setSSL(isSSL: Boolean): this.type = { + this.ssl = isSSL + this + } + def build(): ClientConfig = new ClientConfig( serverUrl, discoveryEnabled, @@ -126,7 +133,8 @@ class ClientConfigBuilder protected () { retryEnabled, retryHandler, authTokenKey, - authTokenValue + authTokenValue, + ssl ) } diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml index d4ffc38e2c..cb8d23c095 100644 --- a/linkis-commons/linkis-module/pom.xml +++ b/linkis-commons/linkis-module/pom.xml @@ -64,10 +64,6 @@ - - org.springframework.cloud - spring-cloud-starter-netflix-eureka-client - org.springframework.boot @@ -266,6 +262,17 @@ jedis ${jedis.version} + + org.springframework.cloud + spring-cloud-openfeign-core + + + + org.springframework.retry + spring-retry + 
1.3.4 + + @@ -277,4 +284,80 @@ + + + eureka + + true + + discovery + eureka + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + nacos + + + discovery + nacos + + + + + com.alibaba.cloud + spring-cloud-starter-alibaba-nacos-discovery + + + org.springframework.boot + * + + + org.springframework.cloud + spring-cloud-commons + + + org.springframework.cloud + spring-cloud-context + + + org.springframework.boot + spring-boot-starter + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.google.code.findbugs + jsr305 + + + org.yaml + snakeyaml + + + io.prometheus + simpleclient + + + com.google.guava + guava + + + + + + + diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java index 6f0256fdfa..10ab8f9268 100644 --- a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java @@ -26,8 +26,6 @@ import org.apache.linkis.server.conf.ServerConfiguration; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.springframework.boot.SpringApplication; import org.springframework.boot.WebApplicationType; @@ -41,6 +39,7 @@ import org.springframework.cloud.client.discovery.EnableDiscoveryClient; import org.springframework.cloud.context.config.annotation.RefreshScope; import org.springframework.cloud.context.scope.refresh.RefreshScopeRefreshedEvent; +import org.springframework.cloud.openfeign.EnableFeignClients; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; @@ -49,6 +48,7 @@ import 
org.springframework.core.env.Environment; import org.springframework.core.env.PropertySource; import org.springframework.core.env.StandardEnvironment; +import org.springframework.retry.annotation.EnableRetry; import org.springframework.web.filter.CharacterEncodingFilter; import javax.servlet.DispatcherType; @@ -62,12 +62,16 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.webapp.WebAppContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @SpringBootApplication(scanBasePackages = {"org.apache.linkis", "com.webank.wedatasphere"}) @EnableDiscoveryClient @RefreshScope +@EnableFeignClients +@EnableRetry public class DataWorkCloudApplication extends SpringBootServletInitializer { - private static final Log logger = LogFactory.getLog(DataWorkCloudApplication.class); + private static final Logger logger = LoggerFactory.getLogger(DataWorkCloudApplication.class); private static ConfigurableApplicationContext applicationContext; private static ServiceInstance serviceInstance; diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultMetaData.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/proxy/ProxyUserService.java similarity index 84% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultMetaData.java rename to linkis-commons/linkis-module/src/main/java/org/apache/linkis/proxy/ProxyUserService.java index 04f835ac01..cb4e6eecca 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultMetaData.java +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/proxy/ProxyUserService.java @@ -15,8 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.resultset; +package org.apache.linkis.proxy; -import org.apache.linkis.common.io.MetaData; +public interface ProxyUserService { -public interface ResultMetaData extends MetaData {} + ProxyUserEntity getProxyUserEntity(String proxyUser, String loginUser); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java similarity index 68% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java rename to linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java index 96d28a185b..c9c52fc430 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java @@ -15,18 +15,17 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.am.conf; +package org.apache.linkis.server; -import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; -import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.web.servlet.config.annotation.InterceptorRegistry; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; @Configuration -public class ApplicationManagerSpringConfiguration { +public class InterceptorConfigure implements WebMvcConfigurer { - @ConditionalOnMissingBean - @Bean - public EngineConnConfigurationService getDefaultEngineConnConfigurationService() { - return new DefaultEngineConnConfigurationService(); + @Override + public void addInterceptors(InterceptorRegistry registry) { + registry.addInterceptor(new PerformanceInterceptor()); } } diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java new file mode 100644 index 0000000000..2a9cb2dd02 --- /dev/null +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.server; + +import org.apache.linkis.utils.LinkisSpringUtils; + +import org.springframework.web.servlet.HandlerInterceptor; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class PerformanceInterceptor implements HandlerInterceptor { + + private static final Logger logger = LoggerFactory.getLogger(PerformanceInterceptor.class); + + @Override + public boolean preHandle( + HttpServletRequest request, HttpServletResponse response, Object handler) { + request.setAttribute("Linkis_startTime", System.currentTimeMillis()); + return true; + } + + @Override + public void afterCompletion( + HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) { + Object startObject = request.getAttribute("Linkis_startTime"); + if (null != startObject) { + long startTime = (Long) startObject; + long endTime = System.currentTimeMillis(); + long executeTime = endTime - startTime; + logger.info( + "Request client address:{} request URL: {} Method: {} taken: {} ms", + LinkisSpringUtils.getClientIP(request), + request.getRequestURI(), + request.getMethod(), + executeTime); + } + } +} diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java index 3661a66cb8..0c4b7dea05 100644 --- a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java @@ -113,4 +113,8 @@ public static String getTokenUser(HttpServletRequest httpServletRequest) { } return tokenUser; } + + public static void printAuditLog(String auditLogMsg) { + 
LOGGER.info(auditLogMsg); + } } diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java new file mode 100644 index 0000000000..f07b7ed0a6 --- /dev/null +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.swagger; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.context.annotation.Configuration; +import org.springframework.util.ReflectionUtils; +import org.springframework.web.servlet.mvc.method.RequestMappingInfoHandlerMapping; + +import java.lang.reflect.Field; +import java.util.List; +import java.util.stream.Collectors; + +import springfox.documentation.spring.web.plugins.WebFluxRequestHandlerProvider; +import springfox.documentation.spring.web.plugins.WebMvcRequestHandlerProvider; + +@Configuration +public class SwaggerBeanPostProcessor implements BeanPostProcessor { + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + if (bean instanceof WebMvcRequestHandlerProvider + || bean instanceof WebFluxRequestHandlerProvider) { + List handlerMappings = getHandlerMappings(bean); + customizeSpringfoxHandlerMappings(handlerMappings); + } + return bean; + } + + private void customizeSpringfoxHandlerMappings( + List mappings) { + List copy = + mappings.stream() + .filter(mapping -> mapping.getPatternParser() == null) + .collect(Collectors.toList()); + mappings.clear(); + mappings.addAll(copy); + } + + @SuppressWarnings("unchecked") + private List getHandlerMappings(Object bean) { + try { + Field field = ReflectionUtils.findField(bean.getClass(), "handlerMappings"); + field.setAccessible(true); + return (List) field.get(bean); + } catch (IllegalArgumentException | IllegalAccessException e) { + throw new IllegalStateException(e); + } + } +} diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java new file mode 100644 index 0000000000..8021bb1191 --- /dev/null +++ 
b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.utils; + +import javax.servlet.http.HttpServletRequest; + +public class LinkisSpringUtils { + + public static String getClientIP(HttpServletRequest request) { + String clientIp = request.getHeader("X-Forwarded-For"); + + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("Proxy-Client-IP"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("WL-Proxy-Client-IP"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("HTTP_CLIENT_IP"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("HTTP_X_FORWARDED_FOR"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getRemoteAddr(); + } + if (clientIp != null && clientIp.contains(",")) { + clientIp = clientIp.split(",")[0]; + } + + return clientIp; + } +} 
diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala index c454aaacb2..23d07bdf90 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala @@ -47,9 +47,7 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2WebMvc * 4, in your browser,add dataworkcloud_inner_request=true, bdp-user-ticket-id's value and workspaceId's value into cookie * */ -@EnableSwagger2WebMvc -@EnableKnife4j -@Configuration + class Knife4jConfig extends WebMvcConfigurer { @Value("${spring.application.name}") private var appName = "linkis service" diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala index 582568e626..ed6c680648 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala @@ -38,7 +38,7 @@ object ServerConfiguration extends Logging { val BDP_SERVER_SPRING_APPLICATION_LISTENERS = CommonVars("wds.linkis.server.spring.application.listeners", "") - val BDP_SERVER_VERSION: String = CommonVars("wds.linkis.server.version", "").getValue + val BDP_SERVER_VERSION: String = CommonVars("wds.linkis.server.version", "v1").getValue if (StringUtils.isBlank(BDP_SERVER_VERSION)) { throw new BDPInitServerException( @@ -207,4 +207,7 @@ object ServerConfiguration extends Logging { val LINKIS_SERVER_SESSION_PROXY_TICKETID_KEY = CommonVars("wds.linkis.session.proxy.user.ticket.key", "linkis_user_session_proxy_ticket_id_v1") + val LINKIS_SERVER_ENTRANCE_HEADER_KEY = + 
CommonVars("linkis.server.entrance.header.key", "jobInstanceKey") + } diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala index 8f09139e0e..73699f38ef 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala @@ -70,7 +70,7 @@ object RedisClient { SessionHAConfiguration.RedisHost, SessionHAConfiguration.RedisPort, redisTimeout, - SessionHAConfiguration.RedisSentinalServer + SessionHAConfiguration.RedisPassword ) } diff --git a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java index 8def4f6a10..6c0a4508bc 100644 --- a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java +++ b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java @@ -24,6 +24,7 @@ import javax.sql.DataSource; import com.alibaba.druid.pool.DruidDataSource; +import com.alibaba.druid.pool.vendor.MySqlValidConnectionChecker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,6 +72,11 @@ public static DataSource buildDataSource(String dbUrl, String username, String p MybatisConfiguration.MYBATIS_DATASOURCE_REMOVE_ABANDONED_ENABLED.getValue(); int removeAbandonedTimeout = MybatisConfiguration.MYBATIS_DATASOURCE_REMOVE_ABANDONED_TIMEOUT.getValue(); + + boolean jdbcKeepAlive = MybatisConfiguration.MYBATIS_DATASOURCE_KEEPALIVE_ENABLED.getValue(); + + boolean jdbcUsePingMethod = MybatisConfiguration.MYBATIS_DATASOURCE_USE_PING_ENABLED.getValue(); + DruidDataSource datasource = new DruidDataSource(); logger.info("Database connection address information(数据库连接地址信息)=" + dbUrl); 
datasource.setUrl(dbUrl); @@ -87,6 +93,16 @@ public static DataSource buildDataSource(String dbUrl, String username, String p datasource.setTestWhileIdle(testWhileIdle); datasource.setTestOnBorrow(testOnBorrow); datasource.setTestOnReturn(testOnReturn); + + datasource.setKeepAlive(jdbcKeepAlive); + + if (!jdbcUsePingMethod) { + // use test sql for keepalive + MySqlValidConnectionChecker checker = new MySqlValidConnectionChecker(); + checker.setUsePingMethod(false); + datasource.setValidConnectionChecker(checker); + } + datasource.setPoolPreparedStatements(poolPreparedStatements); datasource.setRemoveAbandoned(removeAbandoned); datasource.setRemoveAbandonedTimeout(removeAbandonedTimeout); diff --git a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java index e127b9fd97..d200ab2e09 100644 --- a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java +++ b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java @@ -63,6 +63,11 @@ public class MybatisConfiguration { CommonVars.apply("wds.linkis.server.mybatis.datasource.poolPreparedStatements", Boolean.TRUE); public static final CommonVars MYBATIS_DATASOURCE_REMOVE_ABANDONED_ENABLED = CommonVars.apply("wds.linkis.server.mybatis.remove.abandoned.enabled", Boolean.TRUE); + public static final CommonVars MYBATIS_DATASOURCE_KEEPALIVE_ENABLED = + CommonVars.apply("linkis.server.mybatis.keepalive.enabled", Boolean.TRUE); + public static final CommonVars MYBATIS_DATASOURCE_USE_PING_ENABLED = + CommonVars.apply("linkis.server.mybatis.use.ping.enabled", Boolean.TRUE); + public static final CommonVars MYBATIS_DATASOURCE_REMOVE_ABANDONED_TIMEOUT = CommonVars.apply("wds.linkis.server.mybatis.remove.abandoned.timeout", 300); public static final CommonVars BDP_SERVER_MYBATIS_PAGEHELPER_DIALECT = 
diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java index 6eb97c84d9..48d9bb4846 100644 --- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java +++ b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java @@ -63,12 +63,13 @@ public interface TaskConstant { String JOB_MEMORY_PERCENT = "memoryPercent"; String JOB_CORE_RGB = "coreRGB"; String JOB_MEMORY_RGB = "memoryRGB"; - + String JOB_IS_REUSE = "isReuse"; String JOB_ENGINECONN_MAP = "engineconnMap"; String ENGINE_INSTANCE = "engineInstance"; String TICKET_ID = "ticketId"; String ENGINE_CONN_TASK_ID = "engineConnTaskId"; String ENGINE_CONN_SUBMIT_TIME = "engineConnSubmitTime"; + String FAILOVER_FLAG = "failoverFlag"; String DEBUG_ENBALE = "debug.enable"; String PARAMS_DATA_SOURCE = "dataSources"; diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/UserWithCreator.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/UserWithCreator.scala new file mode 100644 index 0000000000..cebaf3b9b2 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/UserWithCreator.scala @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.protocol + +case class UserWithCreator(user: String, creator: String) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala new file mode 100644 index 0000000000..5e2eb10a59 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.protocol.engine + +case class JobInstance( + status: String, + instances: String, + jobReqId: String, + createTimestamp: Long, + instanceRegistryTimestamp: Long +) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala index 9b2be16ef7..3affc351d9 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala @@ -45,6 +45,14 @@ object TaskUtils { } } else params.put(key, waitToAdd) + private def clearMap(params: util.Map[String, AnyRef], key: String): Unit = + if (params != null && params.containsKey(key)) { + params.get(key) match { + case map: util.Map[String, AnyRef] => map.clear() + case _ => params.put(key, new util.HashMap[String, AnyRef]()) + } + } + private def getConfigurationMap( params: util.Map[String, AnyRef], key: String @@ -84,13 +92,20 @@ object TaskUtils { def addStartupMap(params: util.Map[String, AnyRef], startupMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, startupMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + def clearStartupMap(params: util.Map[String, AnyRef]): Unit = { + val configurationMap = getMap(params, TaskConstant.PARAMS_CONFIGURATION) + if (!configurationMap.isEmpty) { + clearMap(configurationMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + } + } + def addRuntimeMap(params: util.Map[String, AnyRef], runtimeMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, runtimeMap, TaskConstant.PARAMS_CONFIGURATION_RUNTIME) def addSpecialMap(params: util.Map[String, AnyRef], specialMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, specialMap, TaskConstant.PARAMS_CONFIGURATION_SPECIAL) - // tdoo + // todo def getLabelsMap(params: util.Map[String, AnyRef]): 
util.Map[String, AnyRef] = getMap(params, TaskConstant.LABELS) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala index 95c7a81873..ad30484c46 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala @@ -23,7 +23,7 @@ object ZuulEntranceUtils { private val INSTANCE_SPLIT_TOKEN = "_" - private val EXEC_ID = "exec_id" + val EXEC_ID = "exec_id" private val SPLIT_LEN = 3 diff --git a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala b/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala deleted file mode 100644 index d9fc07b6c0..0000000000 --- a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.engine - -import org.junit.jupiter.api.{Assertions, DisplayName, Test} - -class RequestEngineStatusTest { - - @Test - @DisplayName("constTest") - def constTest(): Unit = { - - val statusOnly = RequestEngineStatus.Status_Only - val statusOverload = RequestEngineStatus.Status_Overload - val statusConcurrent = RequestEngineStatus.Status_Concurrent - val statusOverloadConcurrent = RequestEngineStatus.Status_Overload_Concurrent - val statusBasicInfo = RequestEngineStatus.Status_BasicInfo - val all = RequestEngineStatus.ALL - - Assertions.assertTrue(1 == statusOnly) - Assertions.assertTrue(2 == statusOverload) - Assertions.assertTrue(3 == statusConcurrent) - Assertions.assertTrue(4 == statusOverloadConcurrent) - Assertions.assertTrue(5 == statusBasicInfo) - Assertions.assertTrue(6 == all) - - } - -} diff --git a/linkis-commons/linkis-rpc/pom.xml b/linkis-commons/linkis-rpc/pom.xml index 70ef8e6bc7..a3354e0e0e 100644 --- a/linkis-commons/linkis-rpc/pom.xml +++ b/linkis-commons/linkis-rpc/pom.xml @@ -40,8 +40,17 @@ org.springframework.cloud spring-cloud-commons + + org.springframework.cloud + spring-cloud-loadbalancer + + + org.springframework.cloud + spring-cloud-loadbalancer + ${spring-cloud-common.version} + org.springframework.cloud spring-cloud-starter-openfeign diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java index 3723ca145d..5aabaccea2 100644 --- a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java +++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java @@ -28,8 +28,14 @@ public enum LinkisRpcErrorCodeSummary implements LinkisErrorCode { 10003, "The corresponding anti-sequence class was not found:{0}(找不到对应的反序列类:{0})"), CORRESPONDING_TO_INITIALIZE( 
10004, "The corresponding anti-sequence class:{0} failed to initialize(对应的反序列类:{0} 初始化失败)"), + CORRESPONDING_CLASS_ILLEGAL( + 10005, "The corresponding anti-sequence class:{0} is illegal (对应的反序列类:{0} 不合法)"), APPLICATION_IS_NOT_EXISTS( 10051, "The instance:{0} of application {1} does not exist(应用程序:{0} 的实例:{1} 不存在)."), + + INSTANCE_ERROR(10052, "The instance:{0} is error should ip:port."), + + INSTANCE_NOT_FOUND_ERROR(10053, "The instance:{0} is not found."), RPC_INIT_ERROR(10054, "Asyn RPC Consumer Thread has stopped!(Asyn RPC Consumer 线程已停止!)"); /** 错误码 */ diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java index f022fc8c7f..93762d1f30 100644 --- a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java +++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java @@ -17,8 +17,6 @@ package org.apache.linkis.rpc.message.utils; -import org.springframework.cloud.openfeign.ribbon.LoadBalancerFeignClient; - import java.lang.reflect.Field; import feign.Request.Options; @@ -32,7 +30,7 @@ public class LoadBalancerOptionsUtils { public static Options getDefaultOptions() throws NoSuchFieldException, IllegalAccessException { if (null == DEFAULT_OPTIONS) { synchronized (locker) { - Class clazz = LoadBalancerFeignClient.class; + Class clazz = null; Field optionField = clazz.getDeclaredField("DEFAULT_OPTIONS"); optionField.setAccessible(true); Object o = optionField.get(clazz); diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala index 85beb87732..daea47106b 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala +++ 
b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala @@ -40,6 +40,10 @@ class MessageReceiver extends Receiver with Logging { logger.info("From caller {} get sync message", RPCUtils.getServiceInstanceFromSender(sender)) message match { case requestProtocol: RequestProtocol => + logger.info( + "With message requestProtocol class name:{}", + requestProtocol.getClass.getSimpleName + ) val methodExecuteWrapper = receiverMethodSearcher.getMethodExecuteWrappers(requestProtocol) messageExecutor.execute(requestProtocol, methodExecuteWrapper, sender) diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala index aa92605f9b..00fa019d99 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala @@ -19,7 +19,7 @@ package org.apache.linkis.rpc import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.utils.Logging -import org.apache.linkis.rpc.interceptor.{RPCInterceptor, RPCLoadBalancer, RPCServerLoader} +import org.apache.linkis.rpc.interceptor.{RPCInterceptor, RPCServerLoader} import org.apache.linkis.rpc.interceptor.common.BroadcastSenderBuilder import java.util @@ -30,7 +30,6 @@ private[rpc] object RPCSpringBeanCache extends Logging { import DataWorkCloudApplication.getApplicationContext private var beanNameToReceivers: util.Map[String, Receiver] = _ private var rpcInterceptors: Array[RPCInterceptor] = _ - private var rpcLoadBalancers: Array[RPCLoadBalancer] = _ private var rpcServerLoader: RPCServerLoader = _ private var senderBuilders: Array[BroadcastSenderBuilder] = _ private var rpcReceiveRestful: RPCReceiveRestful = _ @@ -83,18 +82,6 @@ private[rpc] object RPCSpringBeanCache extends Logging { rpcInterceptors } - private[rpc] def 
getRPCLoadBalancers: Array[RPCLoadBalancer] = { - if (rpcLoadBalancers == null) { - rpcLoadBalancers = getApplicationContext - .getBeansOfType(classOf[RPCLoadBalancer]) - .asScala - .map(_._2) - .toArray - .sortBy(_.order) - } - rpcLoadBalancers - } - private[rpc] def getRPCServerLoader: RPCServerLoader = { if (rpcServerLoader == null) { rpcServerLoader = getApplicationContext.getBean(classOf[RPCServerLoader]) diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala index bbe2d4acd3..dd52687f71 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala @@ -97,13 +97,17 @@ object RPCConfiguration { val SERVICE_SCAN_PACKAGE: String = CommonVars("wds.linkis.ms.service.scan.package", "org.apache.linkis").getValue - val ENABLE_SPRING_PARAMS: Boolean = - CommonVars("wds.linkis.rpc.spring.params.enable", false).getValue - // unit is HOUR val SENDER_CACHE_CLEANING_HOUR = CommonVars("linkis.rpc.sender.cache.cleaning.time.hour", 6).getValue + // unit is HOUR + val RPC_RETRY_NUMBER = + CommonVars("linkis.rpc.retry.number", 5).getValue + + val RPC_RETRY_PERIOD = + CommonVars[Long]("linkis.rpc.retry.period", 30000L).getValue + val REFLECTIONS = new Reflections( SERVICE_SCAN_PACKAGE, new MethodAnnotationsScanner(), @@ -114,6 +118,15 @@ object RPCConfiguration { val BDP_RPC_CACHE_CONF_EXPIRE_TIME: CommonVars[Long] = CommonVars("wds.linkis.rpc.cache.expire.time", 120000L) + val ENABLE_SPRING_PARAMS: Boolean = + CommonVars("wds.linkis.rpc.spring.params.enable", false).getValue + + val RPC_READ_TIME_OUT: Int = + CommonVars[Int]("spring.ribbon.ReadTimeout", 100000).getValue + + val RPC_CONNECT_TIME_OUT: Int = + CommonVars[Int]("spring.ribbon.ConnectTimeout", 100000).getValue + val 
CONTEXT_SERVICE_REQUEST_PREFIX = "contextservice" val CONTEXT_SERVICE_NAME: String = @@ -126,4 +139,17 @@ object RPCConfiguration { CONTEXT_SERVICE_APPLICATION_NAME.getValue } + val configOptions: feign.Request.Options = + new feign.Request.Options(RPC_CONNECT_TIME_OUT, RPC_READ_TIME_OUT, true) + + val RPC_OBJECT_PREFIX_WHITE_LIST: Array[String] = + CommonVars( + "wds.linkis.rpc.object.class.prefix.whitelist", + "org.apache.linkis,com.webank.wedatasphere,com.wedatasphere" + ).getValue + .split(",") + + val ENABLE_RPC_OBJECT_PREFIX_WHITE_LIST_CHECK: Boolean = + CommonVars("wds.linkis.rpc.object.class.prefix.whitelist.check.enable", true).getValue + } diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala index 8cab6d7d0f..e4259466b1 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala @@ -28,16 +28,11 @@ import java.text.MessageFormat import scala.collection.JavaConverters._ import scala.concurrent.duration.Duration -import com.netflix.loadbalancer.{DynamicServerListLoadBalancer, ILoadBalancer, Server} - trait RPCServerLoader { @throws[NoInstanceExistsException] def getOrRefreshServiceInstance(serviceInstance: ServiceInstance): Unit - @throws[NoInstanceExistsException] - def getServer(lb: ILoadBalancer, serviceInstance: ServiceInstance): Server - def getServiceInstances(applicationName: String): Array[ServiceInstance] } @@ -50,19 +45,12 @@ abstract class AbstractRPCServerLoader extends RPCServerLoader with Logging { def refreshAllServers(): Unit - protected def refreshServerList(lb: ILoadBalancer): Unit = { - refreshAllServers() - lb match { - case d: DynamicServerListLoadBalancer[_] => d.updateListOfServers() - case _ => - } - } - private def getOrRefresh( 
refresh: => Unit, refreshed: => Boolean, serviceInstance: ServiceInstance ): Unit = { + val instanceNotExists = new NoInstanceExistsException( APPLICATION_IS_NOT_EXISTS.getErrorCode, MessageFormat.format( @@ -101,15 +89,6 @@ abstract class AbstractRPCServerLoader extends RPCServerLoader with Logging { serviceInstance ) - override def getServer(lb: ILoadBalancer, serviceInstance: ServiceInstance): Server = { - getOrRefresh( - refreshServerList(lb), - lb.getAllServers.asScala.exists(_.getHostPort == serviceInstance.getInstance), - serviceInstance - ) - lb.getAllServers.asScala.find(_.getHostPort == serviceInstance.getInstance).get - } - def getDWCServiceInstance(serviceInstance: SpringCloudServiceInstance): ServiceInstance override def getServiceInstances(applicationName: String): Array[ServiceInstance] = diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala deleted file mode 100644 index 6cdac0df9f..0000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc.interceptor.common - -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.protocol.{InstanceProtocol, Protocol} -import org.apache.linkis.rpc.interceptor.RPCLoadBalancer - -import org.springframework.stereotype.Component - -import com.netflix.loadbalancer.ILoadBalancer - -@Component -class InstanceRPCLoadBalancer extends RPCLoadBalancer { - override val order: Int = 10 - - override def choose( - protocol: Protocol, - originService: ServiceInstance, - lb: ILoadBalancer - ): Option[ServiceInstance] = protocol match { - case instance: InstanceProtocol => - instance.choseInstance.map(ServiceInstance(originService.getApplicationName, _)) - case _ => None - } - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala index 4faeaa180e..0fc2a39100 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.RetryHandler import org.apache.linkis.protocol.RetryableProtocol +import org.apache.linkis.rpc.conf.RPCConfiguration import org.apache.linkis.rpc.exception.DWCRPCRetryException import org.apache.linkis.rpc.interceptor.{ RPCInterceptor, @@ -34,7 +35,7 @@ import org.apache.commons.lang3.StringUtils import org.springframework.stereotype.Component -import java.net.ConnectException +import java.net.{ConnectException, SocketTimeoutException} import feign.RetryableException @@ -42,37 
+43,28 @@ import feign.RetryableException class RetryableRPCInterceptor extends RPCInterceptor { override val order: Int = 20 -// private val commonRetryHandler = new RPCRetryHandler -// commonRetryHandler.setRetryInfo(new RetryableProtocol{}) -// -// private def isCommonRetryHandler(retry: RetryableProtocol): Boolean = retry.maxPeriod == commonRetryHandler.getRetryMaxPeriod && -// retry.period == commonRetryHandler.getRetryPeriod && retry.retryNum == commonRetryHandler.getRetryNum && -// (retry.retryExceptions.isEmpty || commonRetryHandler.getRetryExceptions.containsSlice(retry.retryExceptions)) - override def intercept( interceptorExchange: RPCInterceptorExchange, chain: RPCInterceptorChain ): Any = interceptorExchange.getProtocol match { case retry: RetryableProtocol => val retryName = retry.getClass.getSimpleName -// if(isCommonRetryHandler(retry)) commonRetryHandler.retry(chain.handle(interceptorExchange), retryName) -// else { val retryHandler = new RPCRetryHandler retryHandler.setRetryInfo(retry, chain) retryHandler.retry(chain.handle(interceptorExchange), retryName) -// } case _ => chain.handle(interceptorExchange) } class RPCRetryHandler extends RetryHandler { addRetryException(classOf[ConnectException]) addRetryException(classOf[RetryableException]) + addRetryException(classOf[SocketTimeoutException]) private var serviceInstance: Option[ServiceInstance] = None def setRetryInfo(retry: RetryableProtocol, chain: RPCInterceptorChain): Unit = { - setRetryNum(retry.retryNum) - setRetryPeriod(retry.period) - setRetryMaxPeriod(retry.maxPeriod) + setRetryNum(RPCConfiguration.RPC_RETRY_NUMBER) + setRetryPeriod(RPCConfiguration.RPC_RETRY_PERIOD) + setRetryMaxPeriod(RPCConfiguration.RPC_RETRY_PERIOD * 2) retry.retryExceptions.foreach(addRetryException) chain match { case s: ServiceInstanceRPCInterceptorChain => diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala 
b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala deleted file mode 100644 index b007838ea6..0000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.rpc.interceptor.common - -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.protocol.{Protocol, SingleInstanceProtocol} -import org.apache.linkis.rpc.interceptor.RPCLoadBalancer - -import org.apache.commons.lang3.StringUtils - -import org.springframework.stereotype.Component - -import com.netflix.loadbalancer.ILoadBalancer - -@Component -class SingleInstanceRPCLoadBalancer extends RPCLoadBalancer with Logging { - override val order: Int = 20 - - override def choose( - protocol: Protocol, - originService: ServiceInstance, - lb: ILoadBalancer - ): Option[ServiceInstance] = protocol match { - case _: SingleInstanceProtocol => - if (StringUtils.isEmpty(originService.getInstance)) synchronized { - if (StringUtils.isEmpty(originService.getInstance)) { - val servers = lb.getAllServers - val server = servers.get((math.random * servers.size()).toInt) - originService.setInstance(server.getHostPort) - logger.warn( - originService.getApplicationName + " choose " + server.getHostPort + " to build a single instance connection." 
- ) - } - } - Some(originService) - case _ => None - } - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala index 06f13c70a9..b8b41524d5 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala @@ -24,7 +24,7 @@ import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.autoconfigure.AutoConfigureBefore import org.springframework.cloud.client.discovery.DiscoveryClient import org.springframework.cloud.client.loadbalancer.LoadBalancedRetryFactory -import org.springframework.cloud.netflix.ribbon.SpringClientFactory +import org.springframework.cloud.loadbalancer.support.LoadBalancerClientFactory import org.springframework.cloud.openfeign.FeignClientsConfiguration import org.springframework.context.annotation.{Configuration, Import} @@ -48,7 +48,7 @@ class SpringCloudFeignConfigurationCache( private var discoveryClient: DiscoveryClient = _ @Autowired - private var clientFactory: SpringClientFactory = _ + private var loadBalancerClientFactory: LoadBalancerClientFactory = _ @Autowired(required = false) private var loadBalancedRetryFactory: LoadBalancedRetryFactory = _ @@ -56,7 +56,7 @@ class SpringCloudFeignConfigurationCache( @PostConstruct def storeFeignConfiguration(): Unit = { SpringCloudFeignConfigurationCache.client = client - SpringCloudFeignConfigurationCache.clientFactory = clientFactory + SpringCloudFeignConfigurationCache.loadBalancerClientFactory = loadBalancerClientFactory SpringCloudFeignConfigurationCache.loadBalancedRetryFactory = loadBalancedRetryFactory SpringCloudFeignConfigurationCache.contract = contract SpringCloudFeignConfigurationCache.decoder = 
decoder @@ -71,7 +71,9 @@ private[linkis] object SpringCloudFeignConfigurationCache { private[SpringCloudFeignConfigurationCache] var decoder: Decoder = _ private[SpringCloudFeignConfigurationCache] var contract: Contract = _ private[SpringCloudFeignConfigurationCache] var client: Client = _ - private[SpringCloudFeignConfigurationCache] var clientFactory: SpringClientFactory = _ + + private[SpringCloudFeignConfigurationCache] var loadBalancerClientFactory + : LoadBalancerClientFactory = _ private[SpringCloudFeignConfigurationCache] var loadBalancedRetryFactory : LoadBalancedRetryFactory = _ @@ -92,7 +94,7 @@ private[linkis] object SpringCloudFeignConfigurationCache { client } - private[rpc] def getClientFactory = clientFactory + private[rpc] def getLoadloadBalancerClientFactory = loadBalancerClientFactory private[rpc] def getLoadBalancedRetryFactory = loadBalancedRetryFactory private[linkis] def getDiscoveryClient = { diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala index ab4f2d7fe3..9bb2fdea96 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala @@ -18,38 +18,20 @@ package org.apache.linkis.rpc.sender import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.common.conf.{Configuration => DWCConfiguration} -import org.apache.linkis.protocol.Protocol +import org.apache.linkis.common.utils.Logging import org.apache.linkis.rpc.{BaseRPCSender, RPCMessageEvent, RPCSpringBeanCache} import org.apache.linkis.rpc.conf.RPCConfiguration -import org.apache.linkis.rpc.interceptor.{ - RPCInterceptor, - RPCLoadBalancer, - ServiceInstanceRPCInterceptorChain -} -import org.apache.linkis.rpc.message.utils.LoadBalancerOptionsUtils -import 
org.apache.linkis.rpc.transform.RPCConsumer -import org.apache.linkis.server.{BDPJettyServerHelper, Message} +import org.apache.linkis.rpc.interceptor.{RPCInterceptor, ServiceInstanceRPCInterceptorChain} +import org.apache.linkis.server.conf.ServerConfiguration import org.apache.commons.lang3.StringUtils -import org.springframework.cloud.netflix.ribbon.ServerIntrospector -import org.springframework.cloud.openfeign.ribbon.{ - CachingSpringLoadBalancerFactory, - FeignLoadBalancer, - LoadBalancerFeignClient -} - -import java.lang.reflect.Field - -import com.netflix.client.ClientRequest -import com.netflix.client.config.IClientConfig -import com.netflix.loadbalancer.reactive.LoadBalancerCommand import feign._ private[rpc] class SpringMVCRPCSender private[rpc] ( private[rpc] val serviceInstance: ServiceInstance -) extends BaseRPCSender(serviceInstance.getApplicationName) { +) extends BaseRPCSender(serviceInstance.getApplicationName) + with Logging { import SpringCloudFeignConfigurationCache._ @@ -59,77 +41,40 @@ private[rpc] class SpringMVCRPCSender private[rpc] ( override protected def createRPCInterceptorChain() = new ServiceInstanceRPCInterceptorChain(0, getRPCInterceptors, serviceInstance) - protected def getRPCLoadBalancers: Array[RPCLoadBalancer] = - RPCSpringBeanCache.getRPCLoadBalancers - + /** + * If it's a random call, you don't need to set target specify instance,need to specify target and + * do not set client setting + * @param builder + */ override protected def doBuilder(builder: Feign.Builder): Unit = { - val client = getClient.asInstanceOf[LoadBalancerFeignClient] - val newClient = new LoadBalancerFeignClient( - client.getDelegate, - new CachingSpringLoadBalancerFactory(getClientFactory) { - override def create(clientName: String): FeignLoadBalancer = { - val serverIntrospector = - getClientFactory.getInstance(clientName, classOf[ServerIntrospector]) - new FeignLoadBalancer( - getClientFactory.getLoadBalancer(clientName), - 
getClientFactory.getClientConfig(clientName), - serverIntrospector - ) { - override def customizeLoadBalancerCommandBuilder( - request: FeignLoadBalancer.RibbonRequest, - config: IClientConfig, - builder: LoadBalancerCommand.Builder[FeignLoadBalancer.RibbonResponse] - ): Unit = { - val instance = - if (getRPCLoadBalancers.isEmpty) None - else { - val requestBody = SpringMVCRPCSender.getRequest(request).body() - val requestStr = new String(requestBody, DWCConfiguration.BDP_ENCODING.getValue) - val obj = RPCConsumer.getRPCConsumer.toObject( - BDPJettyServerHelper.gson.fromJson(requestStr, classOf[Message]) - ) - obj match { - case protocol: Protocol => - var serviceInstance: Option[ServiceInstance] = None - for (lb <- getRPCLoadBalancers if serviceInstance.isEmpty) - serviceInstance = lb.choose( - protocol, - SpringMVCRPCSender.this.serviceInstance, - getLoadBalancer - ) - serviceInstance.foreach(f => - logger.info( - "origin serviceInstance: " + SpringMVCRPCSender.this.serviceInstance + ", chose serviceInstance: " + f - ) - ) // TODO just for test - serviceInstance - case _ => None - } - } - instance - .orElse(Option(SpringMVCRPCSender.this.serviceInstance)) - .filter(s => StringUtils.isNotBlank(s.getInstance)) - .foreach { serviceInstance => - val server = RPCSpringBeanCache.getRPCServerLoader - .getServer(getLoadBalancer, serviceInstance) - builder.withServer(server) - } - } - } + if (serviceInstance != null && StringUtils.isNotBlank(serviceInstance.getInstance)) { + builder.requestInterceptor(new RequestInterceptor() { + def apply(template: RequestTemplate): Unit = { + template.target( + s"http://${serviceInstance.getInstance}${ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue}" + ) } - }, - getClientFactory - ) - if (RPCConfiguration.ENABLE_SPRING_PARAMS) { - builder.options(LoadBalancerOptionsUtils.getDefaultOptions) + }) } super.doBuilder(builder) - builder - .contract(getContract) - .encoder(getEncoder) - .decoder(getDecoder) - .client(newClient) - 
.requestInterceptor(getRPCTicketIdRequestInterceptor) + if (RPCConfiguration.ENABLE_SPRING_PARAMS) { + builder.options(RPCConfiguration.configOptions) + } + if (StringUtils.isBlank(serviceInstance.getInstance)) { + builder + .contract(getContract) + .encoder(getEncoder) + .decoder(getDecoder) + .client(getClient) + .requestInterceptor(getRPCTicketIdRequestInterceptor) + } else { + builder + .contract(getContract) + .encoder(getEncoder) + .decoder(getDecoder) + .requestInterceptor(getRPCTicketIdRequestInterceptor) + } + } /** @@ -160,18 +105,3 @@ private[rpc] class SpringMVCRPCSender private[rpc] ( } else s"RPCSender($getApplicationName, ${serviceInstance.getInstance})" } - -private object SpringMVCRPCSender { - private var requestField: Field = _ - - def getRequest(req: ClientRequest): Request = { - if (requestField == null) synchronized { - if (requestField == null) { - requestField = req.getClass.getDeclaredField("request") - requestField.setAccessible(true) - } - } - requestField.get(req).asInstanceOf[Request] - } - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala index e7d48305ac..5ee3b1ca48 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala @@ -25,13 +25,14 @@ import org.apache.linkis.rpc.sender.{SpringCloudFeignConfigurationCache, SpringM import org.apache.commons.lang3.StringUtils +import org.springframework.cloud.client.loadbalancer.RetryableStatusCodeException + import java.lang.reflect.UndeclaredThrowableException import java.net.ConnectException import java.util.Locale import scala.collection.JavaConverters._ -import com.netflix.client.ClientException import feign.RetryableException object RPCUtils { @@ -53,11 +54,10 @@ object RPCUtils { } case t: RuntimeException => t.getCause match { 
- case client: ClientException => - StringUtils.isNotBlank(client.getErrorMessage) && - client.getErrorMessage.contains( - "Load balancer does not have available server for client" - ) + // case client: ClientException => + case client: RetryableStatusCodeException => + StringUtils.isNotBlank(client.getMessage) && + client.getMessage.contains("Load balancer does not have available server for client") case _ => false } case _ => false diff --git a/linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java b/linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java deleted file mode 100644 index d265371d60..0000000000 --- a/linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.rpc.message.utils; - -import feign.Request; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class LoadBalancerOptionsUtilsTest { - - @Test - @DisplayName("getDefaultOptionsTest") - public void getDefaultOptionsTest() throws NoSuchFieldException, IllegalAccessException { - - Request.Options defaultOptions = LoadBalancerOptionsUtils.getDefaultOptions(); - Assertions.assertNotNull(defaultOptions); - } -} diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala index de2b81bcaa..8126ac8847 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala @@ -17,14 +17,15 @@ package org.apache.linkis.scheduler -import org.apache.linkis.common.utils.Utils +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.scheduler.conf.SchedulerConfiguration import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ import org.apache.linkis.scheduler.exception.SchedulerErrorException import org.apache.linkis.scheduler.queue.SchedulerEvent import org.apache.commons.lang3.StringUtils -abstract class AbstractScheduler extends Scheduler { +abstract class AbstractScheduler extends Scheduler with Logging { override def init(): Unit = {} override def start(): Unit = {} @@ -52,6 +53,14 @@ abstract class AbstractScheduler extends Scheduler { val group = getSchedulerContext.getOrCreateGroupFactory.getOrCreateGroup(event) val consumer = getSchedulerContext.getOrCreateConsumerManager.getOrCreateConsumer(group.getGroupName) + logger.info( + s"Consumer ${consumer.getGroup.getGroupName} running size ${consumer.getRunningSize} waiting 
size ${consumer.getWaitingSize}" + ) + if (consumer.getWaitingSize >= SchedulerConfiguration.MAX_GROUP_ALTER_WAITING_SIZE) { + logger.warn( + s"Group waiting size exceed max alter waiting size ${consumer.getWaitingSize} group name ${consumer.getGroup.getGroupName}" + ) + } val index = consumer.getConsumeQueue.offer(event) index.map(getEventId(_, group.getGroupName)).foreach(event.setId) if (index.isEmpty) { diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala index 8fd6f1c6f0..e3b76ac4e7 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala @@ -25,12 +25,15 @@ object SchedulerConfiguration { CommonVars("wds.linkis.fifo.consumer.auto.clear.enabled", true) val FIFO_CONSUMER_MAX_IDLE_TIME = - CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("1h")).getValue.toLong + CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("10m")).getValue.toLong val FIFO_CONSUMER_IDLE_SCAN_INTERVAL = - CommonVars("wds.linkis.fifo.consumer.idle.scan.interval", new TimeType("2h")) + CommonVars("wds.linkis.fifo.consumer.idle.scan.interval", new TimeType("30m")) val FIFO_CONSUMER_IDLE_SCAN_INIT_TIME = CommonVars("wds.linkis.fifo.consumer.idle.scan.init.time", new TimeType("1s")) + val MAX_GROUP_ALTER_WAITING_SIZE = + CommonVars("linkis.fifo.consumer.group.max.alter.waiting.size", 1000).getValue + } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala index 6e9ecbd26f..b123682b56 100644 --- 
a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala @@ -23,11 +23,18 @@ abstract class AbstractGroup extends Group { private var _status: GroupStatus = _ private var maxRunningJobs: Int = _ + private var maxAllowRunningJobs: Int = 0 private var maxAskExecutorTimes: Long = 0L def setMaxRunningJobs(maxRunningJobs: Int): Unit = this.maxRunningJobs = maxRunningJobs def getMaxRunningJobs: Int = maxRunningJobs + def setMaxAllowRunningJobs(maxAllowRunningJobs: Int): Unit = this.maxAllowRunningJobs = + maxAllowRunningJobs + + def getMaxAllowRunningJobs: Int = + if (maxAllowRunningJobs <= 0) maxRunningJobs else Math.min(maxAllowRunningJobs, maxRunningJobs) + def setMaxAskExecutorTimes(maxAskExecutorTimes: Long): Unit = this.maxAskExecutorTimes = maxAskExecutorTimes diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala index 14c9061777..7761a9f33b 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala @@ -21,6 +21,8 @@ abstract class ConsumeQueue { def remove(event: SchedulerEvent): Unit def getWaitingEvents: Array[SchedulerEvent] def size: Int + + def waitingSize: Int def isEmpty: Boolean def isFull: Boolean def clearAll(): Unit diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala index 165a274362..539a2a4b1f 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala +++ 
b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala @@ -38,6 +38,12 @@ abstract class Consumer(schedulerContext: SchedulerContext, executeService: Exec def getRunningEvents: Array[SchedulerEvent] + def getMaxRunningEvents: Int + + def getRunningSize: Int + + def getWaitingSize: Int + def start(): Unit def shutdown(): Unit = { diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala index 2087153813..d513ecc050 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala @@ -196,23 +196,15 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg ): Unit = toState match { case Inited => jobListener.foreach(_.onJobInited(this)) - // TODO Add event(加事件) case Scheduled => jobListener.foreach(_.onJobScheduled(this)) - logListener.foreach(_.onLogUpdate(this, LogUtils.generateInfo("job is scheduled."))) - // TODO Add event(加事件) case Running => jobListener.foreach(_.onJobRunning(this)) - logListener.foreach(_.onLogUpdate(this, LogUtils.generateInfo("job is running."))) - // TODO job start event case WaitForRetry => jobListener.foreach(_.onJobWaitForRetry(this)) case _ => jobDaemon.foreach(_.kill()) jobListener.foreach(_.onJobCompleted(this)) -// if(getJobInfo != null) logListener.foreach(_.onLogUpdate(this, getJobInfo.getMetric)) - logListener.foreach(_.onLogUpdate(this, LogUtils.generateInfo("job is completed."))) - // TODO job end event } protected def transitionCompleted(executeCompleted: CompletedExecuteResponse): Unit = { @@ -303,6 +295,7 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg } override def run(): Unit = { + Thread.currentThread().setName(s"Job_${toString}_Thread") if 
(!isScheduled || interrupt) return startTime = System.currentTimeMillis Utils.tryAndWarn(transition(Running)) @@ -351,6 +344,16 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg } override def toString: String = if (StringUtils.isNotBlank(getName)) getName else getId + + /** + * clear job memory + */ + def clear(): Unit = { + logger.info(s" clear job base info $getId") + this.executor = null + this.jobDaemon = null + } + } /** diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala index 8bea7e52b1..c18f18de12 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala @@ -109,7 +109,7 @@ class LoopArrayQueue(var group: Group) extends ConsumeQueue with Logging { max } - def waitingSize: Int = if (takeIndex <= realSize) size + override def waitingSize: Int = if (takeIndex <= realSize) size else { val length = size - takeIndex + realSize if (length < 0) 0 else length diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala index 4edc1d5d17..26087d99f0 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala @@ -38,4 +38,8 @@ object SchedulerEventState extends Enumeration { SchedulerEventState.withName(jobState) ) + def isInitedByStr(jobState: String): Boolean = SchedulerEventState.withName(jobState) == Inited + + def isRunningByStr(jobState: String): Boolean = 
isRunning(SchedulerEventState.withName(jobState)) + } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala index d541d8a2eb..b4ffbfa4e3 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala @@ -21,12 +21,14 @@ import org.apache.linkis.common.exception.{ErrorException, WarnException} import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.Utils import org.apache.linkis.scheduler.SchedulerContext +import org.apache.linkis.scheduler.conf.SchedulerConfiguration import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ import org.apache.linkis.scheduler.exception.SchedulerErrorException import org.apache.linkis.scheduler.executer.Executor import org.apache.linkis.scheduler.future.{BDPFuture, BDPFutureTask} import org.apache.linkis.scheduler.queue._ +import java.util import java.util.concurrent.{ExecutorService, Future} import scala.beans.BeanProperty @@ -122,9 +124,10 @@ class FIFOUserConsumer( } var event: Option[SchedulerEvent] = getWaitForRetryEvent if (event.isEmpty) { - val completedNums = runningJobs.filter(job => job == null || job.isCompleted) - if (completedNums.length < 1) { - Utils.tryQuietly(Thread.sleep(1000)) + val maxAllowRunningJobs = fifoGroup.getMaxAllowRunningJobs + val currentRunningJobs = runningJobs.count(e => e != null && !e.isCompleted) + if (maxAllowRunningJobs <= currentRunningJobs) { + Utils.tryQuietly(Thread.sleep(1000)) // TODO 还可以优化,通过实现JobListener进行优化 return } while (event.isEmpty) { @@ -176,6 +179,9 @@ class FIFOUserConsumer( totalDuration ) job.consumerFuture = null + logger.info( + s"FIFOUserConsumer 
${getGroup.getGroupName} running size ${getRunningSize} waiting size ${getWaitingSize}" + ) executor.foreach { executor => job.setExecutor(executor) job.future = executeService.submit(job) @@ -207,6 +213,19 @@ class FIFOUserConsumer( runningJobs(index) = job } + protected def scanAllRetryJobsAndRemove(): util.List[Job] = { + val jobs = new util.ArrayList[Job]() + for (index <- runningJobs.indices) { + val job = runningJobs(index) + if (job != null && job.isJobCanRetry) { + jobs.add(job) + runningJobs(index) = null + logger.info(s"Job $job can retry, remove from runningJobs") + } + } + jobs + } + override def shutdown(): Unit = { future.cancel(true) val waitEvents = queue.getWaitingEvents @@ -217,6 +236,8 @@ class FIFOUserConsumer( case _ => } } + // clear cache + queue.clearAll() this.runningJobs.foreach { job => if (job != null && !job.isCompleted) { @@ -238,4 +259,14 @@ class FIFOUserConsumer( this.queue.peek.isEmpty && !this.runningJobs.exists(job => job != null && !job.isCompleted) } + override def getMaxRunningEvents: Int = this.maxRunningJobsNum + + override def getRunningSize: Int = { + runningJobs.count(job => job != null && !job.isCompleted) + } + + override def getWaitingSize: Int = { + queue.waitingSize + } + } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala index 396b6fb315..c64158e6e8 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala @@ -27,6 +27,11 @@ import java.util.concurrent.{ExecutorService, TimeUnit} import scala.collection.mutable +/** + * @param maxParallelismUsers + * Consumer Thread pool size is:5 * 
maxParallelismUsers + 1 + * @param schedulerName + */ class ParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) extends ConsumerManager with Logging { @@ -126,8 +131,8 @@ class ParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) override def destroyConsumer(groupName: String): Unit = consumerGroupMap.get(groupName).foreach { tmpConsumer => - tmpConsumer.shutdown() - consumerGroupMap.remove(groupName) + Utils.tryAndWarn(tmpConsumer.shutdown()) + Utils.tryAndWarn(consumerGroupMap.remove(groupName)) consumerListener.foreach(_.onConsumerDestroyed(tmpConsumer)) logger.warn(s"Consumer of group ($groupName) in $schedulerName is destroyed.") } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala index 99fa57bad4..5b060a994f 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala @@ -25,8 +25,14 @@ class ParallelSchedulerContextImpl(override val maxParallelismUsers: Int) extends FIFOSchedulerContextImpl(maxParallelismUsers) with Logging { + /** + * Set the number of consumption groups supported The number of concurrency supported by each + * group is determined by + * org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroupFactory#setDefaultMaxRunningJobs(int) + */ override protected def createGroupFactory(): GroupFactory = { val groupFactory = new ParallelGroupFactory + groupFactory.setParallelism(maxParallelismUsers) groupFactory } diff --git a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala 
b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala index 97172dab94..2e7069d6fe 100644 --- a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala +++ b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala @@ -34,9 +34,8 @@ class SchedulerConfigurationTest { fifoConsumerAutoClearEnabled ) val fifoConsumerMaxIdleTime = - CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("1h")).getValue.toLong + CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("10m")).getValue.toLong assertEquals(SchedulerConfiguration.FIFO_CONSUMER_MAX_IDLE_TIME, fifoConsumerMaxIdleTime) - assertEquals(SchedulerConfiguration.FIFO_CONSUMER_IDLE_SCAN_INTERVAL.getValue.toLong, 7200000) val fifoConsumerIdleScanInitTime = CommonVars("wds.linkis.fifo.consumer.idle.scan.init.time", new TimeType("1s")).getValue.toLong assertEquals( diff --git a/linkis-commons/linkis-storage/pom.xml b/linkis-commons/linkis-storage/pom.xml index def795ebd8..52f9ede595 100644 --- a/linkis-commons/linkis-storage/pom.xml +++ b/linkis-commons/linkis-storage/pom.xml @@ -76,7 +76,12 @@ com.github.pjfanning excel-streaming-reader - 4.0.5 + 4.3.0 + + + + org.apache.commons + commons-compress @@ -99,6 +104,52 @@ aws-java-sdk-s3 1.12.261 + + + org.apache.parquet + parquet-avro + ${parquet-avro.version} + ${storage.parquet.scope} + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + ${storage.parquet.scope} + + + log4j + log4j + + + org.slf4j + slf4j-log4j12 + + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j + + + + + org.apache.orc + orc-core + ${orc-core.version} + nohive + ${storage.orc.scope} + + + org.apache.hive + hive-storage-api + + + + diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java deleted file mode 100644 index 34a92ead85..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.factory.BuildFactory; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.text.MessageFormat; -import java.util.Map; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_FILE; - -public class FSFactory { - private static final Map buildClasses = - StorageUtils.loadClass( - StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue(), t -> t.fsName()); - - public static BuildFactory getBuildFactory(String fsName) { - if (!buildClasses.containsKey(fsName)) { - throw new StorageWarnException( - UNSUPPORTED_FILE.getErrorCode(), - MessageFormat.format(UNSUPPORTED_FILE.getErrorDesc(), fsName)); - } - return buildClasses.get(fsName); - } - - public static Fs getFs(String fsType, String proxyUser) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fsType).getFs(user, proxyUser); - } - - public static Fs getFs(String fsType) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fsType).getFs(user, user); - } - - /** - * 1. If this machine has shared storage, the file:// type FS obtained here is the FS of the - * process user. 2. If this machine does not have shared storage, then the file:// type FS - * obtained is the proxy to the Remote (shared storage machine root) FS. 3. If it is HDFS, it - * returns the FS of the process user. 1、如果这台机器装有共享存储则这里获得的file://类型的FS为该进程用户的FS - * 2、如果这台机器没有共享存储则获得的file://类型的FS为代理到Remote(共享存储机器root)的FS 3、如果是HDFS则返回的就是该进程用户的FS - * - * @param fsPath - * @return - */ - public static Fs getFs(FsPath fsPath) { - return getFs(fsPath.getFsType()); - } - - /** - * 1. 
If the process user is passed and the proxy user and the process user are consistent, the - * file:// type FS is the FS of the process user (the shared storage exists) 2. If the process - * user is passed and the proxy user and the process user are consistent and there is no shared - * storage, the file:// type FS is the proxy to the remote (shared storage machine root) FS 3. If - * the passed proxy user and process user are consistent, the hdfs type is the FS of the process - * user. 4. If the proxy user and the process user are inconsistent, the hdfs type is the FS after - * the proxy. - * - * @param fsPath - * @param proxyUser - * @return - */ - public static Fs getFsByProxyUser(FsPath fsPath, String proxyUser) { - return getFs(fsPath.getFsType(), proxyUser); - } - - public Fs getFSByLabel(String fs, String label) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fs).getFs(user, user, label); - } - - public Fs getFSByLabelAndUser(String fs, String label, String proxy) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fs).getFs(user, proxy, label); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java deleted file mode 100644 index 74950c15fe..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.conf; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.utils.ByteTimeUtils; - -import org.apache.commons.lang3.StringUtils; - -public class LinkisStorageConf { - private static final Object CONF_LOCK = new Object(); - - public static final String HDFS_FILE_SYSTEM_REST_ERRS = - CommonVars.apply( - "wds.linkis.hdfs.rest.errs", - ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*") - .getValue(); - - public static final String ROW_BYTE_MAX_LEN_STR = - CommonVars.apply("wds.linkis.resultset.row.max.str", "2m").getValue(); - - public static final long ROW_BYTE_MAX_LEN = ByteTimeUtils.byteStringAsBytes(ROW_BYTE_MAX_LEN_STR); - - public static final String FILE_TYPE = - CommonVars.apply( - "wds.linkis.storage.file.type", - "dolphin,sql,scala,py,hql,python,out,log,text,txt,sh,jdbc,ngql,psql,fql,tsql") - .getValue(); - - private static volatile String[] fileTypeArr = null; - - private static String[] fileTypeArrParser(String fileType) { - if (StringUtils.isBlank(fileType)) { - return new String[0]; - } else { - return fileType.split(","); - } - } - - public static String[] getFileTypeArr() { - if (fileTypeArr == null) { - synchronized (CONF_LOCK) { - if (fileTypeArr == null) { - fileTypeArr = fileTypeArrParser(FILE_TYPE); - } - } - } - return fileTypeArr; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java deleted 
file mode 100644 index d98be40337..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.csv; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageCSVWriter extends CSVFsWriter { - private static final Logger logger = LoggerFactory.getLogger(StorageCSVWriter.class); - - private final String charset; - private final String separator; - private final boolean quoteRetouchEnable; - private final OutputStream outputStream; - - private final String delimiter; - private final StringBuilder buffer; - - public StorageCSVWriter( - 
String charset, String separator, boolean quoteRetouchEnable, OutputStream outputStream) { - this.charset = charset; - this.separator = separator; - this.quoteRetouchEnable = quoteRetouchEnable; - this.outputStream = outputStream; - - if (StringUtils.isBlank(separator)) { - this.delimiter = "\t"; - } else { - switch (separator) { - case "t": - this.delimiter = "\t"; - break; - default: - this.delimiter = separator; - break; - } - } - this.buffer = new StringBuilder(50000); - } - - @Override - public String getCharset() { - return charset; - } - - @Override - public String getSeparator() { - return separator; - } - - @Override - public boolean isQuoteRetouchEnable() { - return quoteRetouchEnable; - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - Column[] columns = ((TableMetaData) metaData).getColumns(); - String[] head = Stream.of(columns).map(Column::getColumnName).toArray(String[]::new); - write(head); - } - - private String compact(String[] row) { - String quotationMarks = "\""; - String dealNewlineSymbolMarks = "\n"; - StringBuilder rowBuilder = new StringBuilder(); - for (String value : row) { - String decoratedValue = value; - if (StringUtils.isNotBlank(value)) { - if (quoteRetouchEnable) { - decoratedValue = quotationMarks + value.replaceAll(quotationMarks, "") + quotationMarks; - } - decoratedValue = decoratedValue.replaceAll(dealNewlineSymbolMarks, " "); - logger.debug("decorateValue with input: {} output: {} ", value, decoratedValue); - } - rowBuilder.append(decoratedValue).append(delimiter); - } - if (rowBuilder.length() > 0 && rowBuilder.toString().endsWith(delimiter)) { - int index = rowBuilder.lastIndexOf(delimiter); - rowBuilder.delete(index, index + delimiter.length()); - } - rowBuilder.append("\n"); - if (logger.isDebugEnabled()) { - logger.debug("delimiter:" + delimiter); - } - return rowBuilder.toString(); - } - - private void write(String[] row) throws IOException { - String content = compact(row); - if 
(buffer.length() + content.length() > 49500) { - IOUtils.write(buffer.toString().getBytes(charset), outputStream); - buffer.setLength(0); - } - buffer.append(content); - } - - @Override - public void addRecord(Record record) throws IOException { - Object[] rows = ((TableRecord) record).row; - String[] body = - Stream.of(rows).map(dataType -> DataType.valueToString(dataType)).toArray(String[]::new); - write(body); - } - - @Override - public void flush() throws IOException { - IOUtils.write(buffer.toString().getBytes(charset), outputStream); - buffer.setLength(0); - } - - @Override - public void close() throws IOException { - flush(); - IOUtils.closeQuietly(outputStream); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java deleted file mode 100644 index 6808f693ec..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.domain; - -import java.math.BigDecimal; -import java.sql.Date; -import java.sql.Timestamp; -import java.util.Optional; -import java.util.regex.Pattern; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.domain.Dolphin.LINKIS_NULL; - -public enum DataType { - NullType("void", 0), - StringType("string", 12), - BooleanType("boolean", 16), - TinyIntType("tinyint", -6), - ShortIntType("short", 5), - IntType("int", 4), - LongType("long", -5), - BigIntType("bigint", -5), - FloatType("float", 6), - DoubleType("double", 8), - CharType("char", 1), - VarcharType("varchar", 12), - DateType("date", 91), - TimestampType("timestamp", 93), - BinaryType("binary", -2), - DecimalType("decimal", 3), - ArrayType("array", 2003), - MapType("map", 2000), - ListType("list", 2001), - StructType("struct", 2002), - BigDecimalType("bigdecimal", 3); - - private final String typeName; - private final int javaSQLType; - - DataType(String typeName, int javaSQLType) { - this.typeName = typeName; - this.javaSQLType = javaSQLType; - } - - private static Logger logger = LoggerFactory.getLogger(DataType.class); - - public static final String NULL_VALUE = "NULL"; - public static final String LOWCASE_NULL_VALUE = "null"; - - // TODO Change to fine-grained regular expressions(改为精细化正则表达式) - public static final Pattern DECIMAL_REGEX = - Pattern.compile("^decimal\\(\\s*\\d*\\s*,\\s*\\d*\\s*\\)"); - - public static final Pattern SHORT_REGEX = Pattern.compile("^short.*"); - public static final Pattern INT_REGEX = Pattern.compile("^int.*"); - public static final Pattern LONG_REGEX = Pattern.compile("^long.*"); - public static final Pattern BIGINT_REGEX = Pattern.compile("^bigint.*"); - public static final Pattern FLOAT_REGEX = Pattern.compile("^float.*"); - public static final Pattern DOUBLE_REGEX = Pattern.compile("^double.*"); - - public static final Pattern VARCHAR_REGEX = Pattern.compile("^varchar.*"); - public 
static final Pattern CHAR_REGEX = Pattern.compile("^char.*"); - - public static final Pattern ARRAY_REGEX = Pattern.compile("array.*"); - - public static final Pattern MAP_REGEX = Pattern.compile("map.*"); - - public static final Pattern LIST_REGEX = Pattern.compile("list.*"); - - public static final Pattern STRUCT_REGEX = Pattern.compile("struct.*"); - - public static DataType toDataType(String dataType) { - if (dataType.equals("void") || dataType.equals("null")) { - return DataType.NullType; - } else if (dataType.equals("string")) { - return DataType.StringType; - } else if (dataType.equals("boolean")) { - return DataType.BooleanType; - } else if (SHORT_REGEX.matcher(dataType).matches()) { - return DataType.ShortIntType; - } else if (LONG_REGEX.matcher(dataType).matches()) { - return DataType.LongType; - } else if (BIGINT_REGEX.matcher(dataType).matches()) { - return DataType.BigIntType; - } else if (INT_REGEX.matcher(dataType).matches() - || dataType.equals("integer") - || dataType.equals("smallint")) { - return DataType.IntType; - } else if (FLOAT_REGEX.matcher(dataType).matches()) { - return DataType.FloatType; - } else if (DOUBLE_REGEX.matcher(dataType).matches()) { - return DataType.DoubleType; - } else if (VARCHAR_REGEX.matcher(dataType).matches()) { - return DataType.VarcharType; - } else if (CHAR_REGEX.matcher(dataType).matches()) { - return DataType.CharType; - } else if (dataType.equals("date")) { - return DataType.DateType; - } else if (dataType.equals("timestamp")) { - return DataType.TimestampType; - } else if (dataType.equals("binary")) { - return DataType.BinaryType; - } else if (dataType.equals("decimal") || DECIMAL_REGEX.matcher(dataType).matches()) { - return DataType.DecimalType; - } else if (ARRAY_REGEX.matcher(dataType).matches()) { - return DataType.ArrayType; - } else if (MAP_REGEX.matcher(dataType).matches()) { - return DataType.MapType; - } else if (LIST_REGEX.matcher(dataType).matches()) { - return DataType.ListType; - } else if 
(STRUCT_REGEX.matcher(dataType).matches()) { - return DataType.StructType; - } else { - return DataType.StringType; - } - } - - public static Object toValue(DataType dataType, String value) { - - Object result = null; - if (isLinkisNull(value)) { - return result; - } - try { - switch (dataType) { - case NullType: - result = null; - break; - case StringType: - case CharType: - case VarcharType: - case StructType: - case ListType: - case ArrayType: - case MapType: - result = value; - break; - case BooleanType: - result = isNumberNull(value) ? null : Boolean.valueOf(value); - break; - case ShortIntType: - result = isNumberNull(value) ? null : Short.valueOf(value); - break; - case IntType: - result = isNumberNull(value) ? null : Integer.valueOf(value); - break; - case LongType: - case BigIntType: - result = isNumberNull(value) ? null : Long.valueOf(value); - break; - case FloatType: - result = isNumberNull(value) ? null : Float.valueOf(value); - break; - case DoubleType: - result = isNumberNull(value) ? null : Double.valueOf(value); - break; - case DecimalType: - result = isNumberNull(value) ? null : new BigDecimal(value); - break; - case DateType: - result = isNumberNull(value) ? null : Date.valueOf(value); - break; - case TimestampType: - result = - isNumberNull(value) - ? null - : Optional.of(value) - .map(Timestamp::valueOf) - .map(Timestamp::toString) - .map(s -> s.endsWith(".0") ? s.substring(0, s.length() - 2) : s) - .orElse(null); - break; - case BinaryType: - result = isNull(value) ? 
null : value.getBytes(); - break; - default: - result = value; - } - } catch (Exception e) { - logger.debug("Failed to {} switch to dataType:", value, e); - result = value; - } - return result; - } - - public static boolean isLinkisNull(String value) { - return value == null || value.equals(LINKIS_NULL); - } - - public static boolean isNull(String value) { - return value == null || value.equals(NULL_VALUE) || value.trim().equals(""); - } - - public static boolean isNumberNull(String value) { - return value == null || value.equalsIgnoreCase(NULL_VALUE) || value.trim().equals(""); - } - - public static String valueToString(Object value) { - if (value == null) { - return LOWCASE_NULL_VALUE; - } else if (value instanceof BigDecimal) { - return ((BigDecimal) value).toPlainString(); - } else { - return value.toString(); - } - } - - public String getTypeName() { - return typeName; - } - - public int getJavaSQLType() { - return javaSQLType; - } - - @Override - public String toString() { - return typeName; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java deleted file mode 100644 index 35c71295e4..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.nio.charset.Charset; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FAILED_TO_READ_INTEGER; - -public class Dolphin { - private static final Logger logger = LoggerFactory.getLogger(Dolphin.class); - - public static final Charset CHAR_SET = - Charset.forName(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue()); - public static final String MAGIC = "dolphin"; - - public static byte[] MAGIC_BYTES = new byte[0]; - - static { - try { - MAGIC_BYTES = MAGIC.getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue()); - } catch (UnsupportedEncodingException e) { - logger.warn("Dolphin getBytes failed", e); - } - } - - public static final int MAGIC_LEN = MAGIC_BYTES.length; - - public static final String DOLPHIN_FILE_SUFFIX = ".dolphin"; - - public static final String COL_SPLIT = ","; - public static final byte[] COL_SPLIT_BYTES = COL_SPLIT.getBytes(Charset.forName("utf-8")); - public static final int COL_SPLIT_LEN = COL_SPLIT_BYTES.length; - - public static final String NULL = "NULL"; - public static final byte[] NULL_BYTES = "NULL".getBytes(Charset.forName("utf-8")); - - public static final String LINKIS_NULL = 
"LINKIS_NULL"; - public static final byte[] LINKIS_NULL_BYTES = LINKIS_NULL.getBytes(Charset.forName("utf-8")); - - public static final int INT_LEN = 10; - - public static final int FILE_EMPTY = 31; - - public static byte[] getBytes(Object value) { - return value.toString().getBytes(CHAR_SET); - } - - /** - * Convert a bytes array to a String content 将bytes数组转换为String内容 - * - * @param bytes - * @param start - * @param len - * @return - */ - public static String getString(byte[] bytes, int start, int len) { - return new String(bytes, start, len, Dolphin.CHAR_SET); - } - - public static String toStringValue(String value) { - if (LINKIS_NULL.equals(value)) { - return NULL; - } else { - return value; - } - } - - /** - * Read an integer value that converts the array to a byte of length 10 bytes - * 读取整数值,该值为将数组转换为10字节长度的byte - * - * @param inputStream - * @return - * @throws IOException - */ - public static int readInt(InputStream inputStream) throws IOException { - byte[] bytes = new byte[INT_LEN + 1]; - if (StorageUtils.readBytes(inputStream, bytes, INT_LEN) != INT_LEN) { - throw new StorageWarnException( - FAILED_TO_READ_INTEGER.getErrorCode(), FAILED_TO_READ_INTEGER.getErrorDesc()); - } - return Integer.parseInt(getString(bytes, 0, INT_LEN)); - } - - /** - * Print integers at a fixed length(将整数按固定长度打印) - * - * @param value - * @return - */ - public static byte[] getIntBytes(int value) { - String str = Integer.toString(value); - StringBuilder res = new StringBuilder(); - for (int i = 0; i < INT_LEN - str.length(); i++) { - res.append("0"); - } - res.append(str); - return Dolphin.getBytes(res.toString()); - } - - public static String getType(InputStream inputStream) throws IOException { - byte[] bytes = new byte[100]; - int len = StorageUtils.readBytes(inputStream, bytes, Dolphin.MAGIC_LEN + INT_LEN); - if (len == -1) return null; - return getType(Dolphin.getString(bytes, 0, len)); - } - - public static String getType(String content) { - if (content.length() < 
MAGIC.length() || !content.substring(0, MAGIC.length()).equals(MAGIC)) { - throw new RuntimeException( - "File header type must be dolphin, content: " + content + " is not"); - } - return Integer.toString( - Integer.parseInt(content.substring(MAGIC.length(), MAGIC.length() + INT_LEN))); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java deleted file mode 100644 index c1e16e223a..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -/** - * Engine unique Id(engine唯一的Id) - * - *

Fs type(fs类型) - * - *

Create a user to start the corresponding jvm user(创建用户为对应启动的jvm用户) - * - *

Proxy user(代理用户) - * - *

client Ip for whitelist control(ip用于白名单控制) - * - *

Method name called(调用的方法名) - * - *

Method parameter(方法参数) - */ -public class MethodEntity { - private long id; - private String fsType; - private String creatorUser; - private String proxyUser; - private String clientIp; - private String methodName; - private Object[] params; - - public MethodEntity( - long id, - String fsType, - String creatorUser, - String proxyUser, - String clientIp, - String methodName, - Object[] params) { - this.id = id; - this.fsType = fsType; - this.creatorUser = creatorUser; - this.proxyUser = proxyUser; - this.clientIp = clientIp; - this.methodName = methodName; - this.params = params; - } - - public long getId() { - return id; - } - - public void setId(long id) { - this.id = id; - } - - public String getFsType() { - return fsType; - } - - public void setFsType(String fsType) { - this.fsType = fsType; - } - - public String getCreatorUser() { - return creatorUser; - } - - public void setCreatorUser(String creatorUser) { - this.creatorUser = creatorUser; - } - - public String getProxyUser() { - return proxyUser; - } - - public void setProxyUser(String proxyUser) { - this.proxyUser = proxyUser; - } - - public String getClientIp() { - return clientIp; - } - - public void setClientIp(String clientIp) { - this.clientIp = clientIp; - } - - public String getMethodName() { - return methodName; - } - - public void setMethodName(String methodName) { - this.methodName = methodName; - } - - public Object[] getParams() { - return params; - } - - public void setParams(Object[] params) { - this.params = params; - } - - @Override - public String toString() { - return "id:" - + id - + ", methodName:" - + methodName - + ", fsType:" - + fsType - + ", creatorUser:" - + creatorUser - + ", proxyUser:" - + proxyUser - + ", clientIp:" - + clientIp; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java deleted file mode 
100644 index 777b756a7f..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -import java.lang.reflect.Type; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -/** - * Serialize MethodEntity to code 序列化MethodEntity为code - * - *

Serialized to code as a MethodEntity object 序列化为code为MethodEntity对象 - * - *

Serialize a java object as a string 序列化java对象为字符串 - * - *

Deserialize a string into a java object 将字符串解序列化为java对象 - */ -public class MethodEntitySerializer { - - private static final Gson gson = - new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create(); - - /** - * Serialized to code as a MethodEntity object 序列化为code为MethodEntity对象 - * - * @param code - * @return - */ - public static MethodEntity deserializer(String code) { - return gson.fromJson(code, MethodEntity.class); - } - - /** - * Serialize MethodEntity to code 序列化MethodEntity为code - * - * @param methodEntity - * @return - */ - public static String serializer(MethodEntity methodEntity) { - return gson.toJson(methodEntity); - } - - /** - * Serialize a java object as a string 序列化java对象为字符串 - * - * @param value - * @return - */ - public static String serializerJavaObject(Object value) { - return gson.toJson(value); - } - - /** - * Deserialize a string into a java object 将字符串解序列化为java对象 - * - * @param json - * @param classType - * @param - * @return - */ - public static T deserializerToJavaObject(String json, Class classType) { - return gson.fromJson(json, classType); - } - - public static T deserializerToJavaObject(String json, Type oType) { - return gson.fromJson(json, oType); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java index 9ca3425837..7187f72ce5 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java @@ -37,6 +37,14 @@ public enum LinkisStorageErrorCodeSummary implements LinkisErrorCode { 52004, "You must register IOMethodInterceptorCreator before you can use proxy mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)"), UNSUPPORTED_OPEN_FILE_TYPE(54001, 
"Unsupported open file type(不支持打开的文件类型)"), + + RESULT_COL_LENGTH(52003, "Col value length {0} exceed limit {1}"), + + RESULT_COLUMN_INDEX_OUT_OF_BOUNDS(52004, "Column index value {0} exceed limit {1}"), + + RESULT_ROW_LENGTH(520034, "Row value length {0} exceed limit {1}"), + + RESULT_COL_SIZE(520035, "Col size length {0} exceed limit {1}"), INVALID_CUSTOM_PARAMETER(65000, "Invalid custom parameter(不合法的自定义参数)"); /** 错误码 */ diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java deleted file mode 100644 index cd3969e048..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.excel; - -import org.apache.linkis.common.io.FsWriter; - -import java.io.OutputStream; - -public abstract class ExcelFsWriter extends FsWriter { - public abstract String getCharset(); - - public abstract String getSheetName(); - - public abstract String getDateFormat(); - - public abstract boolean isAutoFormat(); - - public static ExcelFsWriter getExcelFsWriter( - String charset, - String sheetName, - String dateFormat, - OutputStream outputStream, - boolean autoFormat) { - return new StorageExcelWriter(charset, sheetName, dateFormat, outputStream, autoFormat); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java deleted file mode 100644 index 98df7421f3..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.excel; - -import org.apache.poi.hssf.eventusermodel.*; -import org.apache.poi.hssf.eventusermodel.EventWorkbookBuilder.SheetRecordCollectingListener; -import org.apache.poi.hssf.eventusermodel.dummyrecord.LastCellOfRowDummyRecord; -import org.apache.poi.hssf.eventusermodel.dummyrecord.MissingCellDummyRecord; -import org.apache.poi.hssf.model.HSSFFormulaParser; -import org.apache.poi.hssf.record.*; -import org.apache.poi.hssf.usermodel.HSSFWorkbook; -import org.apache.poi.poifs.filesystem.POIFSFileSystem; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ExcelXlsReader implements HSSFListener { - private static final Logger logger = LoggerFactory.getLogger(ExcelXlsReader.class); - - private int minColumns = -1; - - private POIFSFileSystem fs; - - private InputStream inputStream; - - private int lastRowNumber; - - private int lastColumnNumber; - - /** Should we output the formula, or the value it has? 
*/ - private boolean outputFormulaValues = true; - - /** For parsing Formulas */ - private SheetRecordCollectingListener workbookBuildingListener; - - // excel2003Workbook(工作薄) - private HSSFWorkbook stubWorkbook; - - // Records we pick up as we process - private SSTRecord sstRecord; - - private FormatTrackingHSSFListener formatListener; - - // Table index(表索引) - private int sheetIndex = -1; - - private BoundSheetRecord[] orderedBSRs; - - @SuppressWarnings("unchecked") - private ArrayList boundSheetRecords = new ArrayList(); - - // For handling formulas with string results - private int nextRow; - - private int nextColumn; - - private boolean outputNextStringRecord; - - // Current line(当前行) - private int curRow = 0; - - // a container that stores row records(存储行记录的容器) - private List rowlist = new ArrayList(); - - @SuppressWarnings("unused") - private String sheetName; - - private IExcelRowDeal excelRowDeal; - - public void init(IExcelRowDeal excelRowDeal, InputStream inputStream) { - this.excelRowDeal = excelRowDeal; - this.inputStream = inputStream; - } - - /** - * Traverse all the sheets under excel 遍历excel下所有的sheet - * - * @throws IOException - */ - public void process() throws IOException { - this.fs = new POIFSFileSystem(this.inputStream); - MissingRecordAwareHSSFListener listener = new MissingRecordAwareHSSFListener(this); - formatListener = new FormatTrackingHSSFListener(listener); - HSSFEventFactory factory = new HSSFEventFactory(); - HSSFRequest request = new HSSFRequest(); - if (outputFormulaValues) { - request.addListenerForAllRecords(formatListener); - } else { - workbookBuildingListener = new SheetRecordCollectingListener(formatListener); - request.addListenerForAllRecords(workbookBuildingListener); - } - factory.processWorkbookEvents(request, fs); - } - - /** HSSFListener listener method, processing Record HSSFListener 监听方法,处理 Record */ - @Override - @SuppressWarnings("unchecked") - public void processRecord(Record record) { - int thisRow = -1; - int 
thisColumn = -1; - String thisStr = null; - String value = null; - switch (record.getSid()) { - case BoundSheetRecord.sid: - boundSheetRecords.add(record); - break; - case BOFRecord.sid: - BOFRecord br = (BOFRecord) record; - if (br.getType() == BOFRecord.TYPE_WORKSHEET) { - // Create a child workbook if needed(如果有需要,则建立子工作薄) - if (workbookBuildingListener != null && stubWorkbook == null) { - stubWorkbook = workbookBuildingListener.getStubHSSFWorkbook(); - } - - sheetIndex++; - if (orderedBSRs == null) { - orderedBSRs = BoundSheetRecord.orderByBofPosition(boundSheetRecords); - } - sheetName = orderedBSRs[sheetIndex].getSheetname(); - } - break; - - case SSTRecord.sid: - sstRecord = (SSTRecord) record; - break; - - case BlankRecord.sid: - BlankRecord brec = (BlankRecord) record; - thisRow = brec.getRow(); - thisColumn = brec.getColumn(); - thisStr = ""; - rowlist.add(thisColumn, thisStr); - break; - case BoolErrRecord.sid: // Cell is boolean(单元格为布尔类型) - BoolErrRecord berec = (BoolErrRecord) record; - thisRow = berec.getRow(); - thisColumn = berec.getColumn(); - thisStr = berec.getBooleanValue() + ""; - rowlist.add(thisColumn, thisStr); - break; - - case FormulaRecord.sid: // Cell is a formula type(单元格为公式类型) - FormulaRecord frec = (FormulaRecord) record; - thisRow = frec.getRow(); - thisColumn = frec.getColumn(); - if (outputFormulaValues) { - if (Double.isNaN(frec.getValue())) { - // Formula result is a string - // This is stored in the next record - outputNextStringRecord = true; - nextRow = frec.getRow(); - nextColumn = frec.getColumn(); - } else { - thisStr = formatListener.formatNumberDateCell(frec); - } - } else { - thisStr = - '"' - + HSSFFormulaParser.toFormulaString(stubWorkbook, frec.getParsedExpression()) - + '"'; - } - rowlist.add(thisColumn, thisStr); - break; - case StringRecord.sid: // a string of formulas in a cell(单元格中公式的字符串) - if (outputNextStringRecord) { - // String for formula - StringRecord srec = (StringRecord) record; - thisStr = 
srec.getString(); - thisRow = nextRow; - thisColumn = nextColumn; - outputNextStringRecord = false; - } - break; - case LabelRecord.sid: - LabelRecord lrec = (LabelRecord) record; - curRow = thisRow = lrec.getRow(); - thisColumn = lrec.getColumn(); - value = lrec.getValue().trim(); - value = value.equals("") ? " " : value; - this.rowlist.add(thisColumn, value); - break; - case LabelSSTRecord.sid: // Cell is a string type(单元格为字符串类型) - LabelSSTRecord lsrec = (LabelSSTRecord) record; - curRow = thisRow = lsrec.getRow(); - thisColumn = lsrec.getColumn(); - if (sstRecord == null) { - rowlist.add(thisColumn, " "); - } else { - value = sstRecord.getString(lsrec.getSSTIndex()).toString().trim(); - value = value.equals("") ? " " : value; - rowlist.add(thisColumn, value); - } - break; - case NumberRecord.sid: // Cell is a numeric type(单元格为数字类型) - NumberRecord numrec = (NumberRecord) record; - curRow = thisRow = numrec.getRow(); - thisColumn = numrec.getColumn(); - value = formatListener.formatNumberDateCell(numrec).trim(); - value = value.equals("") ? 
"0" : value; - // Add column values to the container(向容器加入列值) - rowlist.add(thisColumn, value); - break; - default: - break; - } - - // Encountered a new line of operations(遇到新行的操作)( - if (thisRow != -1 && thisRow != lastRowNumber) { - lastColumnNumber = -1; - } - - // Null operation(空值的操作) - if (record instanceof MissingCellDummyRecord) { - MissingCellDummyRecord mc = (MissingCellDummyRecord) record; - curRow = thisRow = mc.getRow(); - thisColumn = mc.getColumn(); - rowlist.add(thisColumn, " "); - } - - // Update row and column values(更新行和列的值) - if (thisRow > -1) lastRowNumber = thisRow; - if (thisColumn > -1) lastColumnNumber = thisColumn; - - // End of line operation(行结束时的操作) - if (record instanceof LastCellOfRowDummyRecord) { - if (minColumns > 0) { - // Column value is re-empted(列值重新置空) - if (lastColumnNumber == -1) { - lastColumnNumber = 0; - } - } - lastColumnNumber = -1; - - // At the end of each line, the dealRow() method(每行结束时, dealRow() 方法) - excelRowDeal.dealRow(orderedBSRs, sheetIndex, curRow, rowlist); - // Empty container(清空容器) - rowlist.clear(); - } - } - - public void close() { - try { - if (fs != null) { - fs.close(); - } - } catch (Exception e) { - logger.info("ExcelXlsReader fs closed failed", e); - } - - try { - if (inputStream != null) { - inputStream.close(); - } - } catch (IOException e) { - logger.info("ExcelXlsReader inputStream closed failed", e); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java deleted file mode 100644 index 6924a3ebb9..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.excel; - -import org.apache.poi.hssf.record.BoundSheetRecord; - -import java.util.ArrayList; -import java.util.List; - -class FirstRowDeal implements IExcelRowDeal { - - private List sheetNames = new ArrayList<>(); - private List row; - - public List getSheetNames() { - return sheetNames; - } - - public void setSheetNames(List sheetNames) { - this.sheetNames = sheetNames; - } - - public List getRow() { - return row; - } - - public void setRow(List row) { - this.row = row; - } - - @Override - public void dealRow( - BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist) { - for (BoundSheetRecord record : orderedBSRs) { - sheetNames.add(record.getSheetname()); - } - row = rowlist; - throw new ExcelAnalysisException("Finished to deal first row"); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java deleted file mode 100644 index 7deccfb92a..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor 
license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.excel; - -import org.apache.poi.hssf.record.BoundSheetRecord; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.List; -import java.util.Map; - -class RowToCsvDeal implements IExcelRowDeal { - - private Map params; - private List sheetNames; - private OutputStream outputStream; - private Boolean hasHeader; - private Boolean fisrtRow = true; - - public void init(Boolean hasHeader, List sheetNames, OutputStream outputStream) { - this.hasHeader = hasHeader; - this.sheetNames = sheetNames; - this.outputStream = outputStream; - } - - @Override - public void dealRow( - BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist) { - String sheetName = orderedBSRs[sheetIndex].getSheetname(); - if (sheetNames == null || sheetNames.isEmpty() || sheetNames.contains(sheetName)) { - if (!(curRow == 0 && hasHeader)) { - try { - if (fisrtRow) { - fisrtRow = false; - } else { - outputStream.write("\n".getBytes()); - } - int len = rowlist.size(); - for (int i = 0; i < len; i++) { - outputStream.write(rowlist.get(i).replaceAll("\n|\t", " ").getBytes("utf-8")); - if (i < len - 1) { - outputStream.write("\t".getBytes()); - } - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } 
- } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java deleted file mode 100644 index 6b2a98c72b..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.excel; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.io.IOUtils; -import org.apache.poi.ss.usermodel.*; -import org.apache.poi.xssf.streaming.SXSSFSheet; -import org.apache.poi.xssf.streaming.SXSSFWorkbook; - -import java.io.*; -import java.math.BigDecimal; -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageExcelWriter extends ExcelFsWriter { - - private static Logger logger = LoggerFactory.getLogger(StorageExcelWriter.class); - - private String charset; - private String sheetName; - private String dateFormat; - private OutputStream outputStream; - private boolean autoFormat; - protected SXSSFWorkbook workBook; - protected SXSSFSheet sheet; - private DataFormat format; - protected DataType[] types; - protected int rowPoint; - protected int columnCounter; - protected Map styles = new HashMap<>(); - private boolean isFlush = true; - private ByteArrayOutputStream os = new ByteArrayOutputStream(); - private ByteArrayInputStream is; - - public StorageExcelWriter( - String charset, - String sheetName, - String dateFormat, - OutputStream outputStream, - boolean autoFormat) { - this.charset = charset; - this.sheetName = sheetName; - this.dateFormat = dateFormat; - this.outputStream = outputStream; - this.autoFormat = autoFormat; - } - - public void init() { - workBook = new SXSSFWorkbook(); - sheet = workBook.createSheet(sheetName); - } - - public CellStyle getDefaultHeadStyle() { - Font headerFont = workBook.createFont(); - headerFont.setBold(true); - headerFont.setFontHeightInPoints((short) 14); - headerFont.setColor(IndexedColors.RED.getIndex()); - CellStyle headerCellStyle = 
workBook.createCellStyle(); - headerCellStyle.setFont(headerFont); - return headerCellStyle; - } - - public Workbook getWorkBook() { - // 自适应列宽 - sheet.trackAllColumnsForAutoSizing(); - for (int elem = 0; elem <= columnCounter; elem++) { - sheet.autoSizeColumn(elem); - } - return workBook; - } - - public CellStyle createCellStyle(DataType dataType) { - CellStyle style = workBook.createCellStyle(); - format = workBook.createDataFormat(); - style.setDataFormat(format.getFormat("@")); - - if (autoFormat) { - switch (dataType) { - case StringType: - case CharType: - case VarcharType: - style.setDataFormat(format.getFormat("@")); - break; - case TinyIntType: - case ShortIntType: - case IntType: - style.setDataFormat(format.getFormat("#")); - break; - case LongType: - case BigIntType: - style.setDataFormat(format.getFormat("#.##E+00")); - break; - case FloatType: - style.setDataFormat(format.getFormat("#.0000000000")); - break; - case DoubleType: - style.setDataFormat(format.getFormat("#.0000000000")); - break; - case DateType: - case TimestampType: - style.setDataFormat(format.getFormat("m/d/yy h:mm")); - break; - case DecimalType: - case BigDecimalType: - style.setDataFormat(format.getFormat("#.000000000")); - break; - default: - style.setDataFormat(format.getFormat("@")); - } - } - return style; - } - - public CellStyle getCellStyle(DataType dataType) { - CellStyle style = styles.get(dataType.getTypeName()); - if (style == null) { - CellStyle newStyle = createCellStyle(dataType); - styles.put(dataType.getTypeName(), newStyle); - return newStyle; - } else { - return style; - } - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - init(); - Row tableHead = sheet.createRow(0); - Column[] columns = ((TableMetaData) metaData).getColumns(); - List columnType = new ArrayList<>(); - for (int i = 0; i < columns.length; i++) { - Cell headCell = tableHead.createCell(columnCounter); - headCell.setCellValue(columns[i].getColumnName()); - 
headCell.setCellStyle(getDefaultHeadStyle()); - columnType.add(columns[i].getDataType()); - columnCounter++; - } - types = columnType.toArray(new DataType[0]); - rowPoint++; - } - - @Override - public void addRecord(Record record) throws IOException { - // TODO: 是否需要替换null值 - Row tableBody = sheet.createRow(rowPoint); - int colunmPoint = 0; - Object[] excelRecord = ((TableRecord) record).row; - for (Object elem : excelRecord) { - Cell cell = tableBody.createCell(colunmPoint); - DataType dataType = types[colunmPoint]; - if (autoFormat) { - setCellTypeValue(dataType, elem, cell); - } else { - cell.setCellValue(DataType.valueToString(elem)); - } - cell.setCellStyle(getCellStyle(dataType)); - colunmPoint++; - } - rowPoint++; - } - - private void setCellTypeValue(DataType dataType, Object elem, Cell cell) { - if (null == elem) return; - - try { - switch (dataType) { - case StringType: - case CharType: - case VarcharType: - cell.setCellValue(DataType.valueToString(elem)); - break; - case TinyIntType: - case ShortIntType: - case IntType: - cell.setCellValue(Integer.valueOf(elem.toString())); - break; - case LongType: - case BigIntType: - cell.setCellValue(Long.valueOf(elem.toString())); - break; - case FloatType: - cell.setCellValue(Float.valueOf(elem.toString())); - break; - case DoubleType: - doubleCheck(elem.toString()); - cell.setCellValue(Double.valueOf(elem.toString())); - break; - case DateType: - case TimestampType: - cell.setCellValue(getDate(elem)); - break; - case DecimalType: - case BigDecimalType: - doubleCheck(DataType.valueToString(elem)); - cell.setCellValue(Double.valueOf(DataType.valueToString(elem))); - break; - default: - cell.setCellValue(DataType.valueToString(elem)); - } - } catch (Exception e) { - cell.setCellValue(DataType.valueToString(elem)); - } - } - - private Date getDate(Object value) { - if (value instanceof Date) { - return (Date) value; - } else { - throw new NumberFormatException( - "Value " - + value - + " with class : " - + 
value.getClass().getName() - + " is not a valid type of Date."); - } - } - - /** - * Check whether the double exceeds the number of digits, which will affect the data accuracy - * - * @param elemValue - */ - private void doubleCheck(String elemValue) { - BigDecimal value = new BigDecimal(elemValue).stripTrailingZeros(); - if ((value.precision() - value.scale()) > 15) { - throw new NumberFormatException( - "Value " + elemValue + " error : This data exceeds 15 significant digits."); - } - } - - @Override - public void flush() { - try { - getWorkBook().write(os); - } catch (IOException e) { - logger.warn("flush fail", e); - } - byte[] content = os.toByteArray(); - is = new ByteArrayInputStream(content); - byte[] buffer = new byte[1024]; - int bytesRead = 0; - while (isFlush) { - try { - bytesRead = is.read(buffer, 0, 1024); - if (bytesRead == -1) { - isFlush = false; - } else { - outputStream.write(buffer, 0, bytesRead); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - @Override - public void close() { - if (isFlush) { - flush(); - } - IOUtils.closeQuietly(outputStream); - IOUtils.closeQuietly(is); - IOUtils.closeQuietly(os); - IOUtils.closeQuietly(workBook); - } - - @Override - public String getCharset() { - return this.charset; - } - - @Override - public String getSheetName() { - return this.sheetName; - } - - @Override - public String getDateFormat() { - return this.dateFormat; - } - - @Override - public boolean isAutoFormat() { - return this.autoFormat; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java index 246fb79bc3..39d89c3d96 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java @@ -21,12 +21,17 @@ import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.poi.hssf.usermodel.HSSFWorkbook; +import org.apache.poi.openxml4j.util.ZipSecureFile; +import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.Row; +import org.apache.poi.ss.usermodel.Sheet; +import org.apache.poi.ss.usermodel.Workbook; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; +import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,7 +66,7 @@ public static String excelToCsv( throws Exception { String hdfsPath = "/tmp/" + StorageUtils.getJvmUser() + "/" + System.currentTimeMillis() + ".csv"; - LOG.info("The excel to csv with hdfsPath:" + hdfsPath); + LOG.info("The excel to csv with hdfs path:" + hdfsPath); ExcelXlsReader xlsReader = new ExcelXlsReader(); RowToCsvDeal rowToCsvDeal = new RowToCsvDeal(); OutputStream out = null; @@ -81,4 +86,44 @@ public static String excelToCsv( } return hdfsPath; } + + public static Map>> getSheetsInfo( + InputStream inputStream, Boolean hasHeader) { + // use xls file + Workbook workbook = null; + try { + // 压缩膨胀比率,处理excel行或者列过多的情况,不能设置再小了,会导致内存过大 + ZipSecureFile.setMinInflateRatio(0.005); + workbook = new HSSFWorkbook(inputStream); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + // 使用完最后需要还原 + ZipSecureFile.setMinInflateRatio(0.01); + } + Map>> res = new LinkedHashMap<>(workbook.getNumberOfSheets()); + // foreach Sheet + for (int i = 0; i < workbook.getNumberOfSheets(); i++) { + Sheet sheet = workbook.getSheetAt(i); + + List> rowList = new ArrayList<>(); + + // get first row as column name + Row headerRow = sheet.getRow(0); + + // foreach column + for (int j = 0; j < headerRow.getPhysicalNumberOfCells(); j++) { + Map sheetMap = new LinkedHashMap<>(); + Cell cell = headerRow.getCell(j); + if (hasHeader) { + sheetMap.put(cell.getStringCellValue(), "string"); + } else { + 
sheetMap.put("col_" + (j + 1), "string"); + } + rowList.add(sheetMap); + } + res.put(sheet.getSheetName(), rowList); + } + return res; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java index 7cbf579a9d..c2418f4c33 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java @@ -23,10 +23,9 @@ import org.apache.poi.ss.usermodel.Workbook; import java.io.File; +import java.io.IOException; import java.io.InputStream; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; +import java.util.*; import com.github.pjfanning.xlsx.StreamingReader; @@ -79,4 +78,54 @@ public static List> getBasicInfo(InputStream inputStream, File file } } } + + public static Map>> getAllSheetInfo( + InputStream inputStream, File file, Boolean hasHeader) throws IOException { + try { + Workbook wb = null; + if (inputStream != null) { + wb = + StreamingReader.builder() + // number of rows to keep in memory (defaults to 10) + .rowCacheSize(2) + .open(inputStream); + } else { + wb = + StreamingReader.builder() + // number of rows to keep in memory (defaults to 10) + .rowCacheSize(2) + .open(file); + } + Map>> res = new LinkedHashMap<>(wb.getNumberOfSheets()); + for (Sheet sheet : wb) { + Iterator iterator = sheet.iterator(); + Row row = null; + while (iterator.hasNext() && row == null) { + row = iterator.next(); + } + List> rowList = new ArrayList<>(); + if (row == null) { + res.put(sheet.getSheetName(), rowList); + continue; + } + int cellIdx = 0; + for (Cell cell : row) { + Map item = new LinkedHashMap<>(); + if (hasHeader) { + item.put(cell.getStringCellValue(), "string"); + } else { + item.put("col_" + (cellIdx + 1), "string"); + } + cellIdx++; + rowList.add(item); + } + 
res.put(sheet.getSheetName(), rowList); + } + return res; + } finally { + if (inputStream != null) { + inputStream.close(); + } + } + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineUnlock.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java similarity index 63% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineUnlock.java rename to linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java index 323f57cf19..955a8e1d60 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineUnlock.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java @@ -15,22 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.common.protocol; +package org.apache.linkis.storage.exception; -public class ResponseEngineUnlock implements EngineLock { - public ResponseEngineUnlock(boolean unlocked) { - this.unlocked = unlocked; - } - - public ResponseEngineUnlock() {} +public class ColLengthExceedException extends StorageWarnException { - private boolean unlocked; + public ColLengthExceedException(int errCode, String desc) { + super(errCode, desc); + } - public boolean getUnlocked() { - return unlocked; + public ColLengthExceedException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); } - public void setUnlocked(boolean unlocked) { - this.unlocked = unlocked; + public ColLengthExceedException(int errCode, String desc, Throwable t) { + super(errCode, desc, t); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java new file mode 100644 index 0000000000..969b19d20b --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.exception; + +public class ColumnIndexExceedException extends StorageWarnException { + + public ColumnIndexExceedException(int errCode, String desc) { + super(errCode, desc); + } + + public ColumnIndexExceedException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public ColumnIndexExceedException(int errCode, String desc, Throwable t) { + super(errCode, desc, t); + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java index fad0d83a12..ae66c1cf99 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java @@ -21,9 +21,13 @@ public enum StorageErrorCode { /** */ FS_NOT_INIT(53001, "please init first"), - INCONSISTENT_DATA(53001, "Inconsistent row data read,read %s,need rowLen %s"), - FS_OOM(53002, "OOM occurred while reading the file"); + FS_OOM(53002, "OOM occurred while reading the file"), + FS_ERROR(53003, "Failed to operation fs"), + + READ_PARQUET_FAILED(53004, "Failed to read parquet file"), + + READ_ORC_FAILED(53005, "Failed to read orc file"); StorageErrorCode(int errorCode, String message) { this.code = errorCode; diff --git a/linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java similarity index 65% rename from 
linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.java rename to linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java index 6c5d106f6a..dedad1140c 100644 --- a/linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java @@ -15,14 +15,22 @@ * limitations under the License. */ -package org.apache.linkis.manager.engineplugin.shell.exception; +package org.apache.linkis.storage.exception; import org.apache.linkis.common.exception.ErrorException; -import static org.apache.linkis.manager.engineplugin.shell.errorcode.LinkisCommonsErrorCodeSummary.*; +public class StorageReadException extends ErrorException { -public class NoCorrectUserException extends ErrorException { - public NoCorrectUserException() { - super(NO_ILLEGAL_USER_HOLDS.getErrorCode(), NO_ILLEGAL_USER_HOLDS.getErrorDesc()); + public StorageReadException(int errCode, String desc) { + super(errCode, desc); + } + + public StorageReadException(int errCode, String desc, Throwable t) { + super(errCode, desc); + initCause(t); + } + + public StorageReadException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java index 8103c6f3de..9f53a6249b 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java @@ -21,7 +21,7 @@ import 
org.apache.linkis.storage.factory.BuildFactory; import org.apache.linkis.storage.fs.FileSystem; import org.apache.linkis.storage.fs.impl.HDFSFileSystem; -import org.apache.linkis.storage.io.IOMethodInterceptorFactory; +import org.apache.linkis.storage.io.IOMethodInterceptorCreator$; import org.apache.linkis.storage.utils.StorageUtils; import org.springframework.cglib.proxy.Enhancer; @@ -46,7 +46,7 @@ public Fs getFs(String user, String proxyUser) { // TODO Agent user(代理的用户) Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(HDFSFileSystem.class.getSuperclass()); - enhancer.setCallback(IOMethodInterceptorFactory.getIOMethodInterceptor(fsName())); + enhancer.setCallback(IOMethodInterceptorCreator$.MODULE$.getIOMethodInterceptor(fsName())); fs = (FileSystem) enhancer.create(); } fs.setUser(proxyUser); @@ -63,6 +63,6 @@ public Fs getFs(String user, String proxyUser, String label) { @Override public String fsName() { - return StorageUtils.HDFS; + return StorageUtils.HDFS(); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java index bcd61c5735..ef88cec36d 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java @@ -21,7 +21,7 @@ import org.apache.linkis.storage.factory.BuildFactory; import org.apache.linkis.storage.fs.FileSystem; import org.apache.linkis.storage.fs.impl.LocalFileSystem; -import org.apache.linkis.storage.io.IOMethodInterceptorFactory; +import org.apache.linkis.storage.io.IOMethodInterceptorCreator$; import org.apache.linkis.storage.utils.StorageConfiguration; import org.apache.linkis.storage.utils.StorageUtils; @@ -34,7 +34,7 @@ public Fs getFs(String user, String proxyUser) { FileSystem fs 
= null; if (StorageUtils.isIOProxy()) { if (user.equals(proxyUser)) { - if ((Boolean) StorageConfiguration.IS_SHARE_NODE.getValue()) { + if ((Boolean) StorageConfiguration.IS_SHARE_NODE().getValue()) { fs = new LocalFileSystem(); } else { fs = getProxyFs(); @@ -58,12 +58,12 @@ public Fs getFs(String user, String proxyUser, String label) { private FileSystem getProxyFs() { Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(LocalFileSystem.class.getSuperclass()); - enhancer.setCallback(IOMethodInterceptorFactory.getIOMethodInterceptor(fsName())); + enhancer.setCallback(IOMethodInterceptorCreator$.MODULE$.getIOMethodInterceptor(fsName())); return (FileSystem) enhancer.create(); } @Override public String fsName() { - return StorageUtils.FILE; + return StorageUtils.FILE(); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java deleted file mode 100644 index ba1bd7abef..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.factory.impl; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.storage.factory.BuildFactory; -import org.apache.linkis.storage.fs.impl.OSSFileSystem; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class BuildOSSSystem implements BuildFactory { - - private static final Logger LOG = LoggerFactory.getLogger(BuildOSSSystem.class); - - /** - * get file system - * - * @param user - * @param proxyUser - * @return - */ - @Override - public Fs getFs(String user, String proxyUser) { - OSSFileSystem fs = new OSSFileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(user); - return fs; - } - - @Override - public Fs getFs(String user, String proxyUser, String label) { - OSSFileSystem fs = new OSSFileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(proxyUser); - fs.setLabel(label); - return fs; - } - - @Override - public String fsName() { - return StorageUtils.OSS; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java deleted file mode 100644 index 44082e5898..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.factory.impl; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.storage.factory.BuildFactory; -import org.apache.linkis.storage.fs.impl.S3FileSystem; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class BuildS3FileSystem implements BuildFactory { - private static final Logger LOG = LoggerFactory.getLogger(BuildS3FileSystem.class); - - @Override - public Fs getFs(String user, String proxyUser) { - S3FileSystem fs = new S3FileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(user); - return fs; - } - - @Override - public Fs getFs(String user, String proxyUser, String label) { - S3FileSystem fs = new S3FileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(user); - fs.setLabel(label); - return fs; - } - - @Override - public String fsName() { - return StorageUtils.S3; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java index 3067383b6c..ac828267bf 100644 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java @@ -51,6 +51,10 @@ public String getDefaultFolderPerm() { public abstract long getUsableSpace(FsPath dest) throws IOException; + public abstract long getLength(FsPath dest) throws IOException; + + public abstract String checkSum(FsPath dest) throws IOException; + public abstract boolean canExecute(FsPath dest) throws IOException; public abstract boolean setOwner(FsPath dest, String user, String group) throws IOException; @@ -99,7 +103,7 @@ protected FsPath getParentPath(String path) { } else { parentPath = path.substring(0, path.lastIndexOf("/")); } - LOG.info("Get Parent Path:" + parentPath); + LOG.info("Get parent path:" + parentPath); return new FsPath(parentPath); } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java index 74260470a6..c4f4814149 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java @@ -31,9 +31,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.UserGroupInformation; @@ -82,7 +80,7 @@ public long getUsableSpace(FsPath dest) throws IOException { @Override public boolean canExecute(FsPath dest) throws IOException { - return canAccess(dest, FsAction.EXECUTE); + return 
canAccess(dest, FsAction.EXECUTE, this.user); } @Override @@ -162,7 +160,8 @@ public FsPathListWithError listPathWithError(FsPath path) throws IOException { List fsPaths = new ArrayList(); for (FileStatus f : stat) { fsPaths.add( - fillStorageFile(new FsPath(StorageUtils.HDFS_SCHEMA + f.getPath().toUri().getPath()), f)); + fillStorageFile( + new FsPath(StorageUtils.HDFS_SCHEMA() + f.getPath().toUri().getPath()), f)); } if (fsPaths.isEmpty()) { return null; @@ -174,35 +173,39 @@ public FsPathListWithError listPathWithError(FsPath path) throws IOException { @Override public void init(Map properties) throws IOException { if (MapUtils.isNotEmpty(properties) - && properties.containsKey(StorageConfiguration.PROXY_USER.key())) { - user = StorageConfiguration.PROXY_USER.getValue(properties); + && properties.containsKey(StorageConfiguration.PROXY_USER().key())) { + user = StorageConfiguration.PROXY_USER().getValue(properties); + properties.remove(StorageConfiguration.PROXY_USER().key()); } if (user == null) { throw new IOException("User cannot be empty(用户不能为空)"); } - - if (label == null && (boolean) Configuration.IS_MULTIPLE_YARN_CLUSTER().getValue()) { - label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL.getValue(); + if (label == null && Configuration.IS_MULTIPLE_YARN_CLUSTER()) { + label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL().getValue(); } - conf = HDFSUtils.getConfigurationByLabel(user, label); - + /** if properties is null do not to create conf */ if (MapUtils.isNotEmpty(properties)) { - for (String key : properties.keySet()) { - String v = properties.get(key); - if (StringUtils.isNotEmpty(v)) { - conf.set(key, v); + conf = HDFSUtils.getConfigurationByLabel(user, label); + if (MapUtils.isNotEmpty(properties)) { + for (String key : properties.keySet()) { + String v = properties.get(key); + if (StringUtils.isNotEmpty(v)) { + conf.set(key, v); + } } } } - if (StorageConfiguration.FS_CACHE_DISABLE.getValue()) { - conf.set("fs.hdfs.impl.disable.cache", "true"); 
+ if (null != conf) { + fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + } else { + fs = HDFSUtils.getHDFSUserFileSystem(user, label); } - fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + if (fs == null) { throw new IOException("init HDFS FileSystem failed!"); } - if (StorageConfiguration.FS_CHECKSUM_DISBALE.getValue()) { + if (StorageConfiguration.FS_CHECKSUM_DISBALE().getValue()) { fs.setVerifyChecksum(false); fs.setWriteChecksum(false); } @@ -215,7 +218,7 @@ public String fsName() { @Override public String rootUserName() { - return StorageConfiguration.HDFS_ROOT_USER.getValue(); + return StorageConfiguration.HDFS_ROOT_USER().getValue(); } @Override @@ -248,7 +251,6 @@ public OutputStream write(FsPath dest, boolean overwrite) throws IOException { return fs.append(new Path(path)); } else { OutputStream out = fs.create(new Path(path), true); - this.setPermission(dest, this.getDefaultFilePerm()); return out; } } @@ -293,12 +295,16 @@ public List list(FsPath path) throws IOException { @Override public boolean canRead(FsPath dest) throws IOException { - return canAccess(dest, FsAction.READ); + return canAccess(dest, FsAction.READ, this.user); + } + + public boolean canRead(FsPath dest, String user) throws IOException { + return canAccess(dest, FsAction.READ, user); } @Override public boolean canWrite(FsPath dest) throws IOException { - return canAccess(dest, FsAction.WRITE); + return canAccess(dest, FsAction.WRITE, this.user); } @Override @@ -308,10 +314,10 @@ public boolean exists(FsPath dest) throws IOException { } catch (IOException e) { String message = e.getMessage(); String rootCauseMessage = ExceptionUtils.getRootCauseMessage(e); - if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS)) + if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS())) || (rootCauseMessage != null - && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS))) { - logger.info("Failed to 
execute exists, retry", e); + && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS()))) { + logger.info("Failed to execute exists for user {}, retry", user, e); resetRootHdfs(); return fs.exists(new Path(checkHDFSPath(dest.getPath()))); } else { @@ -329,8 +335,12 @@ private void resetRootHdfs() { } else { HDFSUtils.closeHDFSFIleSystem(fs, user, label); } - logger.warn(user + "FS reset close."); - fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + logger.warn("{} FS reset close.", user); + if (null != conf) { + fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + } else { + fs = HDFSUtils.getHDFSUserFileSystem(user, label); + } } } } @@ -357,7 +367,7 @@ public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException { @Override public void close() throws IOException { if (null != fs) { - HDFSUtils.closeHDFSFIleSystem(fs, user, label); + HDFSUtils.closeHDFSFIleSystem(fs, user); } else { logger.warn("FS was null, cannot close."); } @@ -383,7 +393,7 @@ private FsPath fillStorageFile(FsPath fsPath, FileStatus fileStatus) throws IOEx return fsPath; } - private boolean canAccess(FsPath fsPath, FsAction access) throws IOException { + private boolean canAccess(FsPath fsPath, FsAction access, String user) throws IOException { String path = checkHDFSPath(fsPath.getPath()); if (!exists(fsPath)) { throw new IOException("directory or file not exists: " + path); @@ -391,12 +401,12 @@ private boolean canAccess(FsPath fsPath, FsAction access) throws IOException { FileStatus f = fs.getFileStatus(new Path(path)); FsPermission permission = f.getPermission(); - UserGroupInformation ugi = HDFSUtils.getUserGroupInformation(user, label); + UserGroupInformation ugi = HDFSUtils.getUserGroupInformation(user); String[] groupNames; try { groupNames = ugi.getGroupNames(); } catch (NullPointerException e) { - if ((Boolean) Configuration.IS_TEST_MODE().getValue()) { + if ((Boolean) org.apache.linkis.common.conf.Configuration.IS_TEST_MODE().getValue()) 
{ groupNames = new String[] {"hadoop"}; } else { throw e; @@ -428,9 +438,9 @@ public void setLabel(String label) { private String checkHDFSPath(String path) { try { - boolean checkHdfsPath = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue(); + boolean checkHdfsPath = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON().getValue(); if (checkHdfsPath) { - boolean rmHdfsPrefix = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue(); + boolean rmHdfsPrefix = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_REMOVE().getValue(); if (rmHdfsPrefix) { if (StringUtils.isBlank(path)) { return path; @@ -466,4 +476,21 @@ private String checkHDFSPath(String path) { } return path; } + + @Override + public long getLength(FsPath dest) throws IOException { + FileStatus fileStatus = fs.getFileStatus(new Path(checkHDFSPath(dest.getPath()))); + return fileStatus.getLen(); + } + + @Override + public String checkSum(FsPath dest) throws IOException { + String path = checkHDFSPath(dest.getPath()); + if (!exists(dest)) { + throw new IOException("directory or file not exists: " + path); + } + MD5MD5CRC32FileChecksum fileChecksum = + (MD5MD5CRC32FileChecksum) fs.getFileChecksum(new Path(path)); + return fileChecksum.toString().split(":")[1]; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java index 0e3066489b..a03a25950e 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java @@ -47,12 +47,7 @@ import java.nio.file.attribute.PosixFilePermissions; import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipalLookupService; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import 
java.util.Map; -import java.util.Set; -import java.util.Stack; +import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -113,7 +108,7 @@ public boolean setOwner(FsPath dest, String user, String group) throws IOExcepti if (group != null) { setGroup(dest, group); } - setGroup(dest, StorageConfiguration.STORAGE_USER_GROUP.getValue()); + setGroup(dest, StorageConfiguration.STORAGE_USER_GROUP().getValue()); return true; } @@ -136,7 +131,7 @@ public boolean setOwner(FsPath dest, String user) throws IOException { @Override public boolean setGroup(FsPath dest, String group) throws IOException { - LOG.info("Set group with path:" + dest.getPath() + "and group:" + user); + LOG.info("Set group with path:" + dest.getPath() + " and group:" + group); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setGroup skip"); return true; @@ -262,9 +257,17 @@ public FsPathListWithError listPathWithError(FsPath path) throws IOException { LOG.info("Try to list path:" + path.getPath() + " with error msg"); if (files != null) { List rtn = new ArrayList(); + Set fileNameSet = new HashSet<>(); + fileNameSet.add(path.getPath().trim()); String message = ""; for (File f : files) { try { + if (fileNameSet.contains(f.getPath())) { + LOG.info("File {} is duplicate", f.getPath()); + continue; + } else { + fileNameSet.add(f.getParent().trim()); + } rtn.add(get(f.getPath())); } catch (Throwable e) { LOG.warn("Failed to list path:", e); @@ -289,15 +292,15 @@ public void init(Map properties) throws IOException { if (MapUtils.isNotEmpty(properties)) { this.properties = properties; - if (properties.containsKey(StorageConfiguration.PROXY_USER.key())) { - user = StorageConfiguration.PROXY_USER.getValue(properties); + if (properties.containsKey(StorageConfiguration.PROXY_USER().key())) { + user = StorageConfiguration.PROXY_USER().getValue(properties); } - group = StorageConfiguration.STORAGE_USER_GROUP.getValue(properties); + group = 
StorageConfiguration.STORAGE_USER_GROUP().getValue(properties); } else { this.properties = new HashMap(); } if (FsPath.WINDOWS) { - group = StorageConfiguration.STORAGE_USER_GROUP.getValue(properties); + group = StorageConfiguration.STORAGE_USER_GROUP().getValue(properties); } if (StringUtils.isEmpty(group)) { String groupInfo; @@ -320,7 +323,7 @@ public String fsName() { @Override public String rootUserName() { - return StorageConfiguration.LOCAL_ROOT_USER.getValue(); + return StorageConfiguration.LOCAL_ROOT_USER().getValue(); } @Override @@ -422,6 +425,11 @@ public boolean canRead(FsPath dest) throws IOException { PosixFilePermission.OTHERS_READ); } + @Override + public boolean canRead(FsPath dest, String user) throws IOException { + return false; + } + @Override public boolean canWrite(FsPath dest) throws IOException { return can( @@ -489,4 +497,16 @@ private String getOwner(String path) throws IOException { PosixFileAttributes attr = Files.readAttributes(Paths.get(path), PosixFileAttributes.class); return attr.owner().getName(); } + + @Override + public long getLength(FsPath dest) throws IOException { + String path = dest.getPath(); + LOG.info("Get file length with path:" + path); + return new File(path).length(); + } + + @Override + public String checkSum(FsPath dest) { + return null; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java deleted file mode 100644 index 99cf159a01..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.fs.impl; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.hadoop.common.utils.HDFSUtils; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.domain.FsPathListWithError; -import org.apache.linkis.storage.fs.FileSystem; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.fs.permission.FsPermission; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import com.google.common.collect.Maps; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class OSSFileSystem extends FileSystem { - - public static final String OSS_PREFIX = "oss://"; - private 
org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem fs = null; - private Configuration conf = null; - - private String label = null; - - private static final Logger logger = LoggerFactory.getLogger(OSSFileSystem.class); - - /** File System abstract method start */ - @Override - public String listRoot() throws IOException { - return "/"; - } - - @Override - public long getTotalSpace(FsPath dest) throws IOException { - return 0; - } - - @Override - public long getFreeSpace(FsPath dest) throws IOException { - return 0; - } - - @Override - public long getUsableSpace(FsPath dest) throws IOException { - return 0; - } - - @Override - public boolean canExecute(FsPath dest) throws IOException { - return true; - } - - @Override - public boolean setOwner(FsPath dest, String user, String group) throws IOException { - return true; - } - - @Override - public boolean setOwner(FsPath dest, String user) throws IOException { - return true; - } - - @Override - public boolean setGroup(FsPath dest, String group) throws IOException { - return true; - } - - @Override - public boolean mkdir(FsPath dest) throws IOException { - String path = checkOSSPath(dest.getPath()); - if (!canExecute(getParentPath(path))) { - throw new IOException("You have not permission to access path " + path); - } - boolean result = - fs.mkdirs(new Path(path), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); - this.setPermission(new FsPath(path), this.getDefaultFolderPerm()); - return result; - } - - @Override - public boolean mkdirs(FsPath dest) throws IOException { - String path = checkOSSPath(dest.getPath()); - FsPath parentPath = getParentPath(path); - while (!exists(parentPath)) { - parentPath = getParentPath(parentPath.getPath()); - } - return fs.mkdirs(new Path(path), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); - } - - @Override - public boolean setPermission(FsPath dest, String permission) throws IOException { - return true; - } - - @Override - public FsPathListWithError 
listPathWithError(FsPath path) throws IOException { - FileStatus[] stat = fs.listStatus(new Path(checkOSSPath(path.getPath()))); - List fsPaths = new ArrayList(); - for (FileStatus f : stat) { - fsPaths.add( - fillStorageFile( - new FsPath( - StorageUtils.OSS_SCHEMA - + StorageConfiguration.OSS_ACCESS_BUCKET_NAME.getValue() - + "/" - + f.getPath().toUri().getPath()), - f)); - } - if (fsPaths.isEmpty()) { - return null; - } - return new FsPathListWithError(fsPaths, ""); - } - - /** FS interface method start */ - @Override - public void init(Map properties) throws IOException { - // read origin configs from hadoop conf - if (label == null - && (boolean) - org.apache.linkis.common.conf.Configuration.IS_MULTIPLE_YARN_CLUSTER().getValue()) { - label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL.getValue(); - } - conf = HDFSUtils.getConfigurationByLabel(user, label); - - // origin configs - Map originProperties = Maps.newHashMap(); - originProperties.put("fs.oss.endpoint", StorageConfiguration.OSS_ENDPOINT.getValue()); - originProperties.put("fs.oss.accessKeyId", StorageConfiguration.OSS_ACCESS_KEY_ID.getValue()); - originProperties.put( - "fs.oss.accessKeySecret", StorageConfiguration.OSS_ACCESS_KEY_SECRET.getValue()); - for (String key : originProperties.keySet()) { - String value = originProperties.get(key); - if (StringUtils.isNotBlank(value)) { - conf.set(key, value); - } - } - - // additional configs - if (MapUtils.isNotEmpty(properties)) { - for (String key : properties.keySet()) { - String v = properties.get(key); - if (StringUtils.isNotBlank(v)) { - conf.set(key, v); - } - } - } - fs = new AliyunOSSFileSystem(); - try { - fs.initialize( - new URI(StorageUtils.OSS_SCHEMA + StorageConfiguration.OSS_ACCESS_BUCKET_NAME.getValue()), - conf); - } catch (URISyntaxException e) { - throw new IOException("init OSS FileSystem failed!"); - } - if (fs == null) { - throw new IOException("init OSS FileSystem failed!"); - } - } - - @Override - public String fsName() { - return 
StorageUtils.OSS; - } - - @Override - public String rootUserName() { - return null; - } - - @Override - public FsPath get(String dest) throws IOException { - String realPath = checkOSSPath(dest); - return fillStorageFile(new FsPath(realPath), fs.getFileStatus(new Path(realPath))); - } - - @Override - public InputStream read(FsPath dest) throws IOException { - if (!canRead(dest)) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - return fs.open(new Path(dest.getPath()), 128); - } - - @Override - public OutputStream write(FsPath dest, boolean overwrite) throws IOException { - String path = checkOSSPath(dest.getPath()); - if (!exists(dest)) { - if (!canWrite(dest.getParent())) { - throw new IOException("You have not permission to access path " + dest.getParent()); - } - } else { - if (!canWrite(dest)) { - throw new IOException("You have not permission to access path " + path); - } - } - OutputStream out = - fs.create( - new Path(path), - new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL), - overwrite, - 0, - (short) 0, - 0L, - null); - this.setPermission(dest, this.getDefaultFilePerm()); - return out; - } - - @Override - public boolean create(String dest) throws IOException { - if (!canExecute(getParentPath(dest))) { - throw new IOException("You have not permission to access path " + dest); - } - // to do - boolean result = fs.createNewFile(new Path(checkOSSPath(dest))); - this.setPermission(new FsPath(dest), this.getDefaultFilePerm()); - return result; - } - - @Override - public boolean copy(String origin, String dest) throws IOException { - if (!canExecute(getParentPath(dest))) { - throw new IOException("You have not permission to access path " + dest); - } - boolean res = - FileUtil.copy( - fs, - new Path(checkOSSPath(origin)), - fs, - new Path(checkOSSPath(dest)), - false, - true, - fs.getConf()); - this.setPermission(new FsPath(dest), this.getDefaultFilePerm()); - return res; - } - - @Override - public List 
list(FsPath path) throws IOException { - FileStatus[] stat = fs.listStatus(new Path(checkOSSPath(path.getPath()))); - List fsPaths = new ArrayList(); - for (FileStatus f : stat) { - fsPaths.add(fillStorageFile(new FsPath(f.getPath().toUri().toString()), f)); - } - return fsPaths; - } - - @Override - public boolean canRead(FsPath dest) throws IOException { - return true; - } - - @Override - public boolean canWrite(FsPath dest) throws IOException { - return true; - } - - @Override - public boolean exists(FsPath dest) throws IOException { - try { - return fs.exists(new Path(checkOSSPath(dest.getPath()))); - } catch (IOException e) { - String message = e.getMessage(); - String rootCauseMessage = ExceptionUtils.getRootCauseMessage(e); - if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS)) - || (rootCauseMessage != null - && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS))) { - logger.info("Failed to execute exists, retry", e); - resetRootOSS(); - return fs.exists(new Path(checkOSSPath(dest.getPath()))); - } else { - throw e; - } - } - } - - private void resetRootOSS() throws IOException { - if (fs != null) { - synchronized (this) { - if (fs != null) { - fs.close(); - logger.warn(user + " FS reset close."); - init(null); - } - } - } - } - - @Override - public boolean delete(FsPath dest) throws IOException { - String path = checkOSSPath(dest.getPath()); - return fs.delete(new Path(path), true); - } - - @Override - public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException { - return fs.rename( - new Path(checkOSSPath(oldDest.getPath())), new Path(checkOSSPath(newDest.getPath()))); - } - - @Override - public void close() throws IOException { - if (null != fs) { - fs.close(); - } else { - logger.warn("FS was null, cannot close."); - } - } - - /** Utils method start */ - private FsPath fillStorageFile(FsPath fsPath, FileStatus fileStatus) throws IOException { - 
fsPath.setAccess_time(fileStatus.getAccessTime()); - fsPath.setModification_time(fileStatus.getModificationTime()); - fsPath.setOwner(fileStatus.getOwner()); - fsPath.setGroup(fileStatus.getGroup()); - fsPath.setIsdir(fileStatus.isDirectory()); - return fsPath; - } - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - private static String checkOSSPath(String path) { - try { - boolean checkOSSPath = (boolean) StorageConfiguration.OSS_PATH_PREFIX_CHECK_ON.getValue(); - if (checkOSSPath) { - boolean rmOSSPrefix = (boolean) StorageConfiguration.OSS_PATH_PREFIX_REMOVE.getValue(); - if (rmOSSPrefix) { - if (StringUtils.isBlank(path)) { - return path; - } - if (path.startsWith(OSS_PREFIX)) { - int remainIndex = OSS_PREFIX.length(); - String[] t1 = path.substring(remainIndex).split("/", 2); - if (t1.length != 2) { - logger.warn("checkOSSPath Invalid path: " + path); - return path; - } - if (logger.isDebugEnabled()) { - logger.debug("checkOSSPath ori path : {}, after path : {}", path, "/" + t1[1]); - } - return "/" + t1[1]; - } else { - return path; - } - } - } - } catch (Exception e) { - logger.warn("checkOSSPath error. msg : " + e.getMessage() + " ", e); - } - return path; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java deleted file mode 100644 index b8f6401b11..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.fs.impl; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.domain.FsPathListWithError; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.fs.FileSystem; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.*; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.AmazonS3Exception; -import com.amazonaws.services.s3.model.ListObjectsV2Result; -import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TO_BE_UNKNOW; - -public class S3FileSystem extends FileSystem { - private static final Logger logger = LoggerFactory.getLogger(S3FileSystem.class); - private 
String accessKey; - private String secretKey; - - private String endPoint; - - private String region; - - private String bucket; - - private String label; - - private AmazonS3 s3Client; - - private static final String INIT_FILE_NAME = ".s3_dir_init"; - - @Override - public void init(Map properties) throws IOException { - accessKey = StorageConfiguration.S3_ACCESS_KEY.getValue(properties); - secretKey = StorageConfiguration.S3_SECRET_KEY.getValue(properties); - endPoint = StorageConfiguration.S3_ENDPOINT.getValue(properties); - bucket = StorageConfiguration.S3_BUCKET.getValue(properties); - region = StorageConfiguration.S3_REGION.getValue(properties); - - AwsClientBuilder.EndpointConfiguration endpointConfiguration = - new AwsClientBuilder.EndpointConfiguration(endPoint, region); - - BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(accessKey, secretKey); - - AWSStaticCredentialsProvider StaticCredentials = - new AWSStaticCredentialsProvider(basicAWSCredentials); - - s3Client = - AmazonS3ClientBuilder.standard() - .withEndpointConfiguration(endpointConfiguration) - .withPathStyleAccessEnabled(true) - .withCredentials(StaticCredentials) - .build(); - } - - @Override - public String fsName() { - return StorageUtils.S3; - } - - @Override - public String rootUserName() { - return null; - } - - @Override - public FsPath get(String dest) throws IOException { - FsPath ret = new FsPath(dest); - if (exists(ret)) { - return ret; - } else { - logger.warn("File or folder does not exist or file name is garbled(文件或者文件夹不存在或者文件名乱码)"); - throw new StorageWarnException( - TO_BE_UNKNOW.getErrorCode(), - "File or folder does not exist or file name is garbled(文件或者文件夹不存在或者文件名乱码)"); - } - } - - @Override - public InputStream read(FsPath dest) throws IOException { - try { - return s3Client.getObject(bucket, dest.getPath()).getObjectContent(); - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - } - - 
@Override - public OutputStream write(FsPath dest, boolean overwrite) throws IOException { - try (InputStream inputStream = read(dest); - OutputStream outputStream = new S3OutputStream(s3Client, bucket, dest.getPath())) { - if (overwrite) { - IOUtils.copy(inputStream, outputStream); - } - return outputStream; - } - } - - @Override - public boolean create(String dest) throws IOException { - if (exists(new FsPath(dest))) { - return false; - } - s3Client.putObject(bucket, dest, ""); - return true; - } - - @Override - public List list(FsPath path) throws IOException { - try { - if (!StringUtils.isEmpty(path.getPath())) { - ListObjectsV2Result listObjectsV2Result = s3Client.listObjectsV2(bucket, path.getPath()); - List s3ObjectSummaries = listObjectsV2Result.getObjectSummaries(); - return s3ObjectSummaries.stream() - .filter(summary -> !isInitFile(summary)) - .map( - summary -> { - FsPath newPath = new FsPath(buildPath(summary.getKey())); - return fillStorageFile(newPath, summary); - }) - .collect(Collectors.toList()); - } - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + path.getPath()); - } - - return new ArrayList<>(); - } - - @Override - public FsPathListWithError listPathWithError(FsPath path) throws IOException { - try { - if (!StringUtils.isEmpty(path.getPath())) { - ListObjectsV2Result listObjectsV2Result = s3Client.listObjectsV2(bucket, path.getPath()); - List s3ObjectSummaries = listObjectsV2Result.getObjectSummaries(); - if (s3ObjectSummaries != null) { - List rtn = new ArrayList(); - String message = ""; - for (S3ObjectSummary summary : s3ObjectSummaries) { - if (isDir(summary, path.getPath()) || isInitFile(summary)) continue; - FsPath newPath = new FsPath(buildPath(summary.getKey())); - rtn.add(fillStorageFile(newPath, summary)); - } - return new FsPathListWithError(rtn, message); - } - } - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + path.getPath()); 
- } - - return null; - } - - @Override - public boolean exists(FsPath dest) throws IOException { - try { - int size = s3Client.listObjectsV2(bucket, dest.getPath()).getObjectSummaries().size(); - return size > 0; - } catch (AmazonS3Exception e) { - return false; - } - } - - @Override - public boolean delete(FsPath dest) throws IOException { - try { - s3Client.deleteObject(bucket, dest.getPath()); - return true; - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - } - - @Override - public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException { - try { - s3Client.copyObject(bucket, oldDest.getPath(), bucket, newDest.getPath()); - s3Client.deleteObject(bucket, oldDest.getPath()); - return true; - } catch (AmazonS3Exception e) { - s3Client.deleteObject(bucket, newDest.getPath()); - throw new IOException( - "You have not permission to access path " - + oldDest.getPath() - + " or " - + newDest.getPath()); - } - } - - @Override - public boolean copy(String origin, String dest) throws IOException { - try { - s3Client.copyObject(bucket, origin, bucket, dest); - return true; - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + origin + " or " + dest); - } - } - - private boolean isDir(S3ObjectSummary s3ObjectSummary, String prefix) { - return s3ObjectSummary.getKey().substring(prefix.length()).contains("/"); - } - - private boolean isInitFile(S3ObjectSummary s3ObjectSummary) { - return s3ObjectSummary.getKey().contains(INIT_FILE_NAME); - } - - @Override - public String listRoot() { - return "/"; - } - - @Override - public boolean mkdir(FsPath dest) throws IOException { - String path = new File(dest.getPath(), INIT_FILE_NAME).getPath(); - if (exists(new FsPath(path))) { - return false; - } - return create(path); - } - - @Override - public boolean mkdirs(FsPath dest) throws IOException { - return mkdir(dest); - } - - private FsPath 
fillStorageFile(FsPath fsPath, S3ObjectSummary s3ObjectSummary) { - fsPath.setModification_time(s3ObjectSummary.getLastModified().getTime()); - fsPath.setOwner(s3ObjectSummary.getOwner().getDisplayName()); - try { - fsPath.setIsdir(isDir(s3ObjectSummary, fsPath.getParent().getPath())); - } catch (Throwable e) { - logger.warn("Failed to fill storage file:" + fsPath.getPath(), e); - } - - if (fsPath.isdir()) { - fsPath.setLength(0); - } else { - fsPath.setLength(s3ObjectSummary.getSize()); - } - return fsPath; - } - - @Override - public boolean canRead(FsPath dest) { - return true; - } - - @Override - public boolean canWrite(FsPath dest) { - return true; - } - - @Override - public long getTotalSpace(FsPath dest) { - return 0; - } - - @Override - public long getFreeSpace(FsPath dest) { - return 0; - } - - @Override - public long getUsableSpace(FsPath dest) { - return 0; - } - - @Override - public boolean canExecute(FsPath dest) { - return true; - } - - @Override - public boolean setOwner(FsPath dest, String user, String group) { - return false; - } - - @Override - public boolean setOwner(FsPath dest, String user) { - return false; - } - - @Override - public boolean setGroup(FsPath dest, String group) { - return false; - } - - @Override - public boolean setPermission(FsPath dest, String permission) { - return false; - } - - @Override - public void close() throws IOException {} - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - public String buildPath(String path) { - if (path == null || "".equals(path)) return ""; - if (path.startsWith("/")) { - return StorageUtils.S3_SCHEMA + path; - } - return StorageUtils.S3_SCHEMA + "/" + path; - } -} - -class S3OutputStream extends ByteArrayOutputStream { - private AmazonS3 s3Client; - private String bucket; - private String path; - - public S3OutputStream(AmazonS3 s3Client, String bucket, String path) { - this.s3Client = s3Client; - this.bucket = bucket; - 
this.path = path; - } - - @Override - public void close() throws IOException { - byte[] buffer = this.toByteArray(); - try (InputStream in = new ByteArrayInputStream(buffer)) { - s3Client.putObject(bucket, path, in, new ObjectMetadata()); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java deleted file mode 100644 index 8f0c4016d1..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.io; - -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; - -import org.springframework.cglib.proxy.MethodInterceptor; - -public class IOMethodInterceptorFactory { - - private static IOMethodInterceptorCreator interceptorCreator = null; - - private IOMethodInterceptorFactory() {} - - /** - * This method is called when ioClient is initialized. 
ioClient初始化时会调用该方法 - * - * @param interceptorCreator - */ - public static void register(IOMethodInterceptorCreator interceptorCreator) { - IOMethodInterceptorFactory.interceptorCreator = interceptorCreator; - } - - public static MethodInterceptor getIOMethodInterceptor(String fsName) - throws StorageWarnException { - if (IOMethodInterceptorFactory.interceptorCreator == null) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM.getErrorCode(), - LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM.getErrorDesc()); - } - return IOMethodInterceptorFactory.interceptorCreator.createIOMethodInterceptor(fsName); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java deleted file mode 100644 index db78afac29..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.io.InputStream; -import java.text.MessageFormat; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.THE_FILE_IS_EMPTY; - -public class DefaultResultSetFactory implements ResultSetFactory { - - private static final Logger logger = LoggerFactory.getLogger(DefaultResultSetFactory.class); - - private final Map>> resultClasses; - - private final String[] resultTypes; - - public DefaultResultSetFactory() { - resultClasses = - StorageUtils.loadClasses( - StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue(), - StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue(), - t -> { - try { - return t.newInstance().resultSetType().toLowerCase(Locale.getDefault()); - } catch (InstantiationException e) { - logger.warn("DefaultResultSetFactory init failed", e); - } catch (IllegalAccessException e) { - logger.warn("DefaultResultSetFactory init failed", e); - } - return null; - }); - resultTypes = ResultSetFactory.resultSetType.keySet().toArray(new String[0]); - } - - @Override - public ResultSet getResultSetByType(String 
resultSetType) { - if (!resultClasses.containsKey(resultSetType)) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorCode(), - MessageFormat.format( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorDesc(), resultSetType)); - } - try { - return resultClasses.get(resultSetType).newInstance(); - } catch (InstantiationException | IllegalAccessException e) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorCode(), - MessageFormat.format( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorDesc(), resultSetType), - e); - } - } - - @Override - public ResultSet getResultSetByPath(FsPath fsPath) - throws StorageWarnException { - return getResultSetByPath(fsPath, StorageUtils.getJvmUser()); - } - - @Override - public ResultSet getResultSetByContent(String content) { - return getResultSetByType(Dolphin.getType(content)); - } - - @Override - public boolean exists(String resultSetType) { - return resultClasses.containsKey(resultSetType); - } - - @Override - public boolean isResultSetPath(String path) { - return path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX); - } - - @Override - public boolean isResultSet(String content) { - try { - return resultClasses.containsKey(Dolphin.getType(content)); - } catch (Exception e) { - logger.info("Wrong result Set: " + e.getMessage()); - return false; - } - } - - @Override - public ResultSet getResultSet(String output) - throws StorageWarnException { - return getResultSet(output, StorageUtils.getJvmUser()); - } - - @Override - public String[] getResultSetType() { - return Arrays.copyOf(resultTypes, resultTypes.length); - } - - @Override - public ResultSet getResultSetByPath(FsPath fsPath, Fs fs) { - try (InputStream inputStream = fs.read(fsPath)) { - String resultSetType = Dolphin.getType(inputStream); - if (StringUtils.isEmpty(resultSetType)) { - throw new StorageWarnException( - THE_FILE_IS_EMPTY.getErrorCode(), - 
MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc(), fsPath.getPath())); - } - // Utils.tryQuietly(fs::close); - return getResultSetByType(resultSetType); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public ResultSet getResultSetByPath( - FsPath fsPath, String proxyUser) { - if (fsPath == null) { - return null; - } - logger.info("Get Result Set By Path:" + fsPath.getPath()); - try (Fs fs = FSFactory.getFsByProxyUser(fsPath, proxyUser)) { - fs.init(new HashMap<>()); - try (InputStream inputStream = fs.read(fsPath)) { - String resultSetType = Dolphin.getType(inputStream); - if (StringUtils.isEmpty(resultSetType)) { - throw new StorageWarnException( - THE_FILE_IS_EMPTY.getErrorCode(), - MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc(), fsPath.getPath())); - } - IOUtils.closeQuietly(inputStream); - return getResultSetByType(resultSetType); - } catch (IOException e) { - throw new RuntimeException(e); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public ResultSet getResultSet( - String output, String proxyUser) { - if (isResultSetPath(output)) { - return getResultSetByPath(new FsPath(output), proxyUser); - } else if (isResultSet(output)) { - return getResultSetByContent(output); - } else { - return null; - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java deleted file mode 100644 index ed65cea16c..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.*; -import org.apache.linkis.common.io.resultset.ResultSet; - -import java.util.LinkedHashMap; -import java.util.Map; - -public interface ResultSetFactory { - - String TEXT_TYPE = "1"; - String TABLE_TYPE = "2"; - String IO_TYPE = "3"; - String PICTURE_TYPE = "4"; - String HTML_TYPE = "5"; - - /** TODO 修改为注册形式,并修改ResultSet的getResultType逻辑 Result set corresponding type record(结果集对应类型记录) */ - Map resultSetType = - new LinkedHashMap() { - { - put(TEXT_TYPE, "TEXT"); - put(TABLE_TYPE, "TABLE"); - put(IO_TYPE, "IO"); - put(PICTURE_TYPE, "PICTURE"); - put(HTML_TYPE, "HTML"); - } - }; - - DefaultResultSetFactory factory = new DefaultResultSetFactory(); - - static ResultSetFactory getInstance() { - return factory; - } - - ResultSet getResultSetByType(String resultSetType); - - ResultSet getResultSetByPath(FsPath fsPath); - - ResultSet getResultSetByPath(FsPath fsPath, Fs fs); - - ResultSet getResultSetByContent(String content); - - boolean exists(String resultSetType); - - boolean isResultSetPath(String path); - - boolean isResultSet(String content); - - ResultSet getResultSet(String output); - - ResultSet getResultSetByPath( - FsPath fsPath, String proxyUser); - - ResultSet getResultSet(String output, String proxyUser); - - String[] getResultSetType(); -} diff --git 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java deleted file mode 100644 index 3047b715a0..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.resultset.table.TableResultSet; - -import java.io.IOException; -import java.io.InputStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ResultSetReaderFactory { - private static final Logger logger = LoggerFactory.getLogger(ResultSetReaderFactory.class); - - public static ResultSetReader getResultSetReader( - ResultSet resultSet, InputStream inputStream) { - return new StorageResultSetReader<>(resultSet, inputStream); - } - - public static ResultSetReader getResultSetReader( - ResultSet resultSet, String value) { - return new StorageResultSetReader<>(resultSet, value); - } - - public static ResultSetReader getResultSetReader(String res) { - ResultSetFactory rsFactory = ResultSetFactory.getInstance(); - if (rsFactory.isResultSet(res)) { - ResultSet resultSet = rsFactory.getResultSet(res); - return ResultSetReaderFactory.getResultSetReader(resultSet, res); - } else { - FsPath resPath = new FsPath(res); - ResultSet resultSet = - rsFactory.getResultSetByPath(resPath); - try { - FSFactory.getFs(resPath).init(null); - } catch (IOException e) { - logger.warn("ResultSetReaderFactory fs init failed", e); - } - ResultSetReader reader = null; - try { - reader = - ResultSetReaderFactory.getResultSetReader( - resultSet, FSFactory.getFs(resPath).read(resPath)); - 
} catch (IOException e) { - logger.warn("ResultSetReaderFactory fs read failed", e); - } - if (reader instanceof StorageResultSetReader) { - ((StorageResultSetReader) reader).setFs(FSFactory.getFs(resPath)); - } - return (StorageResultSetReader) reader; - } - } - - public static ResultSetReader getTableResultReader(String res) { - ResultSetFactory rsFactory = ResultSetFactory.getInstance(); - if (rsFactory.isResultSet(res)) { - ResultSet resultSet = rsFactory.getResultSet(res); - if (!ResultSetFactory.TABLE_TYPE.equals(resultSet.resultSetType())) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorCode(), - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorDesc()); - } - return ResultSetReaderFactory.getResultSetReader( - (TableResultSet) resultSet, res); - } else { - FsPath resPath = new FsPath(res); - ResultSet resultSet = rsFactory.getResultSetByPath(resPath); - if (!ResultSetFactory.TABLE_TYPE.equals(resultSet.resultSetType())) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorCode(), - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorDesc()); - } - - Fs fs = FSFactory.getFs(resPath); - logger.info("Try to init Fs with path:{}", resPath.getPath()); - try { - fs.init(null); - InputStream read = fs.read(resPath); - - return ResultSetReaderFactory.getResultSetReader( - (TableResultSet) resultSet, read); - } catch (IOException e) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorCode(), - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorDesc()); - } - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java deleted file mode 100644 index 1abeaf0937..0000000000 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ResultSetWriterFactory { - private static final Logger logger = LoggerFactory.getLogger(ResultSetWriterFactory.class); - - public static - org.apache.linkis.common.io.resultset.ResultSetWriter getResultSetWriter( - ResultSet resultSet, long maxCacheSize, FsPath storePath) { - return new StorageResultSetWriter<>(resultSet, maxCacheSize, storePath); - } - - public static - org.apache.linkis.common.io.resultset.ResultSetWriter getResultSetWriter( - ResultSet resultSet, long maxCacheSize, FsPath storePath, String proxyUser) { - StorageResultSetWriter writer = - new 
StorageResultSetWriter<>(resultSet, maxCacheSize, storePath); - writer.setProxyUser(proxyUser); - return writer; - } - - public static Record[] getRecordByWriter( - org.apache.linkis.common.io.resultset.ResultSetWriter - writer, - long limit) { - String res = writer.toString(); - return getRecordByRes(res, limit); - } - - public static Record[] getRecordByRes(String res, long limit) { - ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res); - int count = 0; - List records = new ArrayList<>(); - try { - reader.getMetaData(); - while (reader.hasNext() && count < limit) { - records.add(reader.getRecord()); - count++; - } - } catch (IOException e) { - logger.warn("ResultSetWriter getRecordByRes failed", e); - } - return records.toArray(new Record[0]); - } - - public static Record getLastRecordByRes(String res) { - ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res); - Record record = null; - try { - reader.getMetaData(); - while (reader.hasNext()) { - record = reader.getRecord(); - } - } catch (IOException e) { - logger.warn("ResultSetWriter getLastRecordByRes failed", e); - } - return record; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java deleted file mode 100644 index c83661de2e..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.utils.StorageConfiguration; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class StorageResultSet - implements ResultSet { - - private static final Logger logger = LoggerFactory.getLogger(StorageResultSet.class); - - private byte[] resultHeaderBytes = null; - - { - byte[] arr2 = Dolphin.getIntBytes(Integer.parseInt(resultSetType())); - byte[] mergedArray = new byte[Dolphin.MAGIC_BYTES.length + arr2.length]; - System.arraycopy(Dolphin.MAGIC_BYTES, 0, mergedArray, 0, Dolphin.MAGIC_BYTES.length); - System.arraycopy(arr2, 0, mergedArray, Dolphin.MAGIC_BYTES.length, arr2.length); - resultHeaderBytes = mergedArray; - } - - @Override - public String charset() { - return StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue(); - } - - @Override - public FsPath getResultSetPath(FsPath parentDir, String fileName) { - final String path = - parentDir.getPath().endsWith("/") - ? 
parentDir.getUriString() + fileName + Dolphin.DOLPHIN_FILE_SUFFIX - : parentDir.getUriString() + "/" + fileName + Dolphin.DOLPHIN_FILE_SUFFIX; - logger.info("Get result set path: {}", path); - return new FsPath(path); - } - - @Override - public byte[] getResultSetHeader() { - return resultHeaderBytes; - } - - @Override - public boolean belongToPath(String path) { - return path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX); - } - - @Override - public boolean belongToResultSet(String content) { - try { - return Dolphin.getType(content).equals(resultSetType()); - } catch (Exception e) { - logger.info("Wrong result set: ", e); - return false; - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java deleted file mode 100644 index c0222cc848..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageResultSetReader - extends ResultSetReader { - - private static final Logger logger = LoggerFactory.getLogger(StorageResultSetReader.class); - - private final ResultSet resultSet; - private final InputStream inputStream; - private final ResultDeserializer deserializer; - private K metaData; - private Record row; - private int colCount = 0; - private int rowCount = 0; - private Fs fs; - - private final int READ_CACHE = 1024; - - public StorageResultSetReader(ResultSet resultSet, InputStream inputStream) { - super(resultSet, inputStream); - this.resultSet = resultSet; - this.inputStream = inputStream; - this.deserializer = resultSet.createResultSetDeserializer(); - } - - public StorageResultSetReader(ResultSet resultSet, String value) { - this(resultSet, new ByteArrayInputStream(value.getBytes(Dolphin.CHAR_SET))); - } - - public void init() throws IOException { - String resType = Dolphin.getType(inputStream); - if (!StringUtils.equals(resultSet.resultSetType(), resType)) { - throw new RuntimeException( - "File type does not match(文件类型不匹配): " - + ResultSetFactory.resultSetType.getOrDefault(resType, "TABLE")); - } - } - - public byte[] readLine() { - int rowLen = 0; - try { - 
rowLen = Dolphin.readInt(inputStream); - } catch (StorageWarnException | IOException e) { - logger.info("Read finished(读取完毕)"); - return null; - } - - int len = 0; - byte[] rowBuffer = null; - try { - rowBuffer = new byte[rowLen]; - len = StorageUtils.readBytes(inputStream, rowBuffer, rowLen); - } catch (OutOfMemoryError error) { - logger.error("Result set read oom, read size {} Byte", rowLen); - throw new RuntimeException(error); - } - if (len != rowLen) { - throw new RuntimeException( - "Can't get the value of the field, maybe the IO stream has been read or has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)"); - } - rowCount++; - return rowBuffer; - } - - @Override - public Record getRecord() { - if (metaData == null) throw new RuntimeException("Must read metadata first(必须先读取metadata)"); - if (row == null) { - throw new RuntimeException( - "Can't get the value of the field, maybe the IO stream has been read or has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)"); - } - return row; - } - - public void setFs(Fs fs) { - this.fs = fs; - } - - public Fs getFs() { - return fs; - } - - @Override - public MetaData getMetaData() { - if (metaData == null) { - try { - init(); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - metaData = deserializer.createMetaData(readLine()); - return metaData; - } - - @Override - public int skip(int recordNum) throws IOException { - if (recordNum < 0) return -1; - - if (metaData == null) getMetaData(); - for (int i = recordNum; i > 0; i--) { - try { - inputStream.skip(Dolphin.readInt(inputStream)); - } catch (Throwable t) { - return recordNum - i; - } - } - return recordNum; - } - - @Override - public long getPosition() throws IOException { - return rowCount; - } - - @Override - public boolean hasNext() throws IOException { - if (metaData == null) getMetaData(); - byte[] line = readLine(); - if (line == null) return false; - row = deserializer.createRecord(line); - if (row == null) return false; - return true; - } - - @Override - 
public long available() throws IOException { - return inputStream.available(); - } - - @Override - public void close() throws IOException { - IOUtils.closeQuietly(inputStream); - if (this.fs != null) { - this.fs.close(); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java deleted file mode 100644 index 5109ed44df..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.*; -import org.apache.linkis.common.io.resultset.*; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.common.utils.*; -import org.apache.linkis.storage.*; -import org.apache.linkis.storage.conf.*; -import org.apache.linkis.storage.domain.*; -import org.apache.linkis.storage.utils.*; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.hdfs.client.HdfsDataOutputStream; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageResultSetWriter - extends ResultSetWriter { - private static final Logger logger = LoggerFactory.getLogger(StorageResultSetWriter.class); - - private final ResultSet resultSet; - private final long maxCacheSize; - private final FsPath storePath; - - private final ResultSerializer serializer; - private boolean moveToWriteRow = false; - private OutputStream outputStream = null; - private int rowCount = 0; - private final List buffer = new ArrayList(); - private Fs fs = null; - private MetaData rMetaData = null; - private String proxyUser = StorageUtils.getJvmUser(); - private boolean fileCreated = false; - private boolean closed = false; - private final Object WRITER_LOCK_CREATE = new Object(); - private final Object WRITER_LOCK_CLOSE = new Object(); - - public StorageResultSetWriter(ResultSet resultSet, long maxCacheSize, FsPath storePath) { - super(resultSet, maxCacheSize, storePath); - this.resultSet = resultSet; - this.maxCacheSize = maxCacheSize; - this.storePath = storePath; - - this.serializer = resultSet.createResultSetSerializer(); - } - - public MetaData getMetaData() { - return rMetaData; - } - - public void setProxyUser(String proxyUser) { - this.proxyUser = proxyUser; - } - - public boolean isEmpty() { - return rMetaData == null && buffer.size() <= 
Dolphin.FILE_EMPTY; - } - - public void init() { - try { - writeLine(resultSet.getResultSetHeader(), true); - } catch (IOException e) { - logger.warn("StorageResultSetWriter init failed", e); - } - } - - public void createNewFile() { - if (!fileCreated) { - synchronized (WRITER_LOCK_CREATE) { - if (!fileCreated) { - if (storePath != null && outputStream == null) { - logger.info("Try to create a new file:{}, with proxy user:{}", storePath, proxyUser); - fs = FSFactory.getFsByProxyUser(storePath, proxyUser); - try { - fs.init(null); - FileSystemUtils.createNewFile(storePath, proxyUser, true); - outputStream = fs.write(storePath, true); - } catch (IOException e) { - logger.warn("StorageResultSetWriter createNewFile failed", e); - } - logger.info("Succeed to create a new file:{}", storePath); - fileCreated = true; - } - } - } - } else if (storePath != null && outputStream == null) { - logger.warn("outputStream had been set null, but createNewFile() was called again."); - } - } - - public void writeLine(byte[] bytes, boolean cache) throws IOException { - if (closed) { - logger.warn("the writer had been closed, but writeLine() was still called."); - return; - } - if (bytes.length > LinkisStorageConf.ROW_BYTE_MAX_LEN) { - throw new IOException( - String.format( - "A single row of data cannot exceed %s", LinkisStorageConf.ROW_BYTE_MAX_LEN_STR)); - } - if (buffer.size() > maxCacheSize && !cache) { - if (outputStream == null) { - createNewFile(); - } - flush(); - outputStream.write(bytes); - } else { - for (byte b : bytes) { - buffer.add(b); - } - } - } - - @Override - public String toString() { - if (outputStream == null) { - if (isEmpty()) { - return ""; - } - - byte[] byteArray = getBytes(); - return new String(byteArray, Dolphin.CHAR_SET); - } - return storePath.getSchemaPath(); - } - - private byte[] getBytes() { - byte[] byteArray = new byte[buffer.size()]; - for (int i = 0; i < buffer.size(); i++) { - byteArray[i] = buffer.get(i); - } - return byteArray; - } - - 
@Override - public FsPath toFSPath() { - return storePath; - } - - @Override - public void addMetaDataAndRecordString(String content) { - if (!moveToWriteRow) { - byte[] bytes = content.getBytes(Dolphin.CHAR_SET); - try { - writeLine(bytes, false); - } catch (IOException e) { - logger.warn("addMetaDataAndRecordString failed", e); - } - } - moveToWriteRow = true; - } - - @Override - public void addRecordString(String content) {} - - @Override - public void addMetaData(MetaData metaData) throws IOException { - if (!moveToWriteRow) { - rMetaData = metaData; - init(); - if (metaData == null) { - writeLine(serializer.metaDataToBytes(metaData), true); - } else { - writeLine(serializer.metaDataToBytes(metaData), false); - } - moveToWriteRow = true; - } - } - - @Override - public void addRecord(Record record) { - if (moveToWriteRow) { - rowCount++; - try { - writeLine(serializer.recordToBytes(record), false); - } catch (IOException e) { - logger.warn("addMetaDataAndRecordString failed", e); - } - } - } - - public void closeFs() { - if (fs != null) { - IOUtils.closeQuietly(fs); - fs = null; - } - } - - @Override - public void close() { - if (closed) { - logger.warn("the writer had been closed, but close() was still called."); - return; - } - synchronized (WRITER_LOCK_CLOSE) { - if (!closed) { - closed = true; - } else { - return; - } - } - try { - if (outputStream != null) { - flush(); - } - } finally { - if (outputStream != null) { - IOUtils.closeQuietly(outputStream); - outputStream = null; - } - closeFs(); - } - } - - @Override - public void flush() { - createNewFile(); - if (outputStream != null) { - try { - if (!buffer.isEmpty()) { - outputStream.write(getBytes()); - buffer.clear(); - } - if (outputStream instanceof HdfsDataOutputStream) { - ((HdfsDataOutputStream) outputStream).hflush(); - } else { - outputStream.flush(); - } - } catch (IOException e) { - logger.warn("Error encountered when flush result set", e); - } - } - if (closed && logger.isDebugEnabled()) { - 
logger.debug("the writer had been closed, but flush() was still called."); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java deleted file mode 100644 index 00c0e7b2a7..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset.html; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; -import org.apache.linkis.storage.resultset.txt.TextResultDeserializer; -import org.apache.linkis.storage.resultset.txt.TextResultSerializer; - -import java.io.Serializable; - -public class HtmlResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.HTML_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TextResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TextResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java deleted file mode 100644 index 02d83aa7bb..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.io; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.storage.domain.Dolphin; - -import org.apache.commons.codec.binary.Base64; - -public class IOResultDeserializer extends ResultDeserializer { - - @Override - public IOMetaData createMetaData(byte[] bytes) { - String[] values = Dolphin.getString(bytes, 0, bytes.length).split(Dolphin.COL_SPLIT); - return new IOMetaData(Integer.parseInt(values[0]), Integer.parseInt(values[1])); - } - - @Override - public IORecord createRecord(byte[] bytes) { - return new IORecord(Base64.decodeBase64(Dolphin.getString(bytes, 0, bytes.length))); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java deleted file mode 100644 index 2401e361a5..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.io; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.codec.binary.Base64; - -public class IOResultSerializer extends ResultSerializer { - - @Override - public byte[] metaDataToBytes(MetaData metaData) { - IOMetaData ioMetaData = (IOMetaData) metaData; - return lineToBytes(ioMetaData.off + Dolphin.COL_SPLIT + ioMetaData.len); - } - - @Override - public byte[] recordToBytes(Record record) { - IORecord ioRecord = (IORecord) record; - return lineToBytes(Base64.encodeBase64String(ioRecord.value)); - } - - private byte[] lineToBytes(String value) { - byte[] bytes = value == null ? Dolphin.NULL_BYTES : Dolphin.getBytes(value); - byte[] intBytes = Dolphin.getIntBytes(bytes.length); - return StorageUtils.mergeByteArrays(intBytes, bytes); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java deleted file mode 100644 index 67f8e76904..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.io; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; - -import java.io.Serializable; - -public class IOResultSet extends StorageResultSet implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.IO_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new IOResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new IOResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java deleted file mode 100644 index 5e73592a7b..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.picture; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; -import org.apache.linkis.storage.resultset.txt.TextResultDeserializer; -import org.apache.linkis.storage.resultset.txt.TextResultSerializer; - -import java.io.Serializable; - -public class PictureResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.PICTURE_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TextResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TextResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableMetaData.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableMetaData.java deleted file mode 100644 index 429ab33c82..0000000000 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableMetaData.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.table; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.resultset.ResultMetaData; - -public class TableMetaData implements ResultMetaData { - - public Column[] columns; - - public TableMetaData(Column[] columns) { - this.columns = columns; - } - - public Column[] getColumns() { - return columns; - } - - public void setColumns(Column[] columns) { - this.columns = columns; - } - - @Override - public MetaData cloneMeta() { - return new TableMetaData(columns.clone()); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java deleted file mode 100644 index 7e1d6c35fe..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java +++ /dev/null @@ -1,100 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.table; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.exception.StorageWarnException; - -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.PARSING_METADATA_FAILED; - -public class TableResultDeserializer extends ResultDeserializer { - private static final Logger logger = LoggerFactory.getLogger(TableResultDeserializer.class); - - private TableMetaData metaData; - - @Override - public TableMetaData createMetaData(byte[] bytes) { - int colByteLen = Integer.parseInt(Dolphin.getString(bytes, 0, Dolphin.INT_LEN)); - String colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen); - String[] colArray = - colString.endsWith(Dolphin.COL_SPLIT) - ? 
colString.substring(0, colString.length() - 1).split(Dolphin.COL_SPLIT) - : colString.split(Dolphin.COL_SPLIT); - int index = Dolphin.INT_LEN + colByteLen; - if (colArray.length % 3 != 0) { - throw new StorageWarnException( - PARSING_METADATA_FAILED.getErrorCode(), PARSING_METADATA_FAILED.getErrorDesc()); - } - List columns = new ArrayList<>(); - for (int i = 0; i < colArray.length; i += 3) { - int len = Integer.parseInt(colArray[i]); - String colName = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)); - index += len; - len = Integer.parseInt(colArray[i + 1]); - String colType = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)); - index += len; - len = Integer.parseInt(colArray[i + 2]); - String colComment = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)); - index += len; - columns.add(new Column(colName, DataType.toDataType(colType), colComment)); - } - metaData = new TableMetaData(columns.toArray(new Column[0])); - return metaData; - } - - /** - * colByteLen:All column fields are long(所有列字段长 记录的长度) colString:Obtain column - * length(获得列长):10,20,21 colArray:Column length array(列长数组) Get data by column length(通过列长获得数据) - * - * @param bytes - * @return - */ - @Override - public TableRecord createRecord(byte[] bytes) { - int colByteLen = Integer.parseInt(Dolphin.getString(bytes, 0, Dolphin.INT_LEN)); - String colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen); - String[] colArray; - if (colString.endsWith(Dolphin.COL_SPLIT)) { - colArray = colString.substring(0, colString.length() - 1).split(Dolphin.COL_SPLIT); - } else { - colArray = colString.split(Dolphin.COL_SPLIT); - } - int index = Dolphin.INT_LEN + colByteLen; - Object[] data = new Object[colArray.length]; - for (int i = 0; i < colArray.length; i++) { - int len = Integer.parseInt(colArray[i]); - String res = Dolphin.getString(bytes, index, len); - index += len; - if (i >= metaData.columns.length) { - data[i] = res; - } else { - data[i] = 
DataType.toValue(metaData.columns[i].getDataType(), res); - } - } - return new TableRecord(data); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java deleted file mode 100644 index 5f40aa33f3..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset.table; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.Dolphin; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class TableResultSerializer extends ResultSerializer { - - @Override - public byte[] metaDataToBytes(MetaData metaData) { - TableMetaData tableMetaData = (TableMetaData) metaData; - Object[] objects = - Arrays.stream(tableMetaData.columns).map(Column::toArray).flatMap(Arrays::stream).toArray(); - return lineToBytes(objects); - } - - @Override - public byte[] recordToBytes(Record record) { - TableRecord tableRecord = (TableRecord) record; - return lineToBytes(tableRecord.row); - } - - /** - * Convert a row of data to an array of Bytes Convert the data to byte and get the corresponding - * total byte length to write to the file Data write format: line length (fixed length) column - * length (fixed length) field index comma segmentation real data For example: - * 000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51 The length of the line does not - * include its own length 将一行数据转换为Bytes的数组 对数据转换为byte,并获取相应的总byte长度写入文件 数据写入格式:行长(固定长度) 列长(固定长度) - * 字段索引逗号分割 真实数据 如:000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51 其中行长不包括自身长度 - * - * @param line - */ - private byte[] lineToBytes(Object[] line) { - // Data cache(数据缓存) - List dataBytes = new ArrayList<>(); - // Column cache(列缓存) - List colIndex = new ArrayList<>(); - int colByteLen = 0; - int length = 0; - for (Object data : line) { - byte[] bytes = data == null ? 
Dolphin.LINKIS_NULL_BYTES : Dolphin.getBytes(data); - dataBytes.add(bytes); - byte[] colBytes = Dolphin.getBytes(bytes.length); - colIndex.add(colBytes); - colIndex.add(Dolphin.COL_SPLIT_BYTES); - colByteLen += colBytes.length + Dolphin.COL_SPLIT_LEN; - length += bytes.length; - } - length += colByteLen + Dolphin.INT_LEN; - return toByteArray(length, colByteLen, colIndex, dataBytes); - } - - /** - * Splice a row of data into a byte array(将一行的数据拼接成byte数组) - * - * @param length The total length of the line data byte, excluding its own - * length(行数据byte总长度,不包括自身的长度) - * @param colByteLen Record field index byte column length(记录字段索引byte的列长) - * @param colIndex Field index, including separator comma(字段索引,包括分割符逗号) - * @param dataBytes Byte of real data(真实数据的byte) - * @return - */ - public static byte[] toByteArray( - int length, int colByteLen, List colIndex, List dataBytes) { - List row = new ArrayList<>(); - colIndex.addAll(dataBytes); - - for (byte intByte : Dolphin.getIntBytes(length)) { - row.add(intByte); - } - - for (byte colByte : Dolphin.getIntBytes(colByteLen)) { - row.add(colByte); - } - - colIndex.forEach( - bytes -> { - for (byte b : bytes) { - row.add(b); - } - }); - byte[] result = new byte[row.size()]; - for (int i = 0; i < result.length; i++) { - result[i] = row.get(i); - } - return result; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java deleted file mode 100644 index 3165e5af86..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.txt; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.domain.Dolphin; - -public class TextResultDeserializer extends ResultDeserializer { - - @Override - public LineMetaData createMetaData(byte[] bytes) { - return new LineMetaData(Dolphin.getString(bytes, 0, bytes.length)); - } - - @Override - public LineRecord createRecord(byte[] bytes) { - return new LineRecord(Dolphin.getString(bytes, 0, bytes.length)); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java deleted file mode 100644 index 5555ad9eba..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.txt; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.utils.StorageUtils; - -public class TextResultSerializer extends ResultSerializer { - - @Override - public byte[] metaDataToBytes(MetaData metaData) { - if (metaData == null) { - return lineToBytes(null); - } else { - LineMetaData textMetaData = (LineMetaData) metaData; - return lineToBytes(textMetaData.getMetaData()); - } - } - - @Override - public byte[] recordToBytes(Record record) { - LineRecord textRecord = (LineRecord) record; - return lineToBytes(textRecord.getLine()); - } - - private byte[] lineToBytes(String value) { - byte[] bytes = (value == null) ? 
Dolphin.NULL_BYTES : Dolphin.getBytes(value); - return StorageUtils.mergeByteArrays(Dolphin.getIntBytes(bytes.length), bytes); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java deleted file mode 100644 index 19fd8f9dbc..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset.txt; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; - -import java.io.Serializable; - -public class TextResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.TEXT_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TextResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TextResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java deleted file mode 100644 index abfbae9c99..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script; - -import org.apache.linkis.storage.script.compaction.PYScriptCompaction; -import org.apache.linkis.storage.script.compaction.QLScriptCompaction; -import org.apache.linkis.storage.script.compaction.ScalaScriptCompaction; -import org.apache.linkis.storage.script.compaction.ShellScriptCompaction; - -public interface Compaction { - String prefixConf(); - - String prefix(); - - boolean belongTo(String suffix); - - String compact(Variable variable); - - public static Compaction[] listCompactions() { - return new Compaction[] { - new PYScriptCompaction(), - new QLScriptCompaction(), - new ScalaScriptCompaction(), - new ShellScriptCompaction() - }; - } - - String getAnnotationSymbol(); -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java deleted file mode 100644 index 58edf3dfe9..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script; - -import org.apache.linkis.storage.script.parser.PYScriptParser; -import org.apache.linkis.storage.script.parser.QLScriptParser; -import org.apache.linkis.storage.script.parser.ScalaScriptParser; -import org.apache.linkis.storage.script.parser.ShellScriptParser; - -public class ParserFactory { - public static Parser[] listParsers() { - return new Parser[] { - new PYScriptParser(), new QLScriptParser(), new ScalaScriptParser(), new ShellScriptParser() - }; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java deleted file mode 100644 index 00b79eb7e4..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.FsReader; -import org.apache.linkis.storage.script.reader.StorageScriptFsReader; - -import java.io.InputStream; - -public abstract class ScriptFsReader extends FsReader { - - protected FsPath path; - protected String charset; - - public ScriptFsReader(FsPath path, String charset) { - this.path = path; - this.charset = charset; - } - - public static ScriptFsReader getScriptFsReader( - FsPath path, String charset, InputStream inputStream) { - return new StorageScriptFsReader(path, charset, inputStream); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java deleted file mode 100644 index cd10746457..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script; - -import java.util.*; - -public class VariableParser { - - public static final String CONFIGURATION = "configuration"; - public static final String VARIABLE = "variable"; - public static final String RUNTIME = "runtime"; - public static final String STARTUP = "startup"; - public static final String SPECIAL = "special"; - - public static Variable[] getVariables(Map params) { - List variables = new ArrayList<>(); - Map variableMap = - (Map) params.getOrDefault(VARIABLE, new HashMap()); - for (Map.Entry entry : variableMap.entrySet()) { - variables.add(new Variable(VARIABLE, null, entry.getKey(), entry.getValue().toString())); - } - - Map configurationMap = - (Map) params.getOrDefault(CONFIGURATION, new HashMap()); - for (Map.Entry entry : configurationMap.entrySet()) { - Map subMap = (Map) entry.getValue(); - for (Map.Entry subEntry : subMap.entrySet()) { - if (!isContextIDINFO(subEntry.getKey())) { - Object value = subEntry.getValue(); - if (value instanceof Map) { - Map innerMap = (Map) value; - for (Map.Entry innerEntry : innerMap.entrySet()) { - if (!isContextIDINFO(innerEntry.getKey())) { - variables.add( - new Variable( - entry.getKey(), - subEntry.getKey(), - innerEntry.getKey(), - innerEntry.getValue().toString())); - } - } - } else { - if (value == null) { - variables.add(new Variable(CONFIGURATION, entry.getKey(), subEntry.getKey(), "")); - } else { - variables.add( - new Variable(CONFIGURATION, entry.getKey(), subEntry.getKey(), value.toString())); - } - } - } - } - } - return variables.toArray(new Variable[variables.size()]); - } - - private static boolean isContextIDINFO(String key) { - return "contextID".equalsIgnoreCase(key) || "nodeName".equalsIgnoreCase(key); - } - - public static Map getMap(Variable[] variables) { - Map variableKey2Value = new HashMap<>(); - Map confs = new HashMap<>(); - - Arrays.stream(variables) - .filter(variable -> variable.sort == null) - .forEach(v -> 
variableKey2Value.put(v.key, v.value)); - - Arrays.stream(variables) - .filter(variable -> variable.sort != null) - .forEach( - v -> { - switch (v.getSort()) { - case STARTUP: - case RUNTIME: - case SPECIAL: - if (!confs.containsKey(v.getSort())) { - confs.put(v.getSort(), createMap(v)); - } else { - Map subMap = (Map) confs.get(v.getSort()); - subMap.put(v.getKey(), v.getValue()); - } - break; - default: - if (!confs.containsKey(v.getSortParent())) { - Map subMap = new HashMap<>(); - subMap.put(v.getSort(), createMap(v)); - confs.put(v.getSortParent(), subMap); - } else { - Map subMap = (Map) confs.get(v.getSortParent()); - if (!subMap.containsKey(v.getSort())) { - subMap.put(v.getSort(), createMap(v)); - } else { - Map innerMap = (Map) subMap.get(v.getSort()); - innerMap.put(v.getKey(), v.getValue()); - } - } - break; - } - }); - - Map params = new HashMap<>(); - if (!variableKey2Value.isEmpty()) { - params.put(VARIABLE, variableKey2Value); - } - if (!confs.isEmpty()) { - params.put(CONFIGURATION, confs); - } - return params; - } - - private static Map createMap(Variable variable) { - Map map = new HashMap<>(); - map.put(variable.getKey(), variable.getValue()); - return map; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java deleted file mode 100644 index 4022e7d46d..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class PYScriptCompaction extends CommonScriptCompaction { - - private static final PYScriptCompaction pYScriptCompaction = new PYScriptCompaction(); - - public static CommonScriptCompaction apply() { - return pYScriptCompaction; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON()); - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public String prefixConf() { - return "#conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java deleted file mode 100644 index 97fc29efa0..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class QLScriptCompaction extends CommonScriptCompaction { - - private static final QLScriptCompaction qLScriptCompaction = new QLScriptCompaction(); - - public static CommonScriptCompaction apply() { - return qLScriptCompaction; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL()); - } - - @Override - public String prefix() { - return "--@set"; - } - - @Override - public String prefixConf() { - return "--conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java deleted file mode 100644 index 18d5d2f531..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ScalaScriptCompaction extends CommonScriptCompaction { - - private static final ScalaScriptCompaction compaction = new ScalaScriptCompaction(); - - public static CommonScriptCompaction apply() { - return compaction; - } - - @Override - public String prefix() { - return "//@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA()); - } - - @Override - public String prefixConf() { - return "//conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java deleted file mode 100644 index 085815b559..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ShellScriptCompaction extends CommonScriptCompaction { - - private static final ShellScriptCompaction shellScriptCompaction = new ShellScriptCompaction(); - - public static CommonScriptCompaction apply() { - return shellScriptCompaction; - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public String prefixConf() { - return "#conf@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL()); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java deleted file mode 100644 index 0d7e9f58e1..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.script.Parser; -import org.apache.linkis.storage.script.Variable; -import org.apache.linkis.storage.script.VariableParser; - -import java.util.Arrays; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public abstract class CommonScriptParser implements Parser { - - @Override - public Variable parse(String line) { - String variableReg = "\\s*" + prefix() + "\\s*(.+)\\s*" + "=" + "\\s*(.+)\\s*"; - Pattern pattern = Pattern.compile(variableReg); - Matcher matcher = pattern.matcher(line); - if (matcher.matches()) { - String key = matcher.group(1).trim(); - String value = matcher.group(2).trim(); - return new Variable(VariableParser.VARIABLE, null, key, value); - - } else { - String[] splitLine = line.split("="); - if (splitLine.length != 2) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorCode(), - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorDesc()); - } - String[] subSplit = - Arrays.stream(splitLine[0].split(" ")) - .filter(str -> !"".equals(str)) - .toArray(String[]::new); - if (subSplit.length != 4) { - throw new StorageWarnException( - 
LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorCode(), - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorDesc()); - } - if (!subSplit[0].trim().equals(prefixConf())) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorCode(), - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorDesc()); - } - String sortParent = subSplit[1].trim(); - String sort = subSplit[2].trim(); - String key = subSplit[3].trim(); - String value = splitLine[1].trim(); - return new Variable(sortParent, sort, key, value); - } - } - - @Override - public String getAnnotationSymbol() { - return prefix().split("@")[0]; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java deleted file mode 100644 index 3d5edcfac7..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class PYScriptParser extends CommonScriptParser { - - private static final PYScriptParser pYScriptParser = new PYScriptParser(); - - public static CommonScriptParser apply() { - return pYScriptParser; - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON()); - } - - @Override - public String prefixConf() { - return "#conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java deleted file mode 100644 index 806a84760f..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class QLScriptParser extends CommonScriptParser { - - private static final QLScriptParser qLScriptParser = new QLScriptParser(); - - public static CommonScriptParser apply() { - return qLScriptParser; - } - - @Override - public String prefix() { - return "--@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL()); - } - - @Override - public String prefixConf() { - return "--conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java deleted file mode 100644 index 982538d8e8..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ScalaScriptParser extends CommonScriptParser { - - private static final ScalaScriptParser scalaScriptParser = new ScalaScriptParser(); - - public static CommonScriptParser apply() { - return scalaScriptParser; - } - - @Override - public String prefix() { - return "//@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA()); - } - - @Override - public String prefixConf() { - return "//conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java deleted file mode 100644 index e1eebe3e58..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ShellScriptParser extends CommonScriptParser { - - private static final ShellScriptParser shellScriptParser = new ShellScriptParser(); - - public static CommonScriptParser create() { - return shellScriptParser; - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL()); - } - - @Override - public String prefixConf() { - return "#conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java deleted file mode 100644 index 1f862fa6b1..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.reader; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.script.*; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.*; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class StorageScriptFsReader extends ScriptFsReader { - private final FsPath path; - private final String charset; - private final InputStream inputStream; - - private InputStreamReader inputStreamReader; - private BufferedReader bufferedReader; - - private ScriptMetaData metadata; - - private List variables = new ArrayList<>(); - private String lineText; - - public StorageScriptFsReader(FsPath path, String charset, InputStream inputStream) { - super(path, charset); - this.path = path; - this.charset = charset; - this.inputStream = inputStream; - } - - @Override - public Record getRecord() throws IOException { - if (metadata == null) throw new IOException("Must read metadata first(必须先读取metadata)"); - ScriptRecord record = new ScriptRecord(lineText); - lineText = bufferedReader.readLine(); - return record; - } - - @Override - public MetaData getMetaData() throws IOException { - if (metadata == null) init(); - Parser parser = getScriptParser(); - lineText = bufferedReader.readLine(); - while (hasNext() - && Objects.nonNull(parser) - && isMetadata(lineText, parser.prefix(), parser.prefixConf())) { - variables.add(parser.parse(lineText)); - lineText = bufferedReader.readLine(); - } - metadata = new ScriptMetaData(variables.toArray(new Variable[0])); - return metadata; - } - - public void init() { - inputStreamReader = new InputStreamReader(inputStream); - bufferedReader = new BufferedReader(inputStreamReader); - } - - 
@Override - public int skip(int recordNum) throws IOException { - if (recordNum < 0) return -1; - if (metadata == null) getMetaData(); - try { - return (int) bufferedReader.skip(recordNum); - } catch (Throwable t) { - return recordNum; - } - } - - @Override - public long getPosition() throws IOException { - return -1L; - } - - @Override - public boolean hasNext() throws IOException { - return lineText != null; - } - - @Override - public long available() throws IOException { - return inputStream != null ? inputStream.available() : 0L; - } - - @Override - public void close() throws IOException { - IOUtils.closeQuietly(bufferedReader); - IOUtils.closeQuietly(inputStreamReader); - IOUtils.closeQuietly(inputStream); - } - - /** - * Determine if the read line is metadata(判断读的行是否是metadata) - * - * @param line - * @return - */ - public boolean isMetadata(String line, String prefix, String prefixConf) { - String regex = "\\s*" + prefix + "\\s*(.+)\\s*=\\s*(.+)\\s*"; - if (line.matches(regex)) { - return true; - } else { - String[] split = line.split("="); - if (split.length != 2) { - return false; - } - if (Stream.of(split[0].split(" ")).filter(str -> !"".equals(str)).count() != 4) { - return false; - } - - Optional optional = - Stream.of(split[0].split(" ")).filter(str -> !"".equals(str)).findFirst(); - if (optional.isPresent() && !optional.get().equals(prefixConf)) { - return false; - } - return true; - } - } - - /** - * get the script parser according to the path(根据文件路径 获取对应的script parser ) - * - * @return Scripts Parser - */ - public Parser getScriptParser() { - List parsers = - Arrays.stream(ParserFactory.listParsers()) - .filter(p -> p.belongTo(StorageUtils.pathToSuffix(path.getPath()))) - .collect(Collectors.toList()); - if (parsers.size() > 0) { - return parsers.get(0); - } else { - return null; - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java deleted file mode 100644 index 84dd6abb83..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.writer; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.script.Compaction; -import org.apache.linkis.storage.script.ScriptFsWriter; -import org.apache.linkis.storage.script.ScriptMetaData; -import org.apache.linkis.storage.script.Variable; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.hdfs.client.HdfsDataOutputStream; - -import java.io.*; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageScriptFsWriter extends ScriptFsWriter { - private static final Logger log = LoggerFactory.getLogger(StorageScriptFsWriter.class); - - private final FsPath path; - private final String charset; - private final OutputStream outputStream; - private final StringBuilder stringBuilder = new StringBuilder(); - - public StorageScriptFsWriter(FsPath path, String charset, OutputStream outputStream) { - this.path = path; - this.charset = charset; - this.outputStream = outputStream; - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - String suffix = StorageUtils.pathToSuffix(path.getPath()); - List compactions = - Stream.of(Compaction.listCompactions()) - .filter(compaction -> compaction.belongTo(suffix)) - .collect(Collectors.toList()); - List metadataLine = new ArrayList<>(); - if (!compactions.isEmpty()) { - Variable[] metaData1 = ((ScriptMetaData) metaData).getMetaData(); - Stream.of(metaData1).map(compactions.get(0)::compact).forEach(metadataLine::add); - - // add annotition symbol - if (metadataLine.size() > 0) { - 
metadataLine.add(compactions.get(0).getAnnotationSymbol()); - } - if (outputStream != null) { - IOUtils.writeLines(metadataLine, "\n", outputStream, charset); - } else { - metadataLine.forEach(m -> stringBuilder.append(m).append("\n")); - } - } - } - - @Override - public void addRecord(Record record) throws IOException { - LineRecord scriptRecord = (LineRecord) record; - if (outputStream != null) { - IOUtils.write(scriptRecord.getLine(), outputStream, charset); - } else { - stringBuilder.append(scriptRecord.getLine()); - } - } - - @Override - public void close() { - IOUtils.closeQuietly(outputStream); - } - - @Override - public void flush() { - if (outputStream instanceof HdfsDataOutputStream) { - try { - ((HdfsDataOutputStream) outputStream).hflush(); - } catch (IOException t) { - log.warn("Error encountered when flush script", t); - } - } else if (outputStream != null) { - try { - outputStream.flush(); - } catch (IOException t) { - log.warn("Error encountered when flush script", t); - } - } - } - - @Override - public InputStream getInputStream() { - byte[] bytes = null; - try { - bytes = - stringBuilder.toString().getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue()); - } catch (UnsupportedEncodingException e) { - log.warn("StorageScriptFsWriter getInputStream failed", e); - } - return new ByteArrayInputStream(bytes); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java deleted file mode 100644 index fc4e615b36..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.FsWriter; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.math3.util.Pair; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -public abstract class AbstractFileSource implements FileSource { - - private FileSplit[] fileSplits; - - public AbstractFileSource(FileSplit[] fileSplits) { - this.fileSplits = fileSplits; - } - - @Override - public FileSource shuffle(Function function) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.shuffler = function); - return this; - } - - @Override - public FileSource page(int page, int pageSize) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.page(page, pageSize)); - return this; - } - - @Override - public FileSource addParams(Map params) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.addParams(params)); - return this; - } - - @Override - public FileSource addParams(String key, String value) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.addParams(key, value)); - return this; - } - - @Override - public FileSplit[] getFileSplits() { - return this.fileSplits; - } - - @Override - public Map getParams() { - return 
Arrays.stream(fileSplits) - .map(FileSplit::getParams) - .flatMap(map -> map.entrySet().stream()) - .collect( - Collectors.toMap( - Map.Entry::getKey, Map.Entry::getValue, (existingValue, newValue) -> newValue)); - } - - @Override - public void write(FsWriter fsWriter) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.write(fsWriter)); - } - - @Override - public void close() { - Arrays.stream(fileSplits).forEach(IOUtils::closeQuietly); - } - - @Override - public Pair>[] collect() { - return Arrays.stream(fileSplits).map(FileSplit::collect).toArray(Pair[]::new); - } - - @Override - public int getTotalLine() { - return Arrays.stream(fileSplits).mapToInt(FileSplit::getTotalLine).sum(); - } - - @Override - public String[] getTypes() { - return Arrays.stream(fileSplits).map(FileSplit::getType).toArray(String[]::new); - } - - @Override - public Pair[] getFileInfo(int needToCountRowNumber) { - return Arrays.stream(fileSplits) - .map(fileSplit -> fileSplit.getFileInfo(needToCountRowNumber)) - .toArray(Pair[]::new); - } - - @Override - public FileSource limitBytes(Long limitBytes) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.setLimitBytes(limitBytes)); - return this; - } - - @Override - public FileSource limitColumnLength(int limitColumnLength) { - Arrays.stream(fileSplits) - .forEach(fileSplit -> fileSplit.setLimitColumnLength(limitColumnLength)); - return this; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java deleted file mode 100644 index 0ed650186d..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.*; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.ResultSetReaderFactory; -import org.apache.linkis.storage.script.ScriptFsReader; -import org.apache.linkis.storage.utils.StorageConfiguration; - -import org.apache.commons.math3.util.Pair; - -import java.io.Closeable; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Function; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE; - -public interface FileSource extends Closeable { - Logger logger = LoggerFactory.getLogger(FileSource.class); - - FileSource shuffle(Function s); - - FileSource page(int page, int pageSize); - - Pair>[] collect(); - - Pair[] getFileInfo(int needToCountRowNumber); 
- - void write(FsWriter fsWriter); - - FileSource addParams(Map params); - - FileSource addParams(String key, String value); - - Map getParams(); - - int getTotalLine(); - - String[] getTypes(); - - FileSplit[] getFileSplits(); - - String[] fileType = LinkisStorageConf.getFileTypeArr(); - BiFunction suffixPredicate = - (path, suffix) -> path.endsWith("." + suffix); - - static boolean isResultSet(String path) { - return suffixPredicate.apply(path, fileType[0]); - } - - static boolean isResultSet(FsPath fsPath) { - return isResultSet(fsPath.getPath()); - } - - FileSource limitBytes(Long limitBytes); - - FileSource limitColumnLength(int limitColumnLength); - - /** - * Currently only supports table multi-result sets - * - * @param fsPaths - * @param fs - * @return - */ - static FileSource create(FsPath[] fsPaths, Fs fs) { - // Filter non-table result sets - FileSplit[] fileSplits = - Arrays.stream(fsPaths) - .map(fsPath -> createResultSetFileSplit(fsPath, fs)) - .filter(FileSource::isTableResultSet) - .toArray(FileSplit[]::new); - return new ResultsetFileSource(fileSplits); - } - - static boolean isTableResultSet(FileSplit fileSplit) { - return fileSplit.type.equals(ResultSetFactory.TABLE_TYPE); - } - - static boolean isTableResultSet(FileSource fileSource) { - // Return true only if all splits are table result sets - return Arrays.stream(fileSource.getFileSplits()).allMatch(FileSource::isTableResultSet); - } - - static FileSource create(FsPath fsPath, Fs fs) { - if (!canRead(fsPath.getPath())) { - throw new StorageWarnException( - UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc()); - } - if (isResultSet(fsPath)) { - return new ResultsetFileSource(new FileSplit[] {createResultSetFileSplit(fsPath, fs)}); - } else { - return new TextFileSource(new FileSplit[] {createTextFileSplit(fsPath, fs)}); - } - } - - static FileSource create(FsPath fsPath, InputStream is) { - if (!canRead(fsPath.getPath())) { - throw new StorageWarnException( - 
UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc()); - } - if (isResultSet(fsPath)) { - return new ResultsetFileSource(new FileSplit[] {createResultSetFileSplit(fsPath, is)}); - } else { - return new TextFileSource(new FileSplit[] {createTextFileSplit(fsPath, is)}); - } - } - - static FileSplit createResultSetFileSplit(FsPath fsPath, InputStream is) { - logger.info("try create result set file split with path:{}", fsPath.getPath()); - ResultSet resultset = ResultSetFactory.getInstance().getResultSetByPath(fsPath); - ResultSetReader resultsetReader = ResultSetReaderFactory.getResultSetReader(resultset, is); - return new FileSplit(resultsetReader, resultset.resultSetType()); - } - - static FileSplit createResultSetFileSplit(FsPath fsPath, Fs fs) { - ResultSet resultset = ResultSetFactory.getInstance().getResultSetByPath(fsPath, fs); - ResultSetReader resultsetReader = null; - try { - resultsetReader = ResultSetReaderFactory.getResultSetReader(resultset, fs.read(fsPath)); - } catch (IOException e) { - logger.warn("FileSource createResultSetFileSplit failed", e); - } - return new FileSplit(resultsetReader, resultset.resultSetType()); - } - - static FileSplit createTextFileSplit(FsPath fsPath, InputStream is) { - ScriptFsReader scriptFsReader = - ScriptFsReader.getScriptFsReader( - fsPath, StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue(), is); - return new FileSplit(scriptFsReader); - } - - static FileSplit createTextFileSplit(FsPath fsPath, Fs fs) { - ScriptFsReader scriptFsReader = null; - try { - scriptFsReader = - ScriptFsReader.getScriptFsReader( - fsPath, StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue(), fs.read(fsPath)); - } catch (IOException e) { - logger.warn("FileSource createTextFileSplit failed", e); - } - return new FileSplit(scriptFsReader); - } - - static boolean canRead(String path) { - return Arrays.stream(fileType).anyMatch(suffix -> path.endsWith("." 
+ suffix)); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java deleted file mode 100644 index 3a6c05a54a..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java +++ /dev/null @@ -1,324 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.FsReader; -import org.apache.linkis.common.io.FsWriter; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.script.Parser; -import org.apache.linkis.storage.script.ScriptMetaData; -import org.apache.linkis.storage.script.Variable; -import org.apache.linkis.storage.script.VariableParser; -import org.apache.linkis.storage.script.reader.StorageScriptFsReader; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.math3.util.Pair; - -import java.io.Closeable; -import java.io.IOException; -import java.util.*; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FileSplit implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(FileSplit.class); - - private FsReader fsReader; - protected String type = "script/text"; - private int start = 0; - private int end = -1; - private int count = 0; - private int totalLine = 0; - protected Function shuffler; - private boolean pageTrigger = false; - protected Map params = new HashMap<>(); - private long limitBytes = 0L; - private int limitColumnLength = 0; - - public FileSplit(FsReader fsReader) { - this.fsReader = fsReader; - } - - public 
FileSplit(FsReader fsReader, String type) { - this.fsReader = fsReader; - this.type = type; - } - - public void page(int page, int pageSize) { - if (!pageTrigger) { - start = (page - 1) * pageSize; - end = pageSize * page - 1; - pageTrigger = true; - } - } - - public String getType() { - return type; - } - - public void addParams(Map params) { - this.params.putAll(params); - } - - public void addParams(String key, String value) { - this.params.put(key, value); - } - - public Map getParams() { - return params; - } - - public int getTotalLine() { - return totalLine; - } - - public void setLimitBytes(long limitBytes) { - this.limitBytes = limitBytes; - } - - public void setLimitColumnLength(int limitColumnLength) { - this.limitColumnLength = limitColumnLength; - } - - public M whileLoop(Function metaDataFunction, Consumer recordConsumer) { - M m = null; - try { - MetaData metaData = fsReader.getMetaData(); - m = metaDataFunction.apply(metaData); - if (pageTrigger) { - fsReader.skip(start); - } - count = start; - boolean hasRemovedFlag = false; - while (fsReader.hasNext() && ifContinueRead()) { - Record record = fsReader.getRecord(); - boolean needRemoveFlag = false; - if (!hasRemovedFlag && fsReader instanceof StorageScriptFsReader) { - Parser parser = ((StorageScriptFsReader) fsReader).getScriptParser(); - Variable[] meta = ((ScriptMetaData) metaData).getMetaData(); - if (meta != null - && meta.length > 0 - && parser != null - && parser.getAnnotationSymbol().equals(record.toString())) { - needRemoveFlag = true; - hasRemovedFlag = true; - } - } - if (!needRemoveFlag) { - recordConsumer.accept(shuffler.apply(record)); - totalLine++; - count++; - } - } - } catch (IOException e) { - logger.warn("FileSplit forEach failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - return m; - } - - public void biConsumerWhileLoop( - 
Consumer metaDataFunction, Consumer recordConsumer) { - try { - MetaData metaData = fsReader.getMetaData(); - metaDataFunction.accept(metaData); - if (pageTrigger) { - fsReader.skip(start); - } - count = start; - boolean hasRemovedFlag = false; - while (fsReader.hasNext() && ifContinueRead()) { - Record record = fsReader.getRecord(); - boolean needRemoveFlag = false; - if (!hasRemovedFlag && fsReader instanceof StorageScriptFsReader) { - Parser parser = ((StorageScriptFsReader) fsReader).getScriptParser(); - Variable[] meta = ((ScriptMetaData) metaData).getMetaData(); - if (meta != null - && meta.length > 0 - && parser != null - && parser.getAnnotationSymbol().equals(record.toString())) { - needRemoveFlag = true; - hasRemovedFlag = true; - } - } - if (!needRemoveFlag) { - recordConsumer.accept(shuffler.apply(record)); - totalLine++; - count++; - } - } - } catch (IOException e) { - logger.warn("FileSplit forEach failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - } - - public Pair getFileInfo(int needToCountRowNumber) { - int colNumber = 0; - int rowNumber = 0; - MetaData metaData = null; - try { - metaData = fsReader.getMetaData(); - colNumber = - metaData instanceof TableMetaData ? ((TableMetaData) metaData).getColumns().length : 1; - rowNumber = - needToCountRowNumber == -1 - ? 
fsReader.skip(Integer.MAX_VALUE) - : fsReader.skip(needToCountRowNumber); - } catch (IOException e) { - logger.warn("FileSplit getFileInfo failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - return new Pair<>(colNumber, rowNumber); - } - - public void write(FsWriter fsWriter) { - biConsumerWhileLoop( - metaData -> { - try { - fsWriter.addMetaData(metaData); - } catch (IOException e) { - logger.warn("FileSplit addMetaData failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - }, - record -> { - try { - fsWriter.addRecord(record); - } catch (IOException e) { - logger.warn("FileSplit addRecord failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - }); - } - - public Pair> collect() { - List recordList = new ArrayList<>(); - final AtomicLong tmpBytes = new AtomicLong(0L); - final AtomicBoolean overFlag = new AtomicBoolean(false); - Object metaData = - whileLoop( - collectMetaData -> collectMetaData(collectMetaData), - r -> { - if (!overFlag.get()) { - String[] arr = collectRecord(r); - if (limitBytes > 0) { - for (int i = 0; i < arr.length; i++) { - tmpBytes.addAndGet(arr[i].getBytes().length); - if (overFlag.get() || tmpBytes.get() > limitBytes) { - overFlag.set(true); - arr[i] = ""; - } - } - recordList.add(arr); - } else { - recordList.add(arr); - } - } - }); - return new Pair<>(metaData, recordList); - } - - public String[] collectRecord(Record record) { - if (record instanceof TableRecord) { - TableRecord tableRecord = (TableRecord) record; - if (limitColumnLength > 0) { - return Arrays.stream(tableRecord.row) - 
.map( - obj -> { - String col = DataType.valueToString(obj); - if (col.length() > limitColumnLength) { - return col.substring(0, limitColumnLength); - } else { - return col; - } - }) - .toArray(String[]::new); - } - return Arrays.stream(tableRecord.row).map(DataType::valueToString).toArray(String[]::new); - } else if (record instanceof LineRecord) { - LineRecord lineRecord = (LineRecord) record; - return new String[] {lineRecord.getLine()}; - } else { - throw new IllegalArgumentException("Unknown record type"); - } - } - - public Object collectMetaData(MetaData metaData) { - if (metaData instanceof ScriptMetaData) { - ScriptMetaData scriptMetaData = (ScriptMetaData) metaData; - return VariableParser.getMap(scriptMetaData.getMetaData()); - } else if (metaData instanceof LineMetaData) { - LineMetaData lineMetaData = (LineMetaData) metaData; - return lineMetaData.getMetaData(); - } else if (metaData instanceof TableMetaData) { - TableMetaData tableMetaData = (TableMetaData) metaData; - return Arrays.stream(tableMetaData.getColumns()) - .map(this::columnToMap) - .collect(Collectors.toList()); - } else { - throw new IllegalArgumentException("Unknown metadata type"); - } - } - - private Map columnToMap(Column column) { - Map stringMap = new HashMap<>(); - stringMap.put("columnName", column.getColumnName()); - stringMap.put("comment", column.getComment()); - stringMap.put("dataType", column.getDataType().getTypeName()); - return stringMap; - } - - public boolean ifContinueRead() { - return !pageTrigger || count <= end; - } - - public boolean ifStartRead() { - return !pageTrigger || count >= start; - } - - @Override - public void close() { - IOUtils.closeQuietly(fsReader); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java deleted file mode 100644 index fb064a8f4f..0000000000 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.util.Arrays; - -public class ResultsetFileSource extends AbstractFileSource { - - public ResultsetFileSource(FileSplit[] fileSplits) { - super(fileSplits); - shuffle( - record -> { - if (record instanceof TableRecord) { - TableRecord tableRecord = (TableRecord) record; - String nullValue = getParams().getOrDefault("nullValue", "NULL"); - return new TableRecord( - Arrays.stream(tableRecord.row) - .map( - r -> { - if (r == null || r.equals("NULL")) { - if (nullValue.equals(Dolphin.LINKIS_NULL)) { - return r; - } else { - return nullValue; - } - } else if (r.equals("")) { - String emptyValue = getParams().getOrDefault("nullValue", ""); - if (emptyValue.equals(Dolphin.LINKIS_NULL)) { - return ""; - } else { - return nullValue; - } - } else if (r instanceof Double) { - return StorageUtils.doubleToString((Double) r); 
- } else { - return r; - } - }) - .toArray()); - } else { - return record; - } - }); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java deleted file mode 100644 index 7e5396bf74..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.script.ScriptRecord; - -import org.apache.commons.math3.util.Pair; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -class TextFileSource extends AbstractFileSource { - public static final String[] LINE_BREAKER = new String[] {"\n"}; - - public TextFileSource(FileSplit[] fileSplits) { - super(fileSplits); - shuffle( - record -> { - if (record instanceof ScriptRecord && "".equals(((ScriptRecord) record).getLine())) { - return new LineRecord("\n"); - } else { - return record; - } - }); - } - - @Override - public Pair>[] collect() { - Pair>[] collects = super.collect(); - if (!getParams().getOrDefault("ifMerge", "true").equals("true")) { - return collects; - } - ArrayList> snds = - Arrays.stream(collects) - .map(Pair::getSecond) - .collect(Collectors.toCollection(ArrayList::new)); - snds.forEach( - snd -> { - StringBuilder str = new StringBuilder(); - snd.forEach( - arr -> { - if (Arrays.equals(arr, LINE_BREAKER)) { - str.append("\n"); - } else { - str.append(arr[0]).append("\n"); - } - }); - snd.clear(); - snd.add(new String[] {str.toString()}); - }); - return collects; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java deleted file mode 100644 index 0f93cdb6ab..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.fs.FileSystem; -import org.apache.linkis.storage.fs.impl.LocalFileSystem; - -import org.apache.commons.io.IOUtils; - -import java.io.IOException; -import java.util.Objects; -import java.util.Stack; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FileSystemUtils { - private static final Logger logger = LoggerFactory.getLogger(FileSystemUtils.class); - - public static void copyFile(FsPath filePath, FsPath origin, String user) throws Exception { - FileSystem fileSystem = (FileSystem) FSFactory.getFsByProxyUser(filePath, user); - try { - fileSystem.init(null); - if (!fileSystem.exists(filePath)) { - if (!fileSystem.exists(filePath.getParent())) { - fileSystem.mkdirs(filePath.getParent()); - } - fileSystem.createNewFile(filePath); - } - fileSystem.copyFile(origin, filePath); - } finally { - IOUtils.closeQuietly(fileSystem); - } - } - - /** - * Create a new file - * - * @param filePath - * @param createParentWhenNotExists Whether to recursively create a directory - */ - public static void createNewFile(FsPath filePath, boolean createParentWhenNotExists) - throws Exception { - createNewFile(filePath, StorageUtils.getJvmUser(), createParentWhenNotExists); - } - - public static void createNewFile( - 
FsPath filePath, String user, boolean createParentWhenNotExists) { - FileSystem fileSystem = (FileSystem) FSFactory.getFsByProxyUser(filePath, user); - try { - fileSystem.init(null); - createNewFileWithFileSystem(fileSystem, filePath, user, createParentWhenNotExists); - } catch (IOException e) { - logger.warn("FileSystemUtils createNewFile failed", e); - } catch (Exception e) { - logger.warn("FileSystemUtils createNewFile failed", e); - } finally { - IOUtils.closeQuietly(fileSystem); - } - } - - public static void createNewFileWithFileSystem( - FileSystem fileSystem, FsPath filePath, String user, boolean createParentWhenNotExists) - throws Exception { - if (!fileSystem.exists(filePath)) { - if (!fileSystem.exists(filePath.getParent())) { - if (!createParentWhenNotExists) { - throw new IOException( - "parent dir " + filePath.getParent().getPath() + " dose not exists."); - } - mkdirs(fileSystem, filePath.getParent(), user); - } - fileSystem.createNewFile(filePath); - if (fileSystem instanceof LocalFileSystem) { - fileSystem.setOwner(filePath, user); - } else { - logger.info("doesn't need to call setOwner"); - } - } - } - - /** - * Recursively create a directory - * - * @param fileSystem - * @param dest - * @param user - * @throws IOException - * @return - */ - public static boolean mkdirs(FileSystem fileSystem, FsPath dest, String user) throws IOException { - FsPath parentPath = dest.getParent(); - Stack dirsToMake = new Stack<>(); - dirsToMake.push(dest); - while (!fileSystem.exists(parentPath)) { - dirsToMake.push(parentPath); - - if (Objects.isNull(parentPath.getParent())) { - // parent path of root is null - break; - } - - parentPath = parentPath.getParent(); - } - if (!fileSystem.canExecute(parentPath)) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - while (!dirsToMake.empty()) { - FsPath path = dirsToMake.pop(); - fileSystem.mkdir(path); - if (fileSystem instanceof LocalFileSystem) { - fileSystem.setOwner(path, 
user); - } else { - logger.info("doesn't need to call setOwner"); - } - } - return true; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java deleted file mode 100644 index 70a3839b62..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.conf.ByteType; -import org.apache.linkis.common.conf.CommonVars; - -import java.util.List; - -import com.google.common.collect.Lists; - -public class StorageConfiguration { - - public static CommonVars PROXY_USER = - new CommonVars<>("wds.linkis.storage.proxy.user", "${UM}", null, null); - - public static CommonVars STORAGE_ROOT_USER = - new CommonVars<>("wds.linkis.storage.root.user", "hadoop", null, null); - - public static CommonVars HDFS_ROOT_USER = - new CommonVars<>("wds.linkis.storage.hdfs.root.user", "hadoop", null, null); - - public static CommonVars LOCAL_ROOT_USER = - new CommonVars<>("wds.linkis.storage.local.root.user", "root", null, null); - - public static CommonVars STORAGE_USER_GROUP = - new CommonVars<>("wds.linkis.storage.fileSystem.group", "bdap", null, null); - - public static CommonVars STORAGE_RS_FILE_TYPE = - new CommonVars<>("wds.linkis.storage.rs.file.type", "utf-8", null, null); - - public static CommonVars STORAGE_RS_FILE_SUFFIX = - new CommonVars<>("wds.linkis.storage.rs.file.suffix", ".dolphin", null, null); - - public static CommonVars LINKIS_STORAGE_FS_LABEL = - new CommonVars<>("linkis.storage.default.fs.label", "linkis-storage", null, null); - - public static List ResultTypes = - Lists.newArrayList("%TEXT", "%TABLE", "%HTML", "%IMG", "%ANGULAR", "%SVG"); - - public static CommonVars STORAGE_RESULT_SET_PACKAGE = - new CommonVars<>( - "wds.linkis.storage.result.set.package", - "org.apache.linkis.storage.resultset", - null, - null); - - public static CommonVars STORAGE_RESULT_SET_CLASSES = - new CommonVars<>( - "wds.linkis.storage.result.set.classes", - "txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet", - null, - null); - - public static CommonVars STORAGE_BUILD_FS_CLASSES = - new CommonVars<>( - "wds.linkis.storage.build.fs.classes", - 
"org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," - + "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem", - null, - null); - - public static CommonVars IS_SHARE_NODE = - new CommonVars<>("wds.linkis.storage.is.share.node", true, null, null); - - public static CommonVars ENABLE_IO_PROXY = - new CommonVars<>("wds.linkis.storage.enable.io.proxy", false, null, null); - - public static CommonVars IO_USER = - new CommonVars<>("wds.linkis.storage.io.user", "root", null, null); - public static CommonVars IO_FS_EXPIRE_TIME = - new CommonVars<>("wds.linkis.storage.io.fs.num", 1000 * 60 * 10, null, null); - - public static CommonVars IO_PROXY_READ_FETCH_SIZE = - new CommonVars<>("wds.linkis.storage.io.read.fetch.size", new ByteType("100k"), null, null); - - public static CommonVars IO_PROXY_WRITE_CACHE_SIZE = - new CommonVars<>("wds.linkis.storage.io.write.cache.size", new ByteType("64k"), null, null); - - public static CommonVars IO_DEFAULT_CREATOR = - new CommonVars<>("wds.linkis.storage.io.default.creator", "IDE", null, null); - public static CommonVars IO_FS_RE_INIT = - new CommonVars<>("wds.linkis.storage.io.fs.re.init", "re-init", null, null); - - public static CommonVars IO_INIT_RETRY_LIMIT = - new CommonVars<>("wds.linkis.storage.io.init.retry.limit", 10, null, null); - - public static CommonVars STORAGE_HDFS_GROUP = - new CommonVars<>("wds.linkis.storage.fileSystem.hdfs.group", "hadoop", null, null); - - public static CommonVars DOUBLE_FRACTION_LEN = - new CommonVars<>("wds.linkis.double.fraction.length", 30, null, null); - - public static CommonVars HDFS_PATH_PREFIX_CHECK_ON = - new CommonVars<>("wds.linkis.storage.hdfs.prefix_check.enable", true, null, null); - - public static CommonVars HDFS_PATH_PREFIX_REMOVE = - new CommonVars<>("wds.linkis.storage.hdfs.prefxi.remove", true, null, null); - - public static CommonVars 
FS_CACHE_DISABLE = - new CommonVars<>("wds.linkis.fs.hdfs.impl.disable.cache", false, null, null); - - public static CommonVars FS_CHECKSUM_DISBALE = - new CommonVars<>("linkis.fs.hdfs.impl.disable.checksum", false, null, null); - - /** - * more arguments please refer to: - * https://hadoop.apache.org/docs/stable/hadoop-aliyun/tools/hadoop-aliyun/index.html Aliyun OSS - * endpoint to connect to. eg: https://oss-cn-hangzhou.aliyuncs.com - */ - public static CommonVars OSS_ENDPOINT = - new CommonVars("wds.linkis.fs.oss.endpoint", "", null, null); - - /** Aliyun bucket name eg: benchmark2 */ - public static CommonVars OSS_ACCESS_BUCKET_NAME = - new CommonVars("wds.linkis.fs.oss.bucket.name", "", null, null); - - /** Aliyun access key ID */ - public static CommonVars OSS_ACCESS_KEY_ID = - new CommonVars("wds.linkis.fs.oss.accessKeyId", "", null, null); - - /** Aliyun access key secret */ - public static CommonVars OSS_ACCESS_KEY_SECRET = - new CommonVars("wds.linkis.fs.oss.accessKeySecret", "", null, null); - - public static CommonVars OSS_PATH_PREFIX_CHECK_ON = - new CommonVars("wds.linkis.storage.oss.prefix_check.enable", false, null, null); - - public static CommonVars OSS_PATH_PREFIX_REMOVE = - new CommonVars("wds.linkis.storage.oss.prefix.remove", true, null, null); - - public static CommonVars S3_ACCESS_KEY = - new CommonVars("linkis.storage.s3.access.key", "", null, null); - - public static CommonVars S3_SECRET_KEY = - new CommonVars("linkis.storage.s3.secret.key", "", null, null); - - public static CommonVars S3_ENDPOINT = - new CommonVars("linkis.storage.s3.endpoint", "", null, null); - - public static CommonVars S3_REGION = - new CommonVars("linkis.storage.s3.region", "", null, null); - - public static CommonVars S3_BUCKET = - new CommonVars("linkis.storage.s3.bucket", "", null, null); -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java deleted file mode 100644 index e1dee151ca..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.ResultSetReaderFactory; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import java.io.IOException; -import java.util.Arrays; - -/** - * 工具类,用于做storage jar包打出来做测试用 Tool class, which is used to print the storage jar package for testing - */ -public class StorageHelper { - private static final Log logger = LogFactory.getLog(StorageHelper.class); - - public static void main(String[] args) { - if (args.length < 2) logger.info("Usage method params eg:getTableResLines path"); - String method = args[0]; - String[] params = Arrays.copyOfRange(args, 1, args.length); - try { - Thread.sleep(10000L); - } catch (InterruptedException e) { - } - - switch (method) { - case "getTableResLines": - getTableResLines(params); - break; - case "getTableRes": - getTableRes(params); - break; - case "createNewFile": - createNewFile(params); - break; - default: - logger.info("There is no such method"); - } - } - - /** - * Get the number of table result set file lines(获得表格结果集文件行数) - * - * @param args - */ - public static void getTableResLines(String[] args) { - ResultSetReader resultSetReader = null; - try { - FsPath resPath = StorageUtils.getFsPath(args[0]); - ResultSetFactory resultSetFactory = ResultSetFactory.getInstance(); - - ResultSet resultSet = - resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE); - Fs fs = 
FSFactory.getFs(resPath); - fs.init(null); - resultSetReader = ResultSetReaderFactory.getResultSetReader(resultSet, fs.read(resPath)); - TableMetaData metaData = (TableMetaData) resultSetReader.getMetaData(); - Arrays.stream(metaData.getColumns()).forEach(column -> logger.info(column.toString())); - int num = 0; - Thread.sleep(10000L); - while (resultSetReader.hasNext()) { - resultSetReader.getRecord(); - num++; - } - logger.info(Integer.toString(num)); - } catch (Exception e) { - logger.error("getTableResLines error:", e); - } finally { - if (resultSetReader != null) { - try { - resultSetReader.close(); - } catch (IOException e) { - logger.error("Failed to close ResultSetReader", e); - } - } - } - } - - public static void getTableRes(String[] args) { - try { - int len = Integer.parseInt(args[1]); - int max = len + 10; - FsPath resPath = StorageUtils.getFsPath(args[0]); - ResultSetFactory resultSetFactory = ResultSetFactory.getInstance(); - ResultSet resultSet = - resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE); - Fs fs = FSFactory.getFs(resPath); - - fs.init(null); - - ResultSetReader reader = - ResultSetReaderFactory.getResultSetReader(resultSet, fs.read(resPath)); - MetaData rmetaData = reader.getMetaData(); - Arrays.stream(((TableMetaData) rmetaData).getColumns()) - .forEach(column -> logger.info(column.toString())); - Arrays.stream(((TableMetaData) rmetaData).getColumns()) - .map(column -> column.getColumnName() + ",") - .forEach(column -> logger.info(column)); - int num = 0; - while (reader.hasNext()) { - num++; - if (num > max) return; - if (num > len) { - Record record = reader.getRecord(); - Arrays.stream(((TableRecord) record).row) - .forEach( - value -> { - logger.info(value.toString()); - logger.info(","); - }); - logger.info("\n"); - } - } - } catch (IOException e) { - logger.warn("StorageHelper getTableRes failed", e); - } - } - - public static void createNewFile(String[] args) { - FsPath resPath = StorageUtils.getFsPath(args[0]); - 
String proxyUser = StorageUtils.getJvmUser(); - try { - FileSystemUtils.createNewFile(resPath, proxyUser, true); - } catch (Exception e) { - logger.warn("StorageHelper createNewFile failed", e); - } - logger.info("success"); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java deleted file mode 100644 index 692ce619b2..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java +++ /dev/null @@ -1,275 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.hadoop.common.conf.HadoopConf; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.ResultSetReaderFactory; -import org.apache.linkis.storage.resultset.ResultSetWriterFactory; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.curator.utils.CloseableUtils; - -import java.io.*; -import java.lang.reflect.Method; -import java.text.NumberFormat; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.CONFIGURATION_NOT_READ; - -public class StorageUtils { - private static final Logger logger = LoggerFactory.getLogger(StorageUtils.class); - - public static final String HDFS = "hdfs"; - public static final String FILE = "file"; - public static final String OSS = "oss"; - public static final String S3 = "s3"; - - public static final String FILE_SCHEMA = "file://"; - public static final String HDFS_SCHEMA = "hdfs://"; - public static final String OSS_SCHEMA = "oss://"; - public static final String S3_SCHEMA = "s3://"; - - private static final NumberFormat nf = NumberFormat.getInstance(); - - static { - nf.setGroupingUsed(false); - nf.setMaximumFractionDigits((int) StorageConfiguration.DOUBLE_FRACTION_LEN.getValue()); - 
} - - public static String doubleToString(double value) { - return nf.format(value); - } - - public static Map loadClass(String classStr, Function op) { - String[] _classes = classStr.split(","); - LinkedHashMap classes = new LinkedHashMap<>(); - for (String clazz : _classes) { - try { - T obj = Utils.getClassInstance(clazz.trim()); - classes.put(op.apply(obj), obj); - } catch (Exception e) { - logger.warn("StorageUtils loadClass failed", e); - } - } - return classes; - } - - public static Map> loadClasses( - String classStr, String pge, Function, String> op) { - String[] _classes = - StringUtils.isEmpty(pge) - ? classStr.split(",") - : Stream.of(StringUtils.split(classStr, ',')) - .map(value -> pge + "." + value) - .toArray(String[]::new); - Map> classes = new LinkedHashMap<>(); - for (String clazz : _classes) { - try { - Class _class = - (Class) Thread.currentThread().getContextClassLoader().loadClass(clazz.trim()); - classes.put(op.apply(_class), _class); - } catch (Exception e) { - logger.warn("StorageUtils loadClasses failed", e); - } - } - return classes; - } - - public static String pathToSuffix(String path) { - String fileName = new File(path).getName(); - if (fileName.length() > 0) { - int dot = fileName.lastIndexOf('.'); - if (dot > -1 && dot < fileName.length() - 1) { - return fileName.substring(dot + 1); - } - } - return fileName; - } - - public static Object invoke(Object obj, Method method, Object[] args) throws Exception { - return method.invoke(obj, args); - } - - /** - * Serialized string is a result set of type Text(序列化字符串为Text类型的结果集) - * - * @param value - * @return - */ - public static String serializerStringToResult(String value) throws IOException { - ResultSet resultSet = - ResultSetFactory.getInstance().getResultSetByType(ResultSetFactory.TEXT_TYPE); - ResultSetWriter writer = - ResultSetWriterFactory.getResultSetWriter(resultSet, Long.MAX_VALUE, null); - LineMetaData metaData = new LineMetaData(null); - LineRecord record = new 
LineRecord(value); - writer.addMetaData(metaData); - writer.addRecord(record); - String res = writer.toString(); - IOUtils.closeQuietly(writer); - return res; - } - - /** - * The result set of serialized text is a string(序列化text的结果集为字符串) - * - * @param result - * @return - */ - public static String deserializerResultToString(String result) throws IOException { - ResultSet resultSet = - ResultSetFactory.getInstance().getResultSetByType(ResultSetFactory.TEXT_TYPE); - ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(resultSet, result); - StringBuilder sb = new StringBuilder(); - while (reader.hasNext()) { - LineRecord record = (LineRecord) reader.getRecord(); - sb.append(record.getLine()); - } - reader.close(); - return sb.toString(); - } - - public static void close(OutputStream outputStream) { - close(outputStream, null, null); - } - - public static void close(InputStream inputStream) { - close(null, inputStream, null); - } - - public static void close(Fs fs) { - close(null, null, fs); - } - - public static void close(OutputStream outputStream, InputStream inputStream, Fs fs) { - try { - if (outputStream != null) outputStream.close(); - } catch (IOException e) { - // ignore exception - } - try { - if (inputStream != null) inputStream.close(); - } catch (IOException e) { - // ignore exception - } - try { - if (fs != null) fs.close(); - } catch (IOException e) { - // ignore exception - } - } - - public static void close(Closeable closeable) { - CloseableUtils.closeQuietly(closeable); - } - - public static String getJvmUser() { - return System.getProperty("user.name"); - } - - public static boolean isHDFSNode() { - File confPath = new File(HadoopConf.hadoopConfDir()); - // TODO IO-client mode need return false - if (!confPath.exists() || confPath.isFile()) { - throw new StorageWarnException( - CONFIGURATION_NOT_READ.getErrorCode(), CONFIGURATION_NOT_READ.getErrorDesc()); - } else return true; - } - - /** - * Returns the FsPath by determining whether 
the path is a schema. By default, the FsPath of the - * file is returned. - * - * @param path - * @return - */ - public static FsPath getFsPath(String path) { - if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA)) { - return new FsPath(path); - } else { - return new FsPath(FILE_SCHEMA + path); - } - } - - public static int readBytes(InputStream inputStream, byte[] bytes, int len) { - int readLen = 0; - try { - int count = 0; - // 当使用s3存储结果文件时时,com.amazonaws.services.s3.model.S3InputStream无法正确读取.dolphin文件。需要在循环条件添加: - // readLen >= 0 - // To resolve the issue when using S3 to store result files and - // com.amazonaws.services.s3.model.S3InputStream to read .dolphin files, you need to add the - // condition readLen >= 0 in the loop. - while (readLen < len && readLen >= 0) { - count = inputStream.read(bytes, readLen, len - readLen); - - if (count == -1 && inputStream.available() < 1) { - return readLen; - } - readLen += count; - } - } catch (IOException e) { - logger.warn("FileSystemUtils readBytes failed", e); - } - return readLen; - } - - public static String colToString(Object col, String nullValue) { - if (col == null) { - return nullValue; - } else if (col instanceof Double) { - return doubleToString((Double) col); - } else if ("NULL".equals(col) || "".equals(col)) { - return nullValue; - } else { - return col.toString(); - } - } - - public static String colToString(Object col) { - return colToString(col, "NULL"); - } - - public static boolean isIOProxy() { - return (boolean) StorageConfiguration.ENABLE_IO_PROXY.getValue(); - } - - public static byte[] mergeByteArrays(byte[] arr1, byte[] arr2) { - byte[] mergedArray = new byte[arr1.length + arr2.length]; - System.arraycopy(arr1, 0, mergedArray, 0, arr1.length); - System.arraycopy(arr2, 0, mergedArray, arr1.length, arr2.length); - return mergedArray; - } -} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala 
/**
 * Entry point for obtaining [[Fs]] instances. Concrete file systems are provided by
 * [[BuildFactory]] implementations registered via configuration and looked up by their
 * fs name (e.g. "file", "hdfs").
 */
object FSFactory extends Logging {

  // Registered build factories, keyed by file system name; loaded once at startup
  // from the configured class list.
  private val buildClasses: Map[String, BuildFactory] =
    StorageUtils.loadClass[BuildFactory](
      StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue,
      factory => factory.fsName()
    )

  /**
   * Look up the factory registered for the given file system name.
   *
   * @throws StorageFatalException
   *   when no factory is registered for `fsName`
   */
  def getBuildFactory(fsName: String): BuildFactory =
    buildClasses.getOrElse(
      fsName,
      throw new StorageFatalException(
        UNSUPPORTED_FILE.getErrorCode,
        MessageFormat.format(UNSUPPORTED_FILE.getErrorDesc, fsName)
      )
    )

  /** Build an Fs of the given type, accessed as the JVM user on behalf of `proxyUser`. */
  def getFs(fsType: String, proxyUser: String): Fs =
    getBuildFactory(fsType).getFs(StorageUtils.getJvmUser, proxyUser)

  /** Build an Fs of the given type where the JVM user proxies for itself. */
  def getFs(fsType: String): Fs = {
    val processUser = StorageUtils.getJvmUser
    getBuildFactory(fsType).getFs(processUser, processUser)
  }

  /**
   * 1. If this machine has shared storage, the file:// type FS obtained here is the FS of
   * the process user. 2. If this machine does not have shared storage, then the file://
   * type FS obtained is the proxy to the Remote (shared storage machine root) FS. 3. If it
   * is HDFS, it returns the FS of the process user.
   *
   * @param fsPath
   *   path whose fs type selects the factory
   */
  def getFs(fsPath: FsPath): Fs = getFs(fsPath.getFsType())

  /**
   * 1. If the proxy user equals the process user, a file:// FS is the process user's FS
   * (when shared storage exists), otherwise it proxies to the remote shared-storage FS.
   * 2. For hdfs, an equal proxy user yields the process user's FS; a different proxy user
   * yields a proxied FS.
   *
   * @param fsPath
   *   path whose fs type selects the factory
   * @param proxyUser
   *   user the file system should act on behalf of
   */
  def getFsByProxyUser(fsPath: FsPath, proxyUser: String): Fs =
    getFs(fsPath.getFsType(), proxyUser)

  /** Build an Fs of the given type for the JVM user, scoped to a cluster label. */
  def getFSByLabel(fs: String, label: String): Fs = {
    val processUser = StorageUtils.getJvmUser
    getBuildFactory(fs).getFs(processUser, processUser, label)
  }

  /** Build an Fs of the given type for `proxy`, scoped to a cluster label. */
  def getFSByLabelAndUser(fs: String, label: String, proxy: String): Fs =
    getBuildFactory(fs).getFs(StorageUtils.getJvmUser, proxy, label)

}
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineMetaData.scala @@ -15,22 +15,18 @@ * limitations under the License. */ -package org.apache.linkis.filesystem +package org.apache.linkis.storage -import org.junit.jupiter.api.{Assertions, DisplayName, Test} +import org.apache.linkis.common.io.MetaData +import org.apache.linkis.storage.resultset.ResultMetaData -class WorkspaceClientFactoryTest { +class LineMetaData(private var metaData: String = null) extends ResultMetaData { - @Test - @DisplayName("getClientTest") - def getClientTest(): Unit = { + def getMetaData: String = metaData - val user = "hadoop" - val token = "abc" - val gatewayAddress = "127.0.0.1:9001" - - val client = WorkspaceClientFactory.getClient(user, token, gatewayAddress) - Assertions.assertNotNull(client) + def setMetaData(metaData: String): Unit = { + this.metaData = metaData } + override def cloneMeta(): MetaData = new LineMetaData(metaData) } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableRecord.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala similarity index 67% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableRecord.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala index c8270714ba..d6e3220cf2 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableRecord.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala @@ -15,21 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.resultset.table; +package org.apache.linkis.storage -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.resultset.ResultRecord; +import org.apache.linkis.common.io.Record +import org.apache.linkis.storage.resultset.ResultRecord -public class TableRecord implements ResultRecord { +class LineRecord(private var line: String) extends ResultRecord { - public final Object[] row; + def getLine: String = line - public TableRecord(Object[] row) { - this.row = row; + def setLine(line: String): Unit = { + this.line = line } - @Override - public Record cloneRecord() { - return new TableRecord(row.clone()); - } + override def cloneRecord(): Record = new LineRecord(line) + + override def toString: String = line } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala new file mode 100644 index 0000000000..5330983dd6 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.conf + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.ByteTimeUtils + +import org.apache.commons.lang3.StringUtils + +object LinkisStorageConf { + private val CONF_LOCK = new Object() + + val enableLimitThreadLocal: ThreadLocal[String] = new ThreadLocal[String] + + val columnIndicesThreadLocal: ThreadLocal[Array[Int]] = new ThreadLocal[Array[Int]] + + val HDFS_FILE_SYSTEM_REST_ERRS: String = + CommonVars + .apply( + "wds.linkis.hdfs.rest.errs", + ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*" + ) + .getValue + + val ROW_BYTE_MAX_LEN_STR = CommonVars("wds.linkis.resultset.row.max.str", "2m").getValue + + val ROW_BYTE_MAX_LEN = ByteTimeUtils.byteStringAsBytes(ROW_BYTE_MAX_LEN_STR) + + val FILE_TYPE = CommonVars( + "wds.linkis.storage.file.type", + "dolphin,sql,scala,py,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql,txt" + ).getValue + + private var fileTypeArr: Array[String] = null + + private def fileTypeArrParser(fileType: String): Array[String] = { + if (StringUtils.isBlank(fileType)) Array() + else fileType.split(",") + } + + def getFileTypeArr: Array[String] = { + if (fileTypeArr == null) { + CONF_LOCK.synchronized { + if (fileTypeArr == null) { + fileTypeArr = fileTypeArrParser(FILE_TYPE) + } + } + } + fileTypeArr + } + + val LINKIS_RESULT_ENABLE_NULL = CommonVars("linkis.resultset.enable.null.replace", false).getValue + + val LINKIS_RESULT_COLUMN_SIZE = + CommonVars("linkis.resultset.column.size.max", Int.MaxValue).getValue + + val LINKIS_RESULT_COL_LENGTH = + CommonVars("linkis.resultset.col.length.max", Int.MaxValue).getValue + + val LINKIS__READ_RESULT_ROW_MAX_LEN_STR = + CommonVars("linkis.resultset.read.row.max.str", "20m").getValue + + val LINKIS_READ_ROW_BYTE_MAX_LEN = + ByteTimeUtils.byteStringAsBytes(LINKIS__READ_RESULT_ROW_MAX_LEN_STR) + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala new file mode 100644 index 0000000000..d40d041a3e --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.csv + +import org.apache.linkis.common.io.FsReader + +abstract class CSVFsReader extends FsReader {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala new file mode 100644 index 0000000000..93610a7db2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.csv + +import org.apache.linkis.common.io.FsWriter + +import java.io.OutputStream + +abstract class CSVFsWriter extends FsWriter { + val charset: String + val separator: String + val quoteRetouchEnable: Boolean +} + +object CSVFsWriter { + + def getCSVFSWriter( + charset: String, + separator: String, + quoteRetouchEnable: Boolean, + outputStream: OutputStream + ): CSVFsWriter = new StorageCSVWriter(charset, separator, quoteRetouchEnable, outputStream) + + def getCSVFSWriter( + charset: String, + separator: String, + quoteRetouchEnable: Boolean, + outputStream: OutputStream, + keepNewline: Boolean + ): CSVFsWriter = + new StorageCSVWriter(charset, separator, quoteRetouchEnable, outputStream, keepNewline) + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala similarity index 91% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala index a06dfaab18..6fc91c836b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala @@ 
-15,6 +15,6 @@ * limitations under the License. */ -package org.apache.linkis.manager.rm.utils; +package org.apache.linkis.storage.csv -public class AlertUtils {} +class StorageCSVReader {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala new file mode 100644 index 0000000000..95d98669b8 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * CSV implementation of [[CSVFsWriter]]: renders table metadata and records as
 * separator-delimited lines, buffering output in ~50KB chunks before writing to the
 * underlying stream.
 *
 * @param charset encoding used when flushing the buffer to the stream
 * @param separator column delimiter; the literal string "t" is interpreted as TAB
 * @param quoteRetouchEnable when true, values are wrapped in double quotes (embedded
 *   quotes are stripped rather than escaped)
 * @param outputStream destination stream; closed by [[close]]
 */
class StorageCSVWriter(
    val charset: String,
    val separator: String,
    val quoteRetouchEnable: Boolean,
    val outputStream: OutputStream
) extends CSVFsWriter
    with Logging {

  // When false (the default) embedded "\n" in cell values is replaced by a space.
  var keepNewline: Boolean = false;

  // Auxiliary constructor used by the 5-argument CSVFsWriter factory overload.
  def this(
      charset: String,
      separator: String,
      quoteRetouchEnable: Boolean,
      outputStream: OutputStream,
      keepNewline: Boolean
  ) {
    this(charset, separator, quoteRetouchEnable, outputStream)
    this.keepNewline = keepNewline
  }

  private val delimiter = separator match {
    // Compatible with possible missing escape characters: a literal "t" means TAB.
    case "t" => '\t'
    case separ if StringUtils.isNotEmpty(separ) => separ
    case _ => '\t'
  }

  // Pending output; flushed to the stream when it approaches the 50_000 capacity.
  private val buffer: StringBuilder = new StringBuilder(50000)

  // Header row: column names from the table metadata.
  @scala.throws[IOException]
  override def addMetaData(metaData: MetaData): Unit = {
    val head = metaData.asInstanceOf[TableMetaData].columns.map(_.columnName)
    write(head)
  }

  // Render one row as a delimited, newline-terminated line, applying quoting and
  // newline handling per the configured options.
  private def compact(row: Array[String]): String = {
    val quotationMarks: String = "\""
    val dealNewlineSymbolMarks: String = "\n"

    def decorateValue(v: String): String = {
      if (StringUtils.isBlank(v)) v
      else {
        var res = v
        if (quoteRetouchEnable) {
          // Embedded quotes are removed, not escaped, before wrapping the value.
          res = s"$quotationMarks${v.replaceAll(quotationMarks, "")}$quotationMarks"
        }
        if (!this.keepNewline) {
          res = res.replaceAll(dealNewlineSymbolMarks, " ")
        }
        logger.debug("decorateValue with input:" + v + " output:" + res)
        res
      }
    }

    if (logger.isDebugEnabled()) {
      logger.debug("delimiter:" + delimiter.toString)
    }

    row.map(x => decorateValue(x)).toList.mkString(delimiter.toString) + "\n"
  }

  // Append a rendered row to the buffer, flushing first if it would near capacity
  // (49500 leaves headroom below the builder's initial 50000).
  private def write(row: Array[String]) = {
    val content: String = compact(row)
    if (buffer.length + content.length > 49500) {
      IOUtils.write(buffer.toString().getBytes(charset), outputStream)
      buffer.clear()
    }
    buffer.append(content)
  }

  // Data row: each cell stringified via DataType.valueToString (BigDecimal -> plain string).
  @scala.throws[IOException]
  override def addRecord(record: Record): Unit = {
    val body = record.asInstanceOf[TableRecord].row.map(DataType.valueToString)
    write(body)
  }

  // Force any buffered content out to the underlying stream.
  override def flush(): Unit = {
    IOUtils.write(buffer.toString().getBytes(charset), outputStream)
    buffer.clear()
  }

  // Flush remaining content, then quietly close the stream.
  override def close(): Unit = {
    flush()
    IOUtils.closeQuietly(outputStream)
  }

}
/**
 * Data-type model for result sets: maps type-name strings to [[DataType]] instances,
 * converts serialized cell strings to typed values, and renders values back to strings.
 */
object DataType extends Logging {

  val LOWCASE_NULL_VALUE = "null"

  // Matches e.g. "decimal(10, 2)"; unanchored so prefixes/suffixes are tolerated.
  val DECIMAL_REGEX = "^decimal\\(\\s*\\d*\\s*,\\s*\\d*\\s*\\)".r.unanchored

  val SHORT_REGEX = "^short.*".r.unanchored
  val INT_REGEX = "^int.*".r.unanchored
  val LONG_REGEX = "^long.*".r.unanchored
  val BIGINT_REGEX = "^bigint.*".r.unanchored
  val FLOAT_REGEX = "^float.*".r.unanchored
  val DOUBLE_REGEX = "^double.*".r.unanchored

  val VARCHAR_REGEX = "^varchar.*".r.unanchored
  val CHAR_REGEX = "^char.*".r.unanchored

  val ARRAY_REGEX = "array.*".r.unanchored

  val MAP_REGEX = "map.*".r.unanchored

  val LIST_REGEX = "list.*".r.unanchored

  val STRUCT_REGEX = "struct.*".r.unanchored

  // Case order matters: exact names are tested before prefix regexes, and unknown
  // names fall back to StringType. Implicit, so type-name strings convert on demand.
  implicit def toDataType(dataType: String): DataType = dataType match {
    case "void" | "null" => NullType
    case "string" => StringType
    case "boolean" => BooleanType
    case SHORT_REGEX() => ShortIntType
    case LONG_REGEX() => LongType
    case BIGINT_REGEX() => BigIntType
    case INT_REGEX() | "integer" | "smallint" => IntType
    case FLOAT_REGEX() => FloatType
    case DOUBLE_REGEX() => DoubleType
    case VARCHAR_REGEX() => VarcharType
    case CHAR_REGEX() => CharType
    case "date" => DateType
    case "timestamp" => TimestampType
    case "binary" => BinaryType
    case "decimal" | DECIMAL_REGEX() => DecimalType
    case ARRAY_REGEX() => ArrayType
    case MAP_REGEX() => MapType
    case LIST_REGEX() => ListType
    case STRUCT_REGEX() => StructType
    case _ => StringType
  }

  /**
   * Convert a serialized cell string to a typed value. LINKIS_NULL sentinels become
   * real nulls unless null-replacement is enabled, in which case they are treated as
   * the plain NULL sentinel. Conversion failures are logged at debug and return the
   * raw string unchanged.
   */
  def toValue(dataType: DataType, value: String): Any = {
    var newValue: String = value
    if (isLinkisNull(value)) {
      if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) {
        return null
      } else {
        newValue = Dolphin.NULL
      }
    }
    Utils.tryCatch(dataType match {
      case NullType => null
      case StringType | CharType | VarcharType | StructType | ListType | ArrayType | MapType =>
        newValue
      case BooleanType => if (isNumberNull(newValue)) null else newValue.toBoolean
      case ShortIntType => if (isNumberNull(newValue)) null else newValue.toShort
      case IntType => if (isNumberNull(newValue)) null else newValue.toInt
      case LongType | BigIntType => if (isNumberNull(newValue)) null else newValue.toLong
      case FloatType => if (isNumberNull(newValue)) null else newValue.toFloat
      case DoubleType => if (isNumberNull(newValue)) null else newValue.toDouble
      case DecimalType => if (isNumberNull(newValue)) null else new JavaBigDecimal(newValue)
      case DateType => if (isNumberNull(newValue)) null else Date.valueOf(newValue)
      case TimestampType =>
        // Timestamps are normalised to strings with a trailing ".0" stripped.
        if (isNumberNull(newValue)) null else Timestamp.valueOf(newValue).toString.stripSuffix(".0")
      case BinaryType => if (isNull(newValue)) null else newValue.getBytes()
      case _ => newValue
    }) { t =>
      logger.debug(s"Failed to $newValue switch to dataType:", t)
      newValue
    }
  }

  /** True for null references and the LINKIS_NULL sentinel. */
  def isLinkisNull(value: String): Boolean = {
    if (value == null || value == Dolphin.LINKIS_NULL) true else false
  }

  /** True for null references, the NULL sentinel (case-sensitive) and blank strings. */
  def isNull(value: String): Boolean =
    if (value == null || value == Dolphin.NULL || value.trim == "") true else false

  /** Like [[isNull]] but matches the NULL sentinel case-insensitively (numeric columns). */
  def isNumberNull(value: String): Boolean =
    if (null == value || Dolphin.NULL.equalsIgnoreCase(value) || value.trim == "") {
      true
    } else {
      false
    }

  /** Render a typed value as a string; BigDecimal uses toPlainString (no scientific notation). */
  def valueToString(value: Any): String = {
    if (null == value) return null
    value match {
      case javaDecimal: JavaBigDecimal =>
        javaDecimal.toPlainString
      case _ => value.toString
    }
  }

}

/**
 * Base of all storage data types.
 *
 * @param typeName lowercase type name used in serialized metadata
 * @param javaSQLType numeric type code — NOTE(review): appears to follow java.sql.Types
 *   values; confirm before relying on it for JDBC interop
 */
abstract class DataType(val typeName: String, val javaSQLType: Int) {
  override def toString: String = typeName
}

case object NullType extends DataType("void", 0)
case object StringType extends DataType("string", 12)
case object BooleanType extends DataType("boolean", 16)
case object TinyIntType extends DataType("tinyint", -6)
case object ShortIntType extends DataType("short", 5)
case object IntType extends DataType("int", 4)
case object LongType extends DataType("long", -5)
case object BigIntType extends DataType("bigint", -5)
case object FloatType extends DataType("float", 6)
case object DoubleType extends DataType("double", 8)
case object CharType extends DataType("char", 1)
case object VarcharType extends DataType("varchar", 12)
case object DateType extends DataType("date", 91)
case object TimestampType extends DataType("timestamp", 93)
case object BinaryType extends DataType("binary", -2)
case object DecimalType extends DataType("decimal", 3)
case object ArrayType extends DataType("array", 2003)
case object MapType extends DataType("map", 2000)
case object ListType extends DataType("list", 2001)
case object StructType extends DataType("struct", 2002)
case object BigDecimalType extends DataType("bigdecimal", 3)

/** A result-set column: name, resolved data type and optional comment. */
case class Column(columnName: String, dataType: DataType, comment: String) {

  def toArray: Array[Any] = {
    Array[Any](columnName, dataType, comment)
  }

  override def toString: String =
    s"columnName:$columnName,dataType:$dataType,comment:$comment"

}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.domain + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FAILED_TO_READ_INTEGER +import org.apache.linkis.storage.exception.{ + StorageErrorCode, + StorageErrorException, + StorageWarnException +} +import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} + +import java.io.{InputStream, IOException} + +object Dolphin extends Logging { + + val CHAR_SET = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue + val MAGIC = "dolphin" + + val MAGIC_BYTES = MAGIC.getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue) + val MAGIC_LEN = MAGIC_BYTES.length + + val DOLPHIN_FILE_SUFFIX = ".dolphin" + + val COL_SPLIT = "," + val COL_SPLIT_BYTES = COL_SPLIT.getBytes("utf-8") + val COL_SPLIT_LEN = COL_SPLIT_BYTES.length + + val NULL = "NULL" + val NULL_BYTES = NULL.getBytes("utf-8") + + val LINKIS_NULL = "LINKIS_NULL" + val LINKIS_NULL_BYTES = LINKIS_NULL.getBytes("utf-8") + + val INT_LEN = 10 + + val FILE_EMPTY = 31 + + def getBytes(value: Any): Array[Byte] = { + value.toString.getBytes(CHAR_SET) + } + + /** + * Convert a bytes array to a String content 将bytes数组转换为String内容 + * @param bytes + * @param start + * @param len + * @return + */ + def getString(bytes: Array[Byte], start: Int, len: Int): String = { + try { + new String(bytes, start, len, Dolphin.CHAR_SET) + } catch { + case e: OutOfMemoryError => + logger.error("bytes to String oom {} Byte", bytes.length) + throw new StorageErrorException( + StorageErrorCode.FS_OOM.getCode, 
+ StorageErrorCode.FS_OOM.getMessage, + e + ) + } + } + + def toStringValue(value: String): String = { + if (LINKIS_NULL.equals(value)) { + NULL + } else { + value + } + } + + /** + * Read an integer value that converts the array to a byte of length 10 bytes + * 读取整数值,该值为将数组转换为10字节长度的byte + * @param inputStream + * @return + */ + def readInt(inputStream: InputStream): Int = { + val bytes = new Array[Byte](INT_LEN + 1) + if (StorageUtils.readBytes(inputStream, bytes, INT_LEN) != INT_LEN) { + throw new StorageWarnException( + FAILED_TO_READ_INTEGER.getErrorCode, + FAILED_TO_READ_INTEGER.getErrorDesc + ) + } + getString(bytes, 0, INT_LEN).toInt + } + + /** + * Print integers at a fixed length(将整数按固定长度打印) + * @param value + * @return + */ + def getIntBytes(value: Int): Array[Byte] = { + val str = value.toString + val res = "0" * (INT_LEN - str.length) + str + Dolphin.getBytes(res) + } + + def getType(inputStream: InputStream): String = { + val bytes = new Array[Byte](100) + val len = StorageUtils.readBytes(inputStream, bytes, Dolphin.MAGIC_LEN + INT_LEN) + if (len == -1) return null + getType(Dolphin.getString(bytes, 0, len)) + } + + def getType(content: String): String = { + if (content.length < MAGIC.length || content.substring(0, MAGIC.length) != MAGIC) { + throw new IOException(s"File header type must be dolphin,content:$content is not") + } + content.substring(MAGIC.length, MAGIC.length + INT_LEN).toInt.toString + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala new file mode 100644 index 0000000000..fac0a2d01b --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.domain + +import java.lang.reflect.Type + +import com.google.gson.GsonBuilder + +/** + * @param id + * Engine unique Id(engine唯一的Id) + * @param fsType + * Fs type(fs类型) + * @param creatorUser + * Create a user to start the corresponding jvm user(创建用户为对应启动的jvm用户) + * @param proxyUser + * Proxy user(代理用户) + * @param clientIp + * client Ip for whitelist control(ip用于白名单控制) + * @param methodName + * Method name called(调用的方法名) + * @param params + * Method parameter(方法参数) + */ +case class MethodEntity( + id: Long, + fsType: String, + creatorUser: String, + proxyUser: String, + clientIp: String, + methodName: String, + params: Array[AnyRef] +) { + + override def toString: String = { + s"id:$id, methodName:$methodName, fsType:$fsType, " + + s"creatorUser:$creatorUser, proxyUser:$proxyUser, clientIp:$clientIp, " + } + +} + +object MethodEntitySerializer { + + val gson = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create + + /** + * Serialized to code as a MethodEntity object 序列化为code为MethodEntity对象 + * @param code + * @return + */ + def deserializer(code: String): MethodEntity = gson.fromJson(code, classOf[MethodEntity]) + + /** + * Serialize MethodEntity to code 序列化MethodEntity为code + * @param methodEntity + * @return + */ + def 
serializer(methodEntity: MethodEntity): String = gson.toJson(methodEntity) + + /** + * Serialize a java object as a string 序列化java对象为字符串 + * @param value + * @return + */ + def serializerJavaObject(value: Any): String = gson.toJson(value) + + /** + * Deserialize a string into a java object 将字符串解序列化为java对象 + * @param json + * @param classType + * @tparam T + * @return + */ + def deserializerToJavaObject[T](json: String, classType: Class[T]): T = { + gson.fromJson(json, classType) + } + + def deserializerToJavaObject[T](json: String, oType: Type): T = { + gson.fromJson(json, oType) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsReader.scala new file mode 100644 index 0000000000..621145cb4d --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsReader.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.excel + +import org.apache.linkis.common.io.FsReader + +abstract class ExcelFsReader extends FsReader {} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptMetaData.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala similarity index 59% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptMetaData.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala index 28999aae75..079920bef3 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptMetaData.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala @@ -15,27 +15,33 @@ * limitations under the License. */ -package org.apache.linkis.storage.script; +package org.apache.linkis.storage.excel -import org.apache.linkis.common.io.MetaData; +import org.apache.linkis.common.io.FsWriter -public class ScriptMetaData implements MetaData { - private Variable[] variables; +import java.io.OutputStream - public ScriptMetaData(Variable[] variables) { - this.variables = variables; - } +abstract class ExcelFsWriter extends FsWriter { + val charset: String + val sheetName: String + val dateFormat: String + val autoFormat: Boolean +} - @Override - public MetaData cloneMeta() { - return new ScriptMetaData(variables.clone()); - } +object ExcelFsWriter { - public Variable[] getMetaData() { - return variables; - } + def getExcelFsWriter( + charset: String, + sheetName: String, + dateFormat: String, + outputStream: OutputStream, + autoFormat: Boolean + ): ExcelFsWriter = new StorageExcelWriter( + charset, + sheetName, + dateFormat, + outputStream: OutputStream, + autoFormat: Boolean + ) - public void setMetaData(Variable[] variables) { - this.variables = variables; - } } diff --git 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelXlsReader.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelXlsReader.java new file mode 100644 index 0000000000..aac0ff54a6 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelXlsReader.java @@ -0,0 +1,378 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.excel; + +import org.apache.poi.hssf.eventusermodel.EventWorkbookBuilder.SheetRecordCollectingListener; +import org.apache.poi.hssf.eventusermodel.*; +import org.apache.poi.hssf.eventusermodel.dummyrecord.LastCellOfRowDummyRecord; +import org.apache.poi.hssf.eventusermodel.dummyrecord.MissingCellDummyRecord; +import org.apache.poi.hssf.model.HSSFFormulaParser; +import org.apache.poi.hssf.record.*; +import org.apache.poi.hssf.usermodel.HSSFWorkbook; +import org.apache.poi.poifs.filesystem.POIFSFileSystem; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + + + +public class ExcelXlsReader implements HSSFListener { + private int minColumns = -1; + + private POIFSFileSystem fs; + + private InputStream inputStream; + + private int lastRowNumber; + + private int lastColumnNumber; + + /** + * Should we output the formula, or the value it has? 
+ */ + private boolean outputFormulaValues = true; + + /** + * For parsing Formulas + */ + private SheetRecordCollectingListener workbookBuildingListener; + + // excel2003Workbook(工作薄) + private HSSFWorkbook stubWorkbook; + + // Records we pick up as we process + private SSTRecord sstRecord; + + private FormatTrackingHSSFListener formatListener; + + // Table index(表索引) + private int sheetIndex = -1; + + private BoundSheetRecord[] orderedBSRs; + + @SuppressWarnings("unchecked") + private ArrayList boundSheetRecords = new ArrayList(); + + // For handling formulas with string results + private int nextRow; + + private int nextColumn; + + private boolean outputNextStringRecord; + + // Current line(当前行) + private int curRow = 0; + + // a container that stores row records(存储行记录的容器) + private List rowlist = new ArrayList(); + + + @SuppressWarnings("unused") + private String sheetName; + + private IExcelRowDeal excelRowDeal; + + + public void init(IExcelRowDeal excelRowDeal, InputStream inputStream) { + this.excelRowDeal = excelRowDeal; + this.inputStream = inputStream; + } + + + /** + * Traverse all the sheets under excel + * 遍历excel下所有的sheet + * @throws IOException + */ + public void process() throws IOException { + this.fs = new POIFSFileSystem(this.inputStream); + MissingRecordAwareHSSFListener listener = new MissingRecordAwareHSSFListener(this); + formatListener = new FormatTrackingHSSFListener(listener); + HSSFEventFactory factory = new HSSFEventFactory(); + HSSFRequest request = new HSSFRequest(); + if (outputFormulaValues) { + request.addListenerForAllRecords(formatListener); + } else { + workbookBuildingListener = new SheetRecordCollectingListener(formatListener); + request.addListenerForAllRecords(workbookBuildingListener); + } + factory.processWorkbookEvents(request, fs); + } + + /** + * HSSFListener listener method, processing Record + * HSSFListener 监听方法,处理 Record + */ + @Override + @SuppressWarnings("unchecked") + public void processRecord(Record record) { + 
int thisRow = -1; + int thisColumn = -1; + String thisStr = null; + String value = null; + switch (record.getSid()) { + case BoundSheetRecord.sid: + boundSheetRecords.add(record); + break; + case BOFRecord.sid: + BOFRecord br = (BOFRecord) record; + if (br.getType() == BOFRecord.TYPE_WORKSHEET) { + // Create a child workbook if needed(如果有需要,则建立子工作薄) + if (workbookBuildingListener != null && stubWorkbook == null) { + stubWorkbook = workbookBuildingListener.getStubHSSFWorkbook(); + } + + sheetIndex++; + if (orderedBSRs == null) { + orderedBSRs = BoundSheetRecord.orderByBofPosition(boundSheetRecords); + } + sheetName = orderedBSRs[sheetIndex].getSheetname(); + } + break; + + case SSTRecord.sid: + sstRecord = (SSTRecord) record; + break; + + case BlankRecord.sid: + BlankRecord brec = (BlankRecord) record; + thisRow = brec.getRow(); + thisColumn = brec.getColumn(); + thisStr = ""; + rowlist.add(thisColumn, thisStr); + break; + case BoolErrRecord.sid: // Cell is boolean(单元格为布尔类型) + BoolErrRecord berec = (BoolErrRecord) record; + thisRow = berec.getRow(); + thisColumn = berec.getColumn(); + thisStr = berec.getBooleanValue() + ""; + rowlist.add(thisColumn, thisStr); + break; + + case FormulaRecord.sid: // Cell is a formula type(单元格为公式类型) + FormulaRecord frec = (FormulaRecord) record; + thisRow = frec.getRow(); + thisColumn = frec.getColumn(); + if (outputFormulaValues) { + if (Double.isNaN(frec.getValue())) { + // Formula result is a string + // This is stored in the next record + outputNextStringRecord = true; + nextRow = frec.getRow(); + nextColumn = frec.getColumn(); + } else { + thisStr = formatListener.formatNumberDateCell(frec); + } + } else { + thisStr = '"' + HSSFFormulaParser.toFormulaString(stubWorkbook, frec.getParsedExpression()) + '"'; + } + rowlist.add(thisColumn, thisStr); + break; + case StringRecord.sid:// a string of formulas in a cell(单元格中公式的字符串) + if (outputNextStringRecord) { + // String for formula + StringRecord srec = (StringRecord) record; + 
thisStr = srec.getString(); + thisRow = nextRow; + thisColumn = nextColumn; + outputNextStringRecord = false; + } + break; + case LabelRecord.sid: + LabelRecord lrec = (LabelRecord) record; + curRow = thisRow = lrec.getRow(); + thisColumn = lrec.getColumn(); + value = lrec.getValue().trim(); + value = value.equals("") ? " " : value; + this.rowlist.add(thisColumn, value); + break; + case LabelSSTRecord.sid: // Cell is a string type(单元格为字符串类型) + LabelSSTRecord lsrec = (LabelSSTRecord) record; + curRow = thisRow = lsrec.getRow(); + thisColumn = lsrec.getColumn(); + if (sstRecord == null) { + rowlist.add(thisColumn, " "); + } else { + value = sstRecord.getString(lsrec.getSSTIndex()).toString().trim(); + value = value.equals("") ? " " : value; + rowlist.add(thisColumn, value); + } + break; + case NumberRecord.sid: // Cell is a numeric type(单元格为数字类型) + NumberRecord numrec = (NumberRecord) record; + curRow = thisRow = numrec.getRow(); + thisColumn = numrec.getColumn(); + value = formatListener.formatNumberDateCell(numrec).trim(); + value = value.equals("") ? 
"0" : value; + // Add column values to the container(向容器加入列值) + rowlist.add(thisColumn, value); + break; + default: + break; + } + + // Encountered a new line of operations(遇到新行的操作)( + if (thisRow != -1 && thisRow != lastRowNumber) { + lastColumnNumber = -1; + } + + // Null operation(空值的操作) + if (record instanceof MissingCellDummyRecord) { + MissingCellDummyRecord mc = (MissingCellDummyRecord) record; + curRow = thisRow = mc.getRow(); + thisColumn = mc.getColumn(); + rowlist.add(thisColumn, " "); + } + + // Update row and column values(更新行和列的值) + if (thisRow > -1) + lastRowNumber = thisRow; + if (thisColumn > -1) + lastColumnNumber = thisColumn; + + // End of line operation(行结束时的操作) + if (record instanceof LastCellOfRowDummyRecord) { + if (minColumns > 0) { + // Column value is re-empted(列值重新置空) + if (lastColumnNumber == -1) { + lastColumnNumber = 0; + } + } + lastColumnNumber = -1; + + // At the end of each line, the dealRow() method(每行结束时, dealRow() 方法) + excelRowDeal.dealRow(orderedBSRs, sheetIndex, curRow, rowlist); + // Empty container(清空容器) + rowlist.clear(); + } + } + + public void close() { + try { + if (fs != null) { + fs.close(); + } + if (inputStream != null) { + inputStream.close(); + } + } catch (Exception e) { + + } + } + + public static void main(String[] args) { + + } +} + +class ExcelAnalysisException extends RuntimeException { + public ExcelAnalysisException() { + } + + public ExcelAnalysisException(String message) { + super(message); + } + + public ExcelAnalysisException(String message, Throwable cause) { + super(message, cause); + } + + public ExcelAnalysisException(Throwable cause) { + super(cause); + } +} + +interface IExcelRowDeal { + void dealRow(BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist); +} + + +class FirstRowDeal implements IExcelRowDeal { + + private List sheetNames = new ArrayList<>(); + private List row; + + public List getSheetNames() { + return sheetNames; + } + + public void setSheetNames(List 
sheetNames) { + this.sheetNames = sheetNames; + } + + public List getRow() { + return row; + } + + public void setRow(List row) { + this.row = row; + } + + @Override + public void dealRow(BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist) { + for (BoundSheetRecord record : orderedBSRs) { + sheetNames.add(record.getSheetname()); + } + row = rowlist; + throw new ExcelAnalysisException("Finished to deal first row"); + } + +} + +class RowToCsvDeal implements IExcelRowDeal { + + private Map params; + private List sheetNames; + private OutputStream outputStream; + private Boolean hasHeader; + private Boolean fisrtRow = true; + public void init(Boolean hasHeader, List sheetNames, OutputStream outputStream) { + this.hasHeader = hasHeader; + this.sheetNames = sheetNames; + this.outputStream = outputStream; + } + + @Override + public void dealRow(BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist) { + String sheetName = orderedBSRs[sheetIndex].getSheetname(); + if (sheetNames == null || sheetNames.isEmpty() || sheetNames.contains(sheetName)) { + if (! 
(curRow == 0 && hasHeader)) { + try { + if (fisrtRow) { + fisrtRow = false; + } else { + outputStream.write("\n".getBytes()); + } + int len = rowlist.size(); + for (int i = 0; i < len; i ++) { + outputStream.write(rowlist.get(i).replaceAll("\n|\t", " ").getBytes("utf-8")); + if (i < len -1) { + outputStream.write("\t".getBytes()); + } + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + } +} \ No newline at end of file diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResultResource.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelReader.scala similarity index 91% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResultResource.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelReader.scala index 0361b43af3..4a4f9bdf16 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResultResource.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelReader.scala @@ -15,6 +15,6 @@ * limitations under the License. */ -package org.apache.linkis.manager.rm; +package org.apache.linkis.storage.excel -public interface ResultResource {} +class StorageExcelReader {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala new file mode 100644 index 0000000000..9ea83130df --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala @@ -0,0 +1,236 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.excel + +import org.apache.linkis.common.io.{MetaData, Record} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.domain._ +import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord} + +import org.apache.commons.io.IOUtils +import org.apache.poi.ss.usermodel._ +import org.apache.poi.xssf.streaming.{SXSSFCell, SXSSFSheet, SXSSFWorkbook} + +import java.io._ +import java.math.BigDecimal +import java.util +import java.util.Date + +import scala.collection.mutable.ArrayBuffer + +class StorageExcelWriter( + val charset: String, + val sheetName: String, + val dateFormat: String, + val outputStream: OutputStream, + val autoFormat: Boolean +) extends ExcelFsWriter + with Logging { + + protected var workBook: SXSSFWorkbook = _ + protected var sheet: SXSSFSheet = _ + protected var format: DataFormat = _ + protected var types: Array[DataType] = _ + protected var rowPoint = 0 + protected var columnCounter = 0 + protected val styles = new util.HashMap[String, CellStyle]() + protected var isFlush = true + protected val os = new ByteArrayOutputStream() + protected var is: ByteArrayInputStream = _ + + def init: Unit = { + workBook = new SXSSFWorkbook() + sheet = workBook.createSheet(sheetName) + } + + 
def getDefaultHeadStyle: CellStyle = { + val headerFont = workBook.createFont + headerFont.setBold(true) + headerFont.setFontHeightInPoints(14.toShort) + headerFont.setColor(IndexedColors.BLACK.getIndex) + val headerCellStyle = workBook.createCellStyle + headerCellStyle.setFont(headerFont) + headerCellStyle + } + + def getWorkBook: Workbook = { + // 自适应列宽 + sheet.trackAllColumnsForAutoSizing() + for (elem <- 0 to columnCounter) { + sheet.autoSizeColumn(elem) + } + workBook + } + + def createCellStyle(dataType: DataType): CellStyle = { + val style = workBook.createCellStyle() + format = workBook.createDataFormat() + dataType.toString match { + case _ => style.setDataFormat(format.getFormat("@")) + } + if (autoFormat) { + dataType match { + case StringType => style.setDataFormat(format.getFormat("@")) + case TinyIntType => style.setDataFormat(format.getFormat("#")) + case ShortIntType => style.setDataFormat(format.getFormat("#")) + case IntType => style.setDataFormat(format.getFormat("#")) + case LongType => style.setDataFormat(format.getFormat("#.##E+00")) + case BigIntType => style.setDataFormat(format.getFormat("#.##E+00")) + case FloatType => style.setDataFormat(format.getFormat("#.0000000000")) + case DoubleType => style.setDataFormat(format.getFormat("#.0000000000")) + case CharType => style.setDataFormat(format.getFormat("@")) + case VarcharType => style.setDataFormat(format.getFormat("@")) + case DateType => style.setDataFormat(format.getFormat("m/d/yy h:mm")) + case TimestampType => style.setDataFormat(format.getFormat("m/d/yy h:mm")) + case DecimalType => style.setDataFormat(format.getFormat("#.000000000")) + case BigDecimalType => style.setDataFormat(format.getFormat("#.000000000")) + case _ => style.setDataFormat(format.getFormat("@")) + } + } + style + } + + def getCellStyle(dataType: DataType): CellStyle = { + val style = styles.get(dataType.typeName) + if (style == null) { + val newStyle = createCellStyle(dataType) + styles.put(dataType.typeName, 
newStyle) + newStyle + } else { + style + } + } + + @scala.throws[IOException] + override def addMetaData(metaData: MetaData): Unit = { + init + val tableHead = sheet.createRow(0) + val columns = metaData.asInstanceOf[TableMetaData].columns + val columnType = new ArrayBuffer[DataType]() + for (elem <- columns) { + val headCell = tableHead.createCell(columnCounter) + headCell.setCellValue(elem.columnName) + headCell.setCellStyle(getDefaultHeadStyle) + columnType += elem.dataType + columnCounter += 1 + } + types = columnType.toArray + rowPoint += 1 + } + + @scala.throws[IOException] + override def addRecord(record: Record): Unit = { + // TODO: 是否需要替换null值 + val tableBody = sheet.createRow(rowPoint) + var colunmPoint = 0 + val excelRecord = record.asInstanceOf[TableRecord].row + for (elem <- excelRecord) { + val cell = tableBody.createCell(colunmPoint) + val dataType = types.apply(colunmPoint) + if (autoFormat) { + setCellTypeValue(dataType, elem, cell) + } else { + cell.setCellValue(DataType.valueToString(elem)) + } + cell.setCellStyle(getCellStyle(dataType)) + colunmPoint += 1 + } + rowPoint += 1 + } + + private def setCellTypeValue(dataType: DataType, elem: Any, cell: SXSSFCell): Unit = { + if (null == elem) return + Utils.tryCatch { + dataType match { + case StringType => cell.setCellValue(DataType.valueToString(elem)) + case TinyIntType => cell.setCellValue(elem.toString.toInt) + case ShortIntType => cell.setCellValue(elem.toString.toInt) + case IntType => cell.setCellValue(elem.toString.toInt) + case LongType => cell.setCellValue(elem.toString.toLong) + case BigIntType => cell.setCellValue(elem.toString.toLong) + case FloatType => cell.setCellValue(elem.toString.toFloat) + case DoubleType => + doubleCheck(elem.toString) + cell.setCellValue(elem.toString.toDouble) + case CharType => cell.setCellValue(DataType.valueToString(elem)) + case VarcharType => cell.setCellValue(DataType.valueToString(elem)) + case DateType => cell.setCellValue(getDate(elem)) + case 
TimestampType => cell.setCellValue(getDate(elem)) + case DecimalType => + doubleCheck(DataType.valueToString(elem)) + cell.setCellValue(DataType.valueToString(elem).toDouble) + case BigDecimalType => + doubleCheck(DataType.valueToString(elem)) + cell.setCellValue(DataType.valueToString(elem).toDouble) + case _ => + val value = DataType.valueToString(elem) + cell.setCellValue(value) + } + } { case e: Exception => + cell.setCellValue(DataType.valueToString(elem)) + } + } + + private def getDate(value: Any): Date = { + if (value.isInstanceOf[Date]) { + value.asInstanceOf[Date] + } else { + throw new NumberFormatException( + s"Value ${value} with class : ${value.getClass.getName} is not a valid type of Date." + ); + } + } + + /** + * Check whether the double exceeds the number of digits, which will affect the data accuracy + * @param elemValue + */ + private def doubleCheck(elemValue: String): Unit = { + val value = new BigDecimal(elemValue).stripTrailingZeros + if ((value.precision - value.scale) > 15) { + throw new NumberFormatException( + s"Value ${elemValue} error : This data exceeds 15 significant digits." 
+ ); + } + } + + override def flush(): Unit = { + getWorkBook.write(os) + val content: Array[Byte] = os.toByteArray + is = new ByteArrayInputStream(content) + val buffer: Array[Byte] = new Array[Byte](1024) + var bytesRead: Int = 0 + while (isFlush) { + bytesRead = is.read(buffer, 0, 1024) + if (bytesRead == -1) { + isFlush = false + } else { + outputStream.write(buffer, 0, bytesRead) + } + } + } + + override def close(): Unit = { + if (isFlush) flush() + IOUtils.closeQuietly(outputStream) + IOUtils.closeQuietly(is) + IOUtils.closeQuietly(os) + IOUtils.closeQuietly(workBook) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageMultiExcelWriter.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageMultiExcelWriter.scala similarity index 58% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageMultiExcelWriter.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageMultiExcelWriter.scala index a028397bd6..b47e04394f 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageMultiExcelWriter.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageMultiExcelWriter.scala @@ -15,44 +15,39 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.excel; +package org.apache.linkis.storage.excel -import org.apache.poi.xssf.streaming.SXSSFWorkbook; +import org.apache.poi.xssf.streaming.SXSSFWorkbook -import java.io.OutputStream; +import java.io.OutputStream -public class StorageMultiExcelWriter extends StorageExcelWriter { +class StorageMultiExcelWriter( + override val outputStream: OutputStream, + override val autoFormat: Boolean +) extends StorageExcelWriter(null, null, null, outputStream, autoFormat) { - private int sheetIndex = 0; + private var sheetIndex = 0 - public StorageMultiExcelWriter(OutputStream outputStream, boolean autoFormat) { - super(null, null, null, outputStream, autoFormat); - } - - @Override - public void init() { - if (workBook == null) { - workBook = new SXSSFWorkbook(); - } + override def init: Unit = { + if (workBook == null) workBook = new SXSSFWorkbook() // 1.让表自适应列宽 if (sheet != null) { - sheet.trackAllColumnsForAutoSizing(); - for (int i = 0; i <= columnCounter; i++) { - sheet.autoSizeColumn(i); - } + sheet.trackAllColumnsForAutoSizing() + 0 to columnCounter foreach (sheet.autoSizeColumn) } // 2.重置参数 // 2.1 columnCounter 归0 - columnCounter = 0; + columnCounter = 0 // 2.2 创建新sheet - sheet = workBook.createSheet("resultset" + sheetIndex); + sheet = workBook.createSheet(s"resultset$sheetIndex") // 2.3 sheetIndex自增 - sheetIndex++; + sheetIndex += 1 // 2.4 types 置空 - types = null; + types = null // 2.5 rowPoint 归0 记录行数 - rowPoint = 0; + rowPoint = 0 // 2.6 styles 清空 - styles.clear(); + styles.clear() } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala new file mode 100644 index 0000000000..b21bf7e492 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.io + +import org.apache.linkis.storage.domain.MethodEntity +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOC +import org.apache.linkis.storage.exception.StorageErrorException + +import java.util.UUID + +import org.slf4j.{Logger, LoggerFactory} + +/** + * IOClient is used to execute the proxy as the ujes code execution entry in io and get the return + * result. IOClient用于在io进行代理作为ujes的代码执行入口,并获取返回结果 + */ +trait IOClient { + + def execute(user: String, methodEntity: MethodEntity, params: java.util.Map[String, Any]): String + + def executeWithEngine( + user: String, + methodEntity: MethodEntity, + params: java.util.Map[String, Any] + ): Array[String] + +} + +object IOClient { + val logger: Logger = LoggerFactory.getLogger(classOf[IOClient]) + var ioClient: IOClient = null + + val SUCCESS = "SUCCESS" + val FAILED = "FAILED" + + def getIOClient(): IOClient = { + if (ioClient == null) { + throw new StorageErrorException( + MUST_REGISTER_TOC.getErrorCode, + MUST_REGISTER_TOC.getErrorDesc + ) + } + ioClient + } + + /** + * This method is called when ioClient is initialized. 
ioClient初始化时会调用该方法 + * @param client + * IOClient + */ + def register(client: IOClient): Unit = { + this.ioClient = client + logger.debug(s"IOClient: ${ioClient.toString} registered") + } + + def getFSId(): String = { + UUID.randomUUID().toString + } + +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClientFactory.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala similarity index 53% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClientFactory.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala index 244ad59477..51e1589eb7 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClientFactory.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala @@ -15,43 +15,39 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.io; +package org.apache.linkis.storage.io -import org.apache.linkis.storage.exception.StorageErrorException; +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM +import org.apache.linkis.storage.exception.StorageErrorException -import java.util.UUID; +import org.springframework.cglib.proxy.MethodInterceptor -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +trait IOMethodInterceptorCreator { -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOC; - -public class IOClientFactory { - private static final Logger logger = LoggerFactory.getLogger(IOClientFactory.class); - private static IOClient ioClient = null; + def createIOMethodInterceptor(fsName: String): MethodInterceptor +} - private static final String SUCCESS = "SUCCESS"; - private static final String FAILED = "FAILED"; +object IOMethodInterceptorCreator { - public static IOClient getIOClient() throws StorageErrorException { - if (ioClient == null) { - throw new StorageErrorException( - MUST_REGISTER_TOC.getErrorCode(), MUST_REGISTER_TOC.getErrorDesc()); - } - return ioClient; - } + var interceptorCreator: IOMethodInterceptorCreator = null /** * This method is called when ioClient is initialized. 
ioClient初始化时会调用该方法 * - * @param client IOClient + * @param interceptorCreator */ - public static void register(IOClient client) { - ioClient = client; - logger.debug("IOClient: {} registered", ioClient.toString()); + def register(interceptorCreator: IOMethodInterceptorCreator): Unit = { + this.interceptorCreator = interceptorCreator } - public static String getFSId() { - return UUID.randomUUID().toString(); + def getIOMethodInterceptor(fsName: String): MethodInterceptor = { + if (interceptorCreator == null) { + throw new StorageErrorException( + MUST_REGISTER_TOM.getErrorCode, + MUST_REGISTER_TOM.getErrorDesc + ) + } + interceptorCreator.createIOMethodInterceptor(fsName) } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala new file mode 100644 index 0000000000..d4836731db --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSet +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.{ + THE_FILE_IS_EMPTY, + UNSUPPORTED_RESULT +} +import org.apache.linkis.storage.exception.{StorageErrorException, StorageWarnException} +import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} + +import org.apache.commons.lang3.StringUtils + +import java.text.MessageFormat +import java.util +import java.util.Locale + +class DefaultResultSetFactory extends ResultSetFactory with Logging { + + private val resultClasses: Map[String, Class[ResultSet[ResultMetaData, ResultRecord]]] = + StorageUtils.loadClasses( + StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue, + StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue, + t => t.newInstance().resultSetType().toLowerCase(Locale.getDefault) + ) + + val resultTypes = ResultSetFactory.resultSetType.keys.toArray + + override def getResultSetByType(resultSetType: String): ResultSet[_ <: MetaData, _ <: Record] = { + if (!resultClasses.contains(resultSetType)) { + throw new StorageErrorException( + UNSUPPORTED_RESULT.getErrorCode, + MessageFormat.format(UNSUPPORTED_RESULT.getErrorDesc, resultSetType) + ) + } + resultClasses(resultSetType).newInstance() + } + + override def getResultSetByPath(fsPath: FsPath): ResultSet[_ <: MetaData, _ <: Record] = { + getResultSetByPath(fsPath, StorageUtils.getJvmUser) + } + + override def getResultSetByContent(content: String): ResultSet[_ <: MetaData, _ <: Record] = { + getResultSetByType(Dolphin.getType(content)) + } + + override def exists(resultSetType: String): Boolean = resultClasses.contains(resultSetType) + + override def isResultSetPath(path: String): Boolean = { 
+ path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX) + } + + override def isResultSet(content: String): Boolean = + Utils.tryCatch(resultClasses.contains(Dolphin.getType(content))) { t => + logger.info("Wrong result Set: " + t.getMessage) + false + } + + override def getResultSet(output: String): ResultSet[_ <: MetaData, _ <: Record] = + getResultSet(output, StorageUtils.getJvmUser) + + override def getResultSetType: Array[String] = resultTypes + + override def getResultSetByPath(fsPath: FsPath, fs: Fs): ResultSet[_ <: MetaData, _ <: Record] = { + val inputStream = fs.read(fsPath) + val resultSetType = Dolphin.getType(inputStream) + if (StringUtils.isEmpty(resultSetType)) { + throw new StorageWarnException( + THE_FILE_IS_EMPTY.getErrorCode, + MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc, fsPath.getPath) + ) + } + Utils.tryQuietly(inputStream.close()) + // Utils.tryQuietly(fs.close()) + getResultSetByType(resultSetType) + } + + override def getResultSetByPath( + fsPath: FsPath, + proxyUser: String + ): ResultSet[_ <: MetaData, _ <: Record] = { + if (fsPath == null) return null + logger.info("Get Result Set By Path:" + fsPath.getPath) + val fs = FSFactory.getFsByProxyUser(fsPath, proxyUser) + fs.init(new util.HashMap[String, String]()) + val inputStream = fs.read(fsPath) + val resultSetType = Dolphin.getType(inputStream) + if (StringUtils.isEmpty(resultSetType)) { + throw new StorageWarnException( + THE_FILE_IS_EMPTY.getErrorCode, + MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc, fsPath.getPath) + ) + } + Utils.tryQuietly(inputStream.close()) + Utils.tryQuietly(fs.close()) + getResultSetByType(resultSetType) + } + + override def getResultSet( + output: String, + proxyUser: String + ): ResultSet[_ <: MetaData, _ <: Record] = { + if (isResultSetPath(output)) { + getResultSetByPath(new FsPath(output), proxyUser) + } else if (isResultSet(output)) { + getResultSetByContent(output) + } else null + } + +} diff --git 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultRecord.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultMetaData.scala similarity index 85% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultRecord.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultMetaData.scala index ce2686099b..7871bb2ac9 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultRecord.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultMetaData.scala @@ -15,8 +15,8 @@ * limitations under the License. */ -package org.apache.linkis.storage.resultset; +package org.apache.linkis.storage.resultset -import org.apache.linkis.common.io.Record; +import org.apache.linkis.common.io.MetaData -public interface ResultRecord extends Record {} +abstract class ResultMetaData extends MetaData {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala new file mode 100644 index 0000000000..ab9244ca98 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.Record + +abstract class ResultRecord extends Record {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala new file mode 100644 index 0000000000..e6615873b1 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSet + +import scala.collection.mutable + +trait ResultSetFactory extends scala.AnyRef { + + def getResultSetByType(resultSetType: scala.Predef.String): ResultSet[_ <: MetaData, _ <: Record] + + def getResultSetByPath(fsPath: FsPath): ResultSet[_ <: MetaData, _ <: Record] + def getResultSetByPath(fsPath: FsPath, fs: Fs): ResultSet[_ <: MetaData, _ <: Record] + def getResultSetByContent(content: scala.Predef.String): ResultSet[_ <: MetaData, _ <: Record] + def exists(resultSetType: scala.Predef.String): scala.Boolean + def isResultSetPath(path: scala.Predef.String): scala.Boolean + def isResultSet(content: scala.Predef.String): scala.Boolean + def getResultSet(output: String): ResultSet[_ <: MetaData, _ <: Record] + + def getResultSetByPath(fsPath: FsPath, proxyUser: String): ResultSet[_ <: MetaData, _ <: Record] + + def getResultSet(output: String, proxyUser: String): ResultSet[_ <: MetaData, _ <: Record] + + /** + * The first must-time text(第一个必须时text) + * @return + */ + def getResultSetType: Array[String] +} + +object ResultSetFactory { + + val TEXT_TYPE = "1" + val TABLE_TYPE = "2" + val IO_TYPE = "3" + val PICTURE_TYPE = "4" + val HTML_TYPE = "5" + + /** + * TODO 修改为注册形式,并修改ResultSet的getResultType逻辑 Result set corresponding type record(结果集对应类型记录) + */ + val resultSetType = mutable.LinkedHashMap[String, String]( + TEXT_TYPE -> "TEXT", + TABLE_TYPE -> "TABLE", + IO_TYPE -> "IO", + PICTURE_TYPE -> "PICTURE", + HTML_TYPE -> "HTML" + ) + + val factory = new DefaultResultSetFactory + def getInstance: ResultSetFactory = factory +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala new file mode 100644 index 0000000000..e61cf36b3d 
--- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED +import org.apache.linkis.storage.exception.StorageErrorException +import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord, TableResultSet} + +import java.io.InputStream + +object ResultSetReader extends Logging { + + def getResultSetReader[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + inputStream: InputStream + ): ResultSetReader[K, V] = { + new StorageResultSetReader[K, V](resultSet, inputStream) + } + + def getResultSetReader[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + value: String + ): ResultSetReader[K, V] = { + new StorageResultSetReader[K, V](resultSet, value) + } + + def getResultSetReader(res: 
String): ResultSetReader[_ <: MetaData, _ <: Record] = { + val rsFactory = ResultSetFactory.getInstance + if (rsFactory.isResultSet(res)) { + val resultSet = rsFactory.getResultSet(res) + ResultSetReader.getResultSetReader(resultSet, res) + } else { + val resPath = new FsPath(res) + val resultSet = rsFactory.getResultSetByPath(resPath) + val fs = FSFactory.getFs(resPath) + fs.init(null) + val reader = ResultSetReader.getResultSetReader(resultSet, fs.read(resPath)) + reader match { + case storageResultSetReader: StorageResultSetReader[_, _] => + storageResultSetReader.setFs(fs) + case _ => + } + reader + } + } + + def getTableResultReader(res: String): ResultSetReader[TableMetaData, TableRecord] = { + val rsFactory = ResultSetFactory.getInstance + if (rsFactory.isResultSet(res)) { + val resultSet = rsFactory.getResultSet(res) + if (ResultSetFactory.TABLE_TYPE != resultSet.resultSetType()) { + throw new StorageErrorException( + TABLE_ARE_NOT_SUPPORTED.getErrorCode, + TABLE_ARE_NOT_SUPPORTED.getErrorDesc + ) + } + ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], res) + } else { + val resPath = new FsPath(res) + val resultSet = rsFactory.getResultSetByPath(resPath) + if (ResultSetFactory.TABLE_TYPE != resultSet.resultSetType()) { + throw new StorageErrorException( + TABLE_ARE_NOT_SUPPORTED.getErrorCode, + TABLE_ARE_NOT_SUPPORTED.getErrorDesc + ) + } + val fs = FSFactory.getFs(resPath) + logger.info("Try to init Fs with path:" + resPath.getPath) + fs.init(null) + ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], fs.read(resPath)) + } + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala new file mode 100644 index 0000000000..cea00bbd60 --- /dev/null +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} + +import scala.collection.mutable.ArrayBuffer + +object ResultSetWriter { + + def getResultSetWriter[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + maxCacheSize: Long, + storePath: FsPath + ): ResultSetWriter[K, V] = + new StorageResultSetWriter[K, V](resultSet, maxCacheSize, storePath) + + def getResultSetWriter[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + maxCacheSize: Long, + storePath: FsPath, + proxyUser: String + ): ResultSetWriter[K, V] = { + val writer = new StorageResultSetWriter[K, V](resultSet, maxCacheSize, storePath) + writer.setProxyUser(proxyUser) + writer + } + + def getRecordByWriter( + writer: ResultSetWriter[_ <: MetaData, _ <: Record], + limit: Long + ): Array[Record] = { + val res = writer.toString + getRecordByRes(res, limit) + } + + def getRecordByRes(res: String, limit: Long): Array[Record] = { + val reader = 
ResultSetReader.getResultSetReader(res) + var count = 0 + val records = new ArrayBuffer[Record]() + reader.getMetaData + while (reader.hasNext && count < limit) { + records += reader.getRecord + count = count + 1 + } + records.toArray + } + + def getLastRecordByRes(res: String): Record = { + val reader = ResultSetReader.getResultSetReader(res) + reader.getMetaData + while (reader.hasNext) { + reader.getRecord + } + reader.getRecord + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala new file mode 100644 index 0000000000..7b3aca62d9 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSet +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.utils.StorageConfiguration + +abstract class StorageResultSet[K <: MetaData, V <: Record] extends ResultSet[K, V] with Logging { + + val resultHeaderBytes = Dolphin.MAGIC_BYTES ++ Dolphin.getIntBytes(resultSetType().toInt) + override val charset: String = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue + + override def getResultSetPath(parentDir: FsPath, fileName: String): FsPath = { + val path = if (parentDir.getPath.endsWith("/")) { + parentDir.toPath + fileName + Dolphin.DOLPHIN_FILE_SUFFIX + } else { + parentDir.toPath + "/" + fileName + Dolphin.DOLPHIN_FILE_SUFFIX + } + logger.info(s"Get result set path:${path}") + new FsPath(path) + } + + override def getResultSetHeader: Array[Byte] = resultHeaderBytes + + override def belongToPath(path: String): Boolean = path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX) + + override def belongToResultSet(content: String): Boolean = + Utils.tryCatch(Dolphin.getType(content) == resultSetType()) { t => + logger.info("Wrong result Set: ", t) + false + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala new file mode 100644 index 0000000000..24dd0fdf8a --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary +import org.apache.linkis.storage.exception.{ + ColLengthExceedException, + StorageErrorCode, + StorageErrorException, + StorageWarnException +} +import org.apache.linkis.storage.resultset.table.TableMetaData +import org.apache.linkis.storage.utils.StorageUtils + +import java.io.{ByteArrayInputStream, InputStream, IOException} +import java.text.MessageFormat + +import scala.collection.mutable.ArrayBuffer + +class StorageResultSetReader[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + inputStream: InputStream +) extends ResultSetReader[K, V](resultSet, inputStream) + with Logging { + + private val deserializer = resultSet.createResultSetDeserializer + private var metaData: K = _ + private var row: Record = _ + private var rowCount = 0 + + private var fs: Fs = _ + + def this(resultSet: ResultSet[K, V], value: String) = { + this(resultSet, new ByteArrayInputStream(value.getBytes(Dolphin.CHAR_SET))) + } + + def init(): Unit = { + val resType = 
Dolphin.getType(inputStream) + if (resultSet.resultSetType != resType) { + throw new IOException( + "File type does not match(文件类型不匹配): " + ResultSetFactory.resultSetType + .getOrElse(resType, "TABLE") + ) + } + } + + /** + * Read a row of data Read the line length first Get the entire row of data by the length of the + * line, first obtain the column length in the entire row of data, and then divide into column + * length to split the data 读取一行数据 先读取行长度 通过行长度获取整行数据,在整行数据中先获取列长度,进而分割成列长度从而分割数据 + * @return + */ + def readLine(): Array[Byte] = { + + var rowLen = 0 + try rowLen = Dolphin.readInt(inputStream) + catch { + case _: StorageWarnException => logger.info(s"Read finished(读取完毕)"); return null + case t: Throwable => throw t + } + + if (rowLen > LinkisStorageConf.LINKIS_READ_ROW_BYTE_MAX_LEN) { + throw new ColLengthExceedException( + LinkisStorageErrorCodeSummary.RESULT_ROW_LENGTH.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_ROW_LENGTH.getErrorDesc, + rowLen.asInstanceOf[Object], + LinkisStorageConf.LINKIS_READ_ROW_BYTE_MAX_LEN.asInstanceOf[Object] + ) + ) + } + + var bytes: Array[Byte] = null + try { + bytes = new Array[Byte](rowLen) + } catch { + case e: OutOfMemoryError => + logger.error("Result set read oom, read size {} Byte", rowLen) + throw new StorageErrorException( + StorageErrorCode.FS_OOM.getCode, + StorageErrorCode.FS_OOM.getMessage, + e + ) + } + val len = StorageUtils.readBytes(inputStream, bytes, rowLen) + if (len != rowLen) { + throw new StorageErrorException( + StorageErrorCode.INCONSISTENT_DATA.getCode, + String.format(StorageErrorCode.INCONSISTENT_DATA.getMessage, len.toString, rowLen.toString) + ) + } + rowCount = rowCount + 1 + bytes + } + + @scala.throws[IOException] + override def getRecord: Record = { + if (metaData == null) throw new IOException("Must read metadata first(必须先读取metadata)") + if (row == null) { + throw new IOException( + "Can't get the value of the field, maybe the IO stream has been read or 
has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)" + ) + } + row + } + + def setFs(fs: Fs): Unit = this.fs = fs + def getFs: Fs = this.fs + + @scala.throws[IOException] + override def getMetaData: MetaData = { + if (metaData == null) init() + metaData = deserializer.createMetaData(readLine()) + metaData + } + + @scala.throws[IOException] + override def skip(recordNum: Int): Int = { + if (recordNum < 0) return -1 + + if (metaData == null) getMetaData + for (i <- recordNum until (0, -1)) { + try inputStream.skip(Dolphin.readInt(inputStream)) + catch { + case t: Throwable => + return recordNum - i + } + } + recordNum + } + + @scala.throws[IOException] + override def getPosition: Long = rowCount + + @scala.throws[IOException] + override def hasNext: Boolean = { + if (metaData == null) getMetaData + + val line = readLine() + if (line == null) return false + row = deserializer.createRecord(line) + if (row == null) return false + true + } + + @scala.throws[IOException] + override def available: Long = inputStream.available() + + override def close(): Unit = { + inputStream.close() + if (this.fs != null) Utils.tryQuietly(this.fs.close()) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala new file mode 100644 index 0000000000..caed8c0ea0 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSerializer, ResultSet, ResultSetWriter} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.fs.impl.HDFSFileSystem +import org.apache.linkis.storage.utils.{FileSystemUtils, StorageUtils} + +import org.apache.commons.io.IOUtils +import org.apache.hadoop.hdfs.client.HdfsDataOutputStream + +import java.io.{IOException, OutputStream} + +import scala.collection.mutable.ArrayBuffer + +class StorageResultSetWriter[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + maxCacheSize: Long, + storePath: FsPath +) extends ResultSetWriter[K, V]( + resultSet = resultSet, + maxCacheSize = maxCacheSize, + storePath = storePath + ) + with Logging { + + private val serializer: ResultSerializer = resultSet.createResultSetSerializer + + private var moveToWriteRow = false + + private var outputStream: OutputStream = _ + + private var rowCount = 0 + + private val buffer = new ArrayBuffer[Byte]() + + private var fs: Fs = _ + + private var rMetaData: MetaData = _ + + private var proxyUser: String = StorageUtils.getJvmUser + + private var fileCreated = false + + private var closed = false + + private val WRITER_LOCK_CREATE = new Object() + + private val 
WRITER_LOCK_CLOSE = new Object() + + def getMetaData: MetaData = rMetaData + + def setProxyUser(proxyUser: String): Unit = { + this.proxyUser = proxyUser + } + + def isEmpty: Boolean = { + rMetaData == null && buffer.length <= Dolphin.FILE_EMPTY + } + + def init(): Unit = { + writeLine(resultSet.getResultSetHeader, true) + } + + def createNewFile: Unit = { + if (!fileCreated) { + WRITER_LOCK_CREATE.synchronized { + if (!fileCreated) { + if (storePath != null && outputStream == null) { + logger.info(s"Try to create a new file:${storePath}, with proxy user:${proxyUser}") + fs = FSFactory.getFsByProxyUser(storePath, proxyUser) + fs.init(null) + FileSystemUtils.createNewFile(storePath, proxyUser, true) + + outputStream = fs.write(storePath, true) + fs match { + case fileSystem: FileSystem => + fileSystem.setPermission(storePath, "rwx------") + case _ => + } + logger.info(s"Succeed to create a new file:$storePath") + fileCreated = true + } + } + } + } else if (null != storePath && null == outputStream) { + logger.warn("outputStream had been set null, but createNewFile() was called again.") + } + } + + def writeLine(bytes: Array[Byte], cache: Boolean = false): Unit = { + if (closed) { + logger.warn("the writer had been closed, but writeLine() was still called.") + return + } + if (bytes.length > LinkisStorageConf.ROW_BYTE_MAX_LEN) { + throw new IOException( + s"A single row of data cannot exceed ${LinkisStorageConf.ROW_BYTE_MAX_LEN_STR}" + ) + } + if (buffer.length > maxCacheSize && !cache) { + if (outputStream == null) { + createNewFile + } + flush() + outputStream.write(bytes) + } else { + buffer.appendAll(bytes) + } + } + + override def toString: String = { + if (outputStream == null) { + if (isEmpty) return "" + new String(buffer.toArray, Dolphin.CHAR_SET) + } else { + storePath.getSchemaPath + } + } + + override def toFSPath: FsPath = storePath + + override def addMetaDataAndRecordString(content: String): Unit = { + if (!moveToWriteRow) { + val bytes = 
content.getBytes(Dolphin.CHAR_SET) + writeLine(bytes) + } + moveToWriteRow = true + } + + override def addRecordString(content: String): Unit = {} + + @scala.throws[IOException] + override def addMetaData(metaData: MetaData): Unit = { + if (!moveToWriteRow) { + { + rMetaData = metaData + init() + if (null == metaData) { + writeLine(serializer.metaDataToBytes(metaData), true) + } else { + writeLine(serializer.metaDataToBytes(metaData)) + } + } + moveToWriteRow = true + } + } + + @scala.throws[IOException] + override def addRecord(record: Record): Unit = { + if (moveToWriteRow) { + rowCount = rowCount + 1 + writeLine(serializer.recordToBytes(record)) + } + } + + def closeFs: Unit = { + if (fs != null) { + IOUtils.closeQuietly(fs) + fs = null + } + } + + override def close(): Unit = { + if (closed) { + logger.warn("the writer had been closed, but close() was still called.") + return + } else { + WRITER_LOCK_CLOSE.synchronized { + if (!closed) { + closed = true + } else { + return + } + } + } + Utils.tryFinally(if (outputStream != null) flush()) { + if (outputStream != null) { + IOUtils.closeQuietly(outputStream) + outputStream = null + } + closeFs + } + } + + override def flush(): Unit = { + createNewFile + if (outputStream != null) { + if (buffer.nonEmpty) { + outputStream.write(buffer.toArray) + buffer.clear() + } + Utils.tryAndWarnMsg[Unit] { + outputStream match { + case hdfs: HdfsDataOutputStream => + hdfs.hflush() + case _ => + outputStream.flush() + } + }(s"Error encounters when flush result set ") + } + if (closed) { + if (logger.isDebugEnabled()) { + logger.debug("the writer had been closed, but flush() was still called.") + } + } + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/html/HtmlResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/html/HtmlResultSet.scala new file mode 100644 index 0000000000..7a65a9b9d5 --- /dev/null +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/html/HtmlResultSet.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.html + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} +import org.apache.linkis.storage.resultset.txt.{TextResultDeserializer, TextResultSerializer} + +class HtmlResultSet extends StorageResultSet[LineMetaData, LineRecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.HTML_TYPE + + override def createResultSetSerializer(): ResultSerializer = new TextResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[LineMetaData, LineRecord] = + new TextResultDeserializer + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala new file mode 100644 index 0000000000..32b578c5a8 --- /dev/null +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.MetaData +import org.apache.linkis.storage.resultset.ResultMetaData + +class IOMetaData(val off: Int, val len: Int) extends ResultMetaData { + override def cloneMeta(): MetaData = new IOMetaData(off, len) +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala new file mode 100644 index 0000000000..54d3dc0224 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.Record +import org.apache.linkis.storage.resultset.ResultRecord + +class IORecord(val value: Array[Byte]) extends ResultRecord { + override def cloneRecord(): Record = new IORecord(value) +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala similarity index 56% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala index 37ae583f88..4e8199da04 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala @@ -15,34 +15,22 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.common.protocol.engine; +package org.apache.linkis.storage.resultset.io -import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.common.io.resultset.ResultDeserializer +import org.apache.linkis.storage.domain.Dolphin -public class EngineCreateSuccess implements EngineAsyncResponse { +import org.apache.commons.codec.binary.Base64 - private String id; +class IOResultDeserializer extends ResultDeserializer[IOMetaData, IORecord] { - private EngineNode engineNode; - - public EngineCreateSuccess(String id, EngineNode engineNode) { - this.id = id; - this.engineNode = engineNode; - } - - public String getId() { - return id; + override def createMetaData(bytes: Array[Byte]): IOMetaData = { + val values = Dolphin.getString(bytes, 0, bytes.length).split(Dolphin.COL_SPLIT) + new IOMetaData(values(0).toInt, values(1).toInt) } - public void setId(String id) { - this.id = id; + override def createRecord(bytes: Array[Byte]): IORecord = { + new IORecord(Base64.decodeBase64(Dolphin.getString(bytes, 0, bytes.length))) } - public EngineNode getEngineNode() { - return engineNode; - } - - public void setEngineNode(EngineNode engineNode) { - this.engineNode = engineNode; - } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala new file mode 100644 index 0000000000..be527e3969 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.{MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.domain.Dolphin + +import org.apache.commons.codec.binary.Base64 + +class IOResultSerializer extends ResultSerializer { + + override def metaDataToBytes(metaData: MetaData): Array[Byte] = { + val ioMetaData = metaData.asInstanceOf[IOMetaData] + lineToBytes(s"${ioMetaData.off}${Dolphin.COL_SPLIT}${ioMetaData.len}") + } + + override def recordToBytes(record: Record): Array[Byte] = { + val ioRecord = record.asInstanceOf[IORecord] + lineToBytes(Base64.encodeBase64String(ioRecord.value)) + } + + def lineToBytes(value: String): Array[Byte] = { + val bytes = if (value == null) Dolphin.NULL_BYTES else Dolphin.getBytes(value) + Dolphin.getIntBytes(bytes.length) ++ bytes + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala new file mode 100644 index 0000000000..5fd952dbf9 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} + +class IOResultSet extends StorageResultSet[IOMetaData, IORecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.IO_TYPE + + override def createResultSetSerializer(): ResultSerializer = new IOResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[IOMetaData, IORecord] = + new IOResultDeserializer + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala new file mode 100644 index 0000000000..95c1d3ad1f --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.picture + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} +import org.apache.linkis.storage.resultset.txt.{TextResultDeserializer, TextResultSerializer} + +class PictureResultSet extends StorageResultSet[LineMetaData, LineRecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.PICTURE_TYPE + + override def createResultSetSerializer(): ResultSerializer = new TextResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[LineMetaData, LineRecord] = + new TextResultDeserializer + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala similarity index 70% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala index 9646633e3d..526078d6c9 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala @@ -15,22 +15,16 @@ * limitations under the License. */ -package org.apache.linkis.manager.common.protocol; +package org.apache.linkis.storage.resultset.table -public class RequestEngineUnlock implements EngineLock { - private String lock; +import org.apache.linkis.common.io.MetaData +import org.apache.linkis.storage.domain.Column +import org.apache.linkis.storage.resultset.ResultMetaData - public RequestEngineUnlock() {} +class TableMetaData(val columns: Array[Column]) extends ResultMetaData { - public RequestEngineUnlock(String lock) { - this.lock = lock; + override def cloneMeta(): MetaData = { + new TableMetaData(columns) } - public String getLock() { - return lock; - } - - public void setLock(String lock) { - this.lock = lock; - } } diff --git a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala similarity index 71% rename from linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala index dac32f0984..64d2b18b9c 100644 --- a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala @@ -15,12 +15,16 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.engineplugin.python.exception; +package org.apache.linkis.storage.resultset.table -import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.io.Record +import org.apache.linkis.storage.resultset.ResultRecord +import org.apache.linkis.storage.utils.StorageUtils -public class ExecuteException extends ErrorException { - public ExecuteException(int errCode, String desc) { - super(errCode, desc); +class TableRecord(val row: Array[Any]) extends ResultRecord { + + override def cloneRecord(): Record = { + new TableRecord(row) } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala new file mode 100644 index 0000000000..86d09e9532 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset.table + +import org.apache.linkis.common.io.resultset.ResultDeserializer +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.domain.{Column, DataType, Dolphin} +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.PARSING_METADATA_FAILED +import org.apache.linkis.storage.exception.{ + ColLengthExceedException, + ColumnIndexExceedException, + StorageErrorException +} + +import org.apache.commons.lang3.StringUtils + +import java.text.MessageFormat + +import scala.collection.mutable.ArrayBuffer + +class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRecord] with Logging { + + var metaData: TableMetaData = _ + + var columnSet: Set[Int] = null + + import DataType._ + + override def createMetaData(bytes: Array[Byte]): TableMetaData = { + val colByteLen = Dolphin.getString(bytes, 0, Dolphin.INT_LEN).toInt + val colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen) + val colArray = + if (colString.endsWith(Dolphin.COL_SPLIT)) { + colString.substring(0, colString.length - 1).split(Dolphin.COL_SPLIT) + } else colString.split(Dolphin.COL_SPLIT) + var index = Dolphin.INT_LEN + colByteLen + if (colArray.length % 3 != 0) { + throw new StorageErrorException( + PARSING_METADATA_FAILED.getErrorCode, + PARSING_METADATA_FAILED.getErrorDesc + ) + } + val columns = new ArrayBuffer[Column]() + for (i <- 0 until (colArray.length, 3)) { + var len = colArray(i).toInt + val colName = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) + index += len + len = colArray(i + 1).toInt + val colType = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) + index += len + len = colArray(i + 2).toInt + val colComment = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) + index += len + columns += Column(colName, 
colType, colComment) + } + metaData = new TableMetaData(columns.toArray) + metaData + } + + /** + * colByteLen:All column fields are long(所有列字段长 记录的长度) colString:Obtain column + * length(获得列长):10,20,21 colArray:Column length array(列长数组) Get data by column length(通过列长获得数据) + * @param bytes + * @return + */ + override def createRecord(bytes: Array[Byte]): TableRecord = { + val colByteLen = Dolphin.getString(bytes, 0, Dolphin.INT_LEN).toInt + val colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen) + val colArray = + if (colString.endsWith(Dolphin.COL_SPLIT)) { + colString.substring(0, colString.length - 1).split(Dolphin.COL_SPLIT) + } else colString.split(Dolphin.COL_SPLIT) + var index = Dolphin.INT_LEN + colByteLen + var enableLimit: Boolean = false + if (StringUtils.isNotBlank(LinkisStorageConf.enableLimitThreadLocal.get())) { + enableLimit = true + } + val columnIndices: Array[Int] = LinkisStorageConf.columnIndicesThreadLocal.get() + if (columnSet == null && columnIndices != null) { + columnSet = columnIndices.toSet + } + + val lastIndex = + if (columnIndices != null && columnIndices.length > 0) columnIndices(columnIndices.length - 1) + else 0 + var realValueSize = colArray.size + + if (enableLimit && metaData.columns.size <= columnIndices(0)) { + throw new ColumnIndexExceedException( + LinkisStorageErrorCodeSummary.RESULT_COLUMN_INDEX_OUT_OF_BOUNDS.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_COLUMN_INDEX_OUT_OF_BOUNDS.getErrorDesc, + columnIndices(0).asInstanceOf[Object], + metaData.columns.size.asInstanceOf[Object] + ) + ) + } + + if (enableLimit && metaData.columns.size > lastIndex) { + realValueSize = columnIndices.length + } else if (enableLimit && metaData.columns.size <= lastIndex) { + realValueSize = metaData.columns.size % columnIndices.length + } + + val columnSize = colArray.size + val rowArray = new Array[Any](realValueSize) + + var colIdx = 0 + for (i <- 0 until columnSize) { + val len = colArray(i).toInt 
+ val res = Dolphin.getString(bytes, index, len) + if (res.length > LinkisStorageConf.LINKIS_RESULT_COL_LENGTH && enableLimit) { + throw new ColLengthExceedException( + LinkisStorageErrorCodeSummary.RESULT_COL_LENGTH.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_COL_LENGTH.getErrorDesc, + res.length.asInstanceOf[Object], + LinkisStorageConf.LINKIS_RESULT_COL_LENGTH.asInstanceOf[Object] + ) + ) + } + index += len + // 如果enableLimit为true,则采取的是列分页 + if (enableLimit) { + if (columnSet.contains(i)) { + rowArray(colIdx) = toValue(metaData.columns(i).dataType, res) + colIdx += 1 + } + } else { + if (i >= metaData.columns.length) rowArray(i) = res + else { + rowArray(i) = toValue(metaData.columns(i).dataType, res) + } + } + } + new TableRecord(rowArray) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala new file mode 100644 index 0000000000..5d1738a346 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.table + +import org.apache.linkis.common.io.{MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.domain.Dolphin + +import scala.collection.mutable.ArrayBuffer + +class TableResultSerializer extends ResultSerializer { + + override def metaDataToBytes(metaData: MetaData): Array[Byte] = { + val tableMetaData = metaData.asInstanceOf[TableMetaData] + lineToBytes(tableMetaData.columns.map(_.toArray).reduce((a1, a2) => a1 ++ a2)) + } + + override def recordToBytes(record: Record): Array[Byte] = { + val tableRecord = record.asInstanceOf[TableRecord] + lineToBytes(tableRecord.row) + } + + /** + * Convert a row of data to an array of Bytes Convert the data to byte and get the corresponding + * total byte length to write to the file Data write format: line length (fixed length) column + * length (fixed length) field index comma segmentation real data For example: + * 000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51 The length of the line does not + * include its own length 将一行数据转换为Bytes的数组 对数据转换为byte,并获取相应的总byte长度写入文件 数据写入格式:行长(固定长度) 列长(固定长度) + * 字段索引逗号分割 真实数据 如:000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51 其中行长不包括自身长度 + * @param line + */ + def lineToBytes(line: Array[Any]): Array[Byte] = { + val dataBytes = ArrayBuffer[Array[Byte]]() + val colIndex = ArrayBuffer[Array[Byte]]() + var colByteLen = 0 + var length = 0 + line.foreach { data => + val bytes = if (data == null) { + if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) { + Dolphin.LINKIS_NULL_BYTES + } else { + Dolphin.NULL_BYTES + } + } else { + Dolphin.getBytes(data) + } + dataBytes += bytes + val colBytes = Dolphin.getBytes(bytes.length) + colIndex += colBytes += Dolphin.COL_SPLIT_BYTES + colByteLen += colBytes.length 
+ Dolphin.COL_SPLIT_LEN + length += bytes.length + } + length += colByteLen + Dolphin.INT_LEN + toByteArray(length, colByteLen, colIndex, dataBytes) + } + + /** + * Splice a row of data into a byte array(将一行的数据拼接成byte数组) + * @param length + * The total length of the line data byte, excluding its own length(行数据byte总长度,不包括自身的长度) + * @param colByteLen + * Record field index byte column length(记录字段索引byte的列长) + * @param colIndex + * Field index, including separator comma(字段索引,包括分割符逗号) + * @param dataBytes + * Byte of real data(真实数据的byte) + * @return + */ + def toByteArray( + length: Int, + colByteLen: Int, + colIndex: ArrayBuffer[Array[Byte]], + dataBytes: ArrayBuffer[Array[Byte]] + ): Array[Byte] = { + val row = ArrayBuffer[Byte]() + colIndex ++= dataBytes + row.appendAll(Dolphin.getIntBytes(length)) + row.appendAll(Dolphin.getIntBytes(colByteLen)) + colIndex.foreach(row.appendAll(_)) + row.toArray + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSet.scala new file mode 100644 index 0000000000..fe8c4e9cd1 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSet.scala @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.table + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} + +class TableResultSet extends StorageResultSet[TableMetaData, TableRecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.TABLE_TYPE + + override def createResultSetSerializer(): ResultSerializer = new TableResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[TableMetaData, TableRecord] = + new TableResultDeserializer + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala new file mode 100644 index 0000000000..b306b1f29a --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.txt + +import org.apache.linkis.common.io.resultset.ResultDeserializer +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.domain.Dolphin + +class TextResultDeserializer extends ResultDeserializer[LineMetaData, LineRecord] { + + override def createMetaData(bytes: Array[Byte]): LineMetaData = { + new LineMetaData(Dolphin.getString(bytes, 0, bytes.length)) + } + + override def createRecord(bytes: Array[Byte]): LineRecord = { + new LineRecord(Dolphin.getString(bytes, 0, bytes.length)) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala new file mode 100644 index 0000000000..e6e53338d0 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.txt + +import org.apache.linkis.common.io.{MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.domain.Dolphin + +class TextResultSerializer extends ResultSerializer { + + override def metaDataToBytes(metaData: MetaData): Array[Byte] = { + if (metaData == null) { + lineToBytes(null) + } else { + val textMetaData = metaData.asInstanceOf[LineMetaData] + lineToBytes(textMetaData.getMetaData) + } + } + + override def recordToBytes(record: Record): Array[Byte] = { + val textRecord = record.asInstanceOf[LineRecord] + lineToBytes(textRecord.getLine) + } + + def lineToBytes(value: String): Array[Byte] = { + val bytes = if (value == null) Dolphin.NULL_BYTES else Dolphin.getBytes(value) + Dolphin.getIntBytes(bytes.length) ++ bytes + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala new file mode 100644 index 0000000000..00beb315c5 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.txt + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} + +class TextResultSet extends StorageResultSet[LineMetaData, LineRecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.TEXT_TYPE + + override def createResultSetSerializer(): ResultSerializer = new TextResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[LineMetaData, LineRecord] = + new TextResultDeserializer + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala new file mode 100644 index 0000000000..d89074b978 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script + +import org.apache.linkis.common.io.{FsPath, FsReader} +import org.apache.linkis.storage.script.reader.StorageScriptFsReader + +import java.io.InputStream + +abstract class ScriptFsReader extends FsReader { + + val path: FsPath + val charset: String + +} + +object ScriptFsReader { + + def getScriptFsReader(path: FsPath, charset: String, inputStream: InputStream): ScriptFsReader = + new StorageScriptFsReader(path, charset, inputStream) + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala new file mode 100644 index 0000000000..dbcbfc126e --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script + +import org.apache.linkis.common.io.{FsPath, FsWriter, MetaData} +import org.apache.linkis.storage.LineRecord +import org.apache.linkis.storage.script.compaction.{ + PYScriptCompaction, + QLScriptCompaction, + ScalaScriptCompaction, + ShellScriptCompaction +} +import org.apache.linkis.storage.script.parser.{ + PYScriptParser, + QLScriptParser, + ScalaScriptParser, + ShellScriptParser +} +import org.apache.linkis.storage.script.writer.StorageScriptFsWriter + +import java.io.{InputStream, OutputStream} + +abstract class ScriptFsWriter extends FsWriter { + + val path: FsPath + val charset: String + + def getInputStream(): InputStream + +} + +object ScriptFsWriter { + + def getScriptFsWriter( + path: FsPath, + charset: String, + outputStream: OutputStream = null + ): ScriptFsWriter = + new StorageScriptFsWriter(path, charset, outputStream) + +} + +object ParserFactory { + + def listParsers(): Array[Parser] = + Array(PYScriptParser(), QLScriptParser(), ScalaScriptParser(), ShellScriptParser()) + +} + +object Compaction { + + def listCompactions(): Array[Compaction] = Array( + PYScriptCompaction(), + QLScriptCompaction(), + ScalaScriptCompaction(), + ShellScriptCompaction() + ) + +} + +trait Parser { + def prefixConf: String + + def prefix: String + + def belongTo(suffix: String): Boolean + + def parse(line: String): Variable + + def getAnnotationSymbol(): String +} + +trait Compaction { + + def prefixConf: String + + def prefix: String + + def belongTo(suffix: String): Boolean + + def compact(variable: 
Variable): String + + def getAnnotationSymbol(): String +} + +class ScriptMetaData(var variables: Array[Variable]) extends MetaData { + override def cloneMeta(): MetaData = new ScriptMetaData(variables) + + def getMetaData: Array[Variable] = variables + + def setMetaData(variables: Array[Variable]): Unit = { + this.variables = variables + } + +} + +class ScriptRecord(line: String) extends LineRecord(line) + +// definition variable; specialConfiguration ;runConfiguration; startUpConfiguration; +case class Variable(sortParent: String, sort: String, key: String, value: String) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala new file mode 100644 index 0000000000..ac89d19ea2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.script + +import java.util + +import scala.collection.mutable.ArrayBuffer + +object VariableParser { + + val CONFIGURATION: String = "configuration" + val VARIABLE: String = "variable" + val RUNTIME: String = "runtime" + val STARTUP: String = "startup" + val SPECIAL: String = "special" + + def getVariables(params: util.Map[String, Object]): Array[Variable] = { + import scala.collection.JavaConverters._ + val variables = new ArrayBuffer[Variable] + params + .getOrDefault(VARIABLE, new util.HashMap[String, Object]) + .asInstanceOf[util.Map[String, Object]] + .asScala + .foreach(f => variables += Variable(VARIABLE, null, f._1, f._2.toString)) + params + .getOrDefault(CONFIGURATION, new util.HashMap[String, Object]) + .asInstanceOf[util.Map[String, Object]] + .asScala + .foreach { f => + f._2 + .asInstanceOf[util.Map[String, Object]] + .asScala + .filter(s => !isContextIDINFO(s._1)) + .foreach(p => + p._2 match { + case e: util.Map[String, Object] => + e.asScala + .filter(s => !isContextIDINFO(s._1)) + .foreach(s => variables += Variable(f._1, p._1, s._1, s._2.toString)) + case _ => + if (null == p._2) { + variables += Variable(CONFIGURATION, f._1, p._1, "") + } else { + variables += Variable(CONFIGURATION, f._1, p._1, p._2.toString) + } + } + ) + } + variables.toArray + } + + // TODO need delete + def isContextIDINFO(key: String): Boolean = { + "contextID".equalsIgnoreCase(key) || "nodeName".equalsIgnoreCase(key) + } + + def getMap(variables: Array[Variable]): util.Map[String, Object] = { + import scala.collection.JavaConverters._ + val vars = new util.HashMap[String, String] + val confs = new util.HashMap[String, Object] + variables.filter(_.sort == null).foreach(f => vars.put(f.key, f.value)) + variables.filter(_.sort != null).foreach { f => + f.sort match { + case STARTUP | RUNTIME | SPECIAL => + if (confs.get(f.sort) == null) { + confs.put(f.sort, createMap(f)) + } else { + 
confs.get(f.sort).asInstanceOf[util.HashMap[String, Object]].put(f.key, f.value) + } + case _ => + if (confs.get(f.sortParent) == null) { + confs.put(f.sortParent, new util.HashMap[String, Object]) + confs + .get(f.sortParent) + .asInstanceOf[util.HashMap[String, Object]] + .put(f.sort, createMap(f)) + } else { + val subMap = confs.get(f.sortParent).asInstanceOf[util.HashMap[String, Object]] + if (subMap.get(f.sort) == null) { + subMap.put(f.sort, createMap(f)) + } else { + subMap + .get(f.sort) + .asInstanceOf[util.HashMap[String, Object]] + .put(f.key, f.value) + } + } + } + } + val params = new util.HashMap[String, Object] + if (vars.size() > 0) params.asScala += VARIABLE -> vars + if (confs.size() > 0) params.asScala += CONFIGURATION -> confs + params + } + + private def createMap(variable: Variable): util.Map[String, Object] = { + val map = new util.HashMap[String, Object] + map.put(variable.key, variable.value) + map + } + +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineMetaData.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala similarity index 60% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineMetaData.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala index a6dab8b37d..e085d2aacc 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineMetaData.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala @@ -15,30 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage; +package org.apache.linkis.storage.script.compaction -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.storage.resultset.ResultMetaData; +import org.apache.linkis.storage.script.{Compaction, Variable} -public class LineMetaData implements ResultMetaData { +abstract class CommonScriptCompaction extends Compaction { - private String metaData = null; - - public LineMetaData() {} - - public LineMetaData(String metaData) { - this.metaData = metaData; - } - - public String getMetaData() { - return metaData; + override def compact(variable: Variable): String = { + variable.sortParent match { + case "variable" => prefix + " " + variable.key + "=" + variable.value + case _ => + prefixConf + " " + variable.sortParent + " " + variable.sort + " " + variable.key + "=" + variable.value + } } - public void setMetaData(String metaData) { - this.metaData = metaData; - } + override def getAnnotationSymbol(): String = prefix.split('@')(0) - public MetaData cloneMeta() { - return new LineMetaData(metaData); - } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala new file mode 100644 index 0000000000..a8e72a2d8f --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.compaction + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class PYScriptCompaction private extends CommonScriptCompaction { + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON + ) + + override def prefix: String = "#@set" + + override def prefixConf: String = "#conf@set" +} + +object PYScriptCompaction { + val pYScriptCompaction: PYScriptCompaction = new PYScriptCompaction + + def apply(): CommonScriptCompaction = pYScriptCompaction +} diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala similarity index 57% rename from linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala index 8856d3a927..7e420f8913 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala @@ -15,21 +15,24 @@ * limitations under the License. 
*/ -package org.apache.linkis.protocol.engine +package org.apache.linkis.storage.script.compaction -object EngineCallback { - private val DWC_APPLICATION_NAME = "dwc.application.name" - private val DWC_INSTANCE = "dwc.application.instance" +import org.apache.linkis.common.utils.CodeAndRunTypeUtils - def mapToEngineCallback(options: Map[String, String]): EngineCallback = - EngineCallback(options(DWC_APPLICATION_NAME), options(DWC_INSTANCE)) +class QLScriptCompaction private extends CommonScriptCompaction { - def callbackToMap(engineCallback: EngineCallback): Map[String, String] = - Map( - DWC_APPLICATION_NAME -> engineCallback.applicationName, - DWC_INSTANCE -> engineCallback.instance + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL ) + override def prefix: String = "--@set" + + override def prefixConf: String = "--conf@set" } -case class EngineCallback(applicationName: String, instance: String) +object QLScriptCompaction { + val qLScriptCompaction: QLScriptCompaction = new QLScriptCompaction + def apply(): CommonScriptCompaction = qLScriptCompaction +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala new file mode 100644 index 0000000000..c75c5a32a5 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.compaction + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class ScalaScriptCompaction private extends CommonScriptCompaction { + override def prefix: String = "//@set" + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA + ) + + override def prefixConf: String = "//conf@set" +} + +object ScalaScriptCompaction { + private val compaction: ScalaScriptCompaction = new ScalaScriptCompaction + + def apply(): CommonScriptCompaction = compaction +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala new file mode 100644 index 0000000000..7a05580038 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.compaction + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class ShellScriptCompaction private extends CommonScriptCompaction { + override def prefixConf: String = "#conf@set" + + override def prefix: String = "#@set" + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL + ) + +} + +object ShellScriptCompaction { + val shellScriptCompaction: ShellScriptCompaction = new ShellScriptCompaction + + def apply(): CommonScriptCompaction = shellScriptCompaction +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala new file mode 100644 index 0000000000..b23a521cd2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.parser + +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER +import org.apache.linkis.storage.exception.StorageErrorException +import org.apache.linkis.storage.script.{Parser, Variable, VariableParser} + +abstract class CommonScriptParser extends Parser { + + @scala.throws[StorageErrorException] + def parse(line: String): Variable = { + val variableReg = ("\\s*" + prefix + "\\s*(.+)\\s*" + "=" + "\\s*(.+)\\s*").r + line match { + case variableReg(key, value) => + Variable(VariableParser.VARIABLE, null, key.trim, value.trim) + case _ => + val split = line.split("=") + if (split.length != 2) { + throw new StorageErrorException( + INVALID_CUSTOM_PARAMETER.getErrorCode(), + INVALID_CUSTOM_PARAMETER.getErrorDesc + ) + } + val value = split(1).trim + val subSplit = split(0).split(" ") + if (subSplit.filter(_ != "").size != 4) { + throw new StorageErrorException( + INVALID_CUSTOM_PARAMETER.getErrorCode(), + INVALID_CUSTOM_PARAMETER.getErrorDesc + ) + } + if (!subSplit.filter(_ != "")(0).equals(prefixConf)) { + throw new StorageErrorException( + INVALID_CUSTOM_PARAMETER.getErrorCode(), + INVALID_CUSTOM_PARAMETER.getErrorDesc + ) + } + val sortParent = subSplit.filter(_ != "")(1).trim + val sort = subSplit.filter(_ != "")(2).trim + val key = subSplit.filter(_ != "")(3).trim + Variable(sortParent, sort, 
key, value) + } + } + + override def getAnnotationSymbol(): String = prefix.split('@')(0) + +} diff --git a/linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala similarity index 59% rename from linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala index 6e82f0ebcc..027b632146 100644 --- a/linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala @@ -15,21 +15,25 @@ * limitations under the License. */ -package org.apache.linkis.errorcode.common +package org.apache.linkis.storage.script.parser -import org.junit.jupiter.api.{Assertions, DisplayName, Test} +import org.apache.linkis.common.utils.CodeAndRunTypeUtils -class LinkisErrorCodeTest { +class PYScriptParser private extends CommonScriptParser { + override def prefix: String = "#@set" - @Test - @DisplayName("linkisErrorCodeTest") - def linkisErrorCodeTest(): Unit = { - val errorCode = new LinkisErrorCode( - "11000", - "Failed to get datasource info from datasource server(从数据源服务器获取数据源信息失败)" + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON ) - Assertions.assertEquals("11000", errorCode.getErrorCode) - } + override def prefixConf: String = "#conf@set" +} + +object PYScriptParser { + val pYScriptParser: PYScriptParser = new PYScriptParser + + def apply(): CommonScriptParser = pYScriptParser } diff --git 
a/linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala similarity index 59% rename from linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala index c4e9e386cb..a089f9c367 100644 --- a/linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala @@ -15,24 +15,24 @@ * limitations under the License. */ -package org.apache.linkis.filesystem.response +package org.apache.linkis.storage.script.parser -import org.junit.jupiter.api.{Assertions, DisplayName, Test} +import org.apache.linkis.common.utils.CodeAndRunTypeUtils -class ScriptFromBMLResponseTest { +class QLScriptParser private extends CommonScriptParser { + override def prefix: String = "--@set" - @Test - @DisplayName("commonConst") - def commonConst(): Unit = { - val scriptContent = "show databases;" - val response = new ScriptFromBMLResponse( - scriptContent, - new java.util.HashMap[String, java.util.Map[String, Object]] + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL ) - Assertions.assertEquals(scriptContent, response.scriptContent) - Assertions.assertTrue(response.metadata.size() == 0) + override def prefixConf: String = "--conf@set" +} - } +object QLScriptParser { + val qLScriptParser: QLScriptParser = new QLScriptParser + def apply(): CommonScriptParser = qLScriptParser } diff --git 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala new file mode 100644 index 0000000000..e6729326e8 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.script.parser + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class ScalaScriptParser private extends CommonScriptParser { + // todo To be determined(待定) + override def prefix: String = "//@set" + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA + ) + + override def prefixConf: String = "//conf@set" +} + +object ScalaScriptParser { + val otherScriptParser: ScalaScriptParser = new ScalaScriptParser + + def apply(): CommonScriptParser = otherScriptParser +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ShellScriptParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ShellScriptParser.scala new file mode 100644 index 0000000000..9b3e385ffe --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ShellScriptParser.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.script.parser + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class ShellScriptParser private extends CommonScriptParser { + override def prefix: String = "#@set" + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL + ) + + override def prefixConf: String = "#conf@set" +} + +object ShellScriptParser { + val shellScriptParser: ShellScriptParser = new ShellScriptParser + + def apply(): CommonScriptParser = shellScriptParser + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/reader/StorageScriptFsReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/reader/StorageScriptFsReader.scala new file mode 100644 index 0000000000..7b7b85ceb9 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/reader/StorageScriptFsReader.scala @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.script.reader + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.storage.script._ +import org.apache.linkis.storage.utils.StorageUtils + +import org.apache.commons.io.IOUtils + +import java.io._ + +import scala.collection.mutable.ArrayBuffer + +class StorageScriptFsReader(val path: FsPath, val charset: String, val inputStream: InputStream) + extends ScriptFsReader { + + private var inputStreamReader: InputStreamReader = _ + private var bufferedReader: BufferedReader = _ + + private var metadata: ScriptMetaData = _ + + private var variables: ArrayBuffer[Variable] = new ArrayBuffer[Variable]() + private var lineText: String = _ + + @scala.throws[IOException] + override def getRecord: Record = { + + if (metadata == null) throw new IOException("Must read metadata first(必须先读取metadata)") + val record = new ScriptRecord(lineText) + lineText = bufferedReader.readLine() + record + } + + @scala.throws[IOException] + override def getMetaData: MetaData = { + if (metadata == null) init() + val parser = getScriptParser() + lineText = bufferedReader.readLine() + while (hasNext && parser != null && isMetadata(lineText, parser.prefix, parser.prefixConf)) { + variables += parser.parse(lineText) + lineText = bufferedReader.readLine() + } + metadata = new ScriptMetaData(variables.toArray) + metadata + } + + def init(): Unit = { + inputStreamReader = new InputStreamReader(inputStream) + bufferedReader = new BufferedReader(inputStreamReader) + } + + @scala.throws[IOException] + override def skip(recordNum: Int): Int = { + if (recordNum < 0) return -1 + if (metadata == null) getMetaData + try bufferedReader.skip(recordNum).toInt + catch { case t: Throwable => recordNum } + } + + @scala.throws[IOException] + override def getPosition: Long = -1L + + @scala.throws[IOException] + override def hasNext: Boolean = lineText != null + + @scala.throws[IOException] + override def available: Long = if (inputStream != 
null) inputStream.available() else 0L + + override def close(): Unit = { + IOUtils.closeQuietly(bufferedReader) + IOUtils.closeQuietly(inputStreamReader) + IOUtils.closeQuietly(inputStream) + } + + /** + * Determine if the read line is metadata(判断读的行是否是metadata) + * + * @param line + * @return + */ + def isMetadata(line: String, prefix: String, prefixConf: String): Boolean = { + val regex = ("\\s*" + prefix + "\\s*(.+)\\s*" + "=" + "\\s*(.+)\\s*").r + line match { + case regex(_, _) => true + case _ => + val split: Array[String] = line.split("=") + if (split.size != 2) return false + if (split(0).split(" ").filter(_ != "").size != 4) return false + if (!split(0).split(" ").filter(_ != "")(0).equals(prefixConf)) return false + true + } + } + + /** + * get the script parser according to the path(根据文件路径 获取对应的script parser ) + * @return + * Scripts Parser + */ + + def getScriptParser(): Parser = { + val parsers = + ParserFactory.listParsers().filter(p => p.belongTo(StorageUtils.pathToSuffix(path.getPath))) + if (parsers.length > 0) { + parsers(0) + } else { + null + } + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.scala new file mode 100644 index 0000000000..cdb9186da4 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.scala @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.writer + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.LineRecord +import org.apache.linkis.storage.script.{Compaction, ScriptFsWriter, ScriptMetaData} +import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} + +import org.apache.commons.io.IOUtils +import org.apache.hadoop.hdfs.client.HdfsDataOutputStream + +import java.io.{ByteArrayInputStream, InputStream, IOException, OutputStream} +import java.util + +class StorageScriptFsWriter( + val path: FsPath, + val charset: String, + outputStream: OutputStream = null +) extends ScriptFsWriter + with Logging { + + private val stringBuilder = new StringBuilder + + @scala.throws[IOException] + override def addMetaData(metaData: MetaData): Unit = { + + val metadataLine = new util.ArrayList[String]() + val compaction = getScriptCompaction() + if (compaction != null) { + + metaData + .asInstanceOf[ScriptMetaData] + .getMetaData + .map(compaction.compact) + .foreach(metadataLine.add) + // add annotition symbol + if (metadataLine.size() > 0) { + metadataLine.add(compaction.getAnnotationSymbol()) + } + if (outputStream != null) { + IOUtils.writeLines(metadataLine, "\n", outputStream, charset) + } else { + import scala.collection.JavaConverters._ + metadataLine.asScala.foreach(m => stringBuilder.append(s"$m\n")) + } + } + + } + + @scala.throws[IOException] + override def addRecord(record: Record): Unit = { + // 
转成LineRecord而不是TableRecord是为了兼容非Table类型的结果集写到本类中 + val scriptRecord = record.asInstanceOf[LineRecord] + if (outputStream != null) { + IOUtils.write(scriptRecord.getLine, outputStream, charset) + } else { + stringBuilder.append(scriptRecord.getLine) + } + } + + override def close(): Unit = { + if (outputStream != null) { + IOUtils.closeQuietly(outputStream) + } + } + + override def flush(): Unit = if (outputStream != null) { + Utils.tryAndWarnMsg[Unit] { + outputStream match { + case hdfs: HdfsDataOutputStream => + hdfs.hflush() + case _ => + outputStream.flush() + } + }(s"Error encounters when flush script ") + } + + def getInputStream(): InputStream = { + new ByteArrayInputStream( + stringBuilder.toString().getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue) + ) + } + + /** + * get the script compaction according to the path(根据文件路径 获取对应的script Compaction ) + * @return + * Scripts Compaction + */ + + def getScriptCompaction(): Compaction = { + + val compactions = Compaction + .listCompactions() + .filter(p => p.belongTo(StorageUtils.pathToSuffix(path.getPath))) + + if (compactions.length > 0) { + compactions(0) + } else { + null + } + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala new file mode 100644 index 0000000000..e0fa5b7f40 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.source + +import org.apache.linkis.common.io.{FsWriter, MetaData, Record} + +import org.apache.commons.io.IOUtils +import org.apache.commons.math3.util.Pair + +import java.util +import java.util.Arrays + +import scala.collection.JavaConverters._ + +abstract class AbstractFileSource(var fileSplits: Array[FileSplit]) extends FileSource { + + override def shuffle(s: Record => Record): FileSource = { + fileSplits.foreach(_.shuffler = s) + this + } + + override def page(page: Int, pageSize: Int): FileSource = { + fileSplits.foreach(_.page(page, pageSize)) + this + } + + override def addParams(params: util.Map[String, String]): FileSource = { + fileSplits.foreach(_.addParams(params)) + this + } + + override def addParams(key: String, value: String): FileSource = { + fileSplits.foreach(_.addParams(key, value)) + this + } + + override def getFileSplits: Array[FileSplit] = this.fileSplits + + override def getParams: util.Map[String, String] = + fileSplits.map(_.params.asScala).foldLeft(Map[String, String]())(_ ++ _).asJava + + override def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]): Unit = + fileSplits.foreach(_.write(fsWriter)) + + override def close(): Unit = this.fileSplits.foreach(IOUtils.closeQuietly) + + override def collect(): Array[Pair[Object, util.ArrayList[Array[String]]]] = + fileSplits.map(_.collect()) + + override def getTotalLine: Int = this.fileSplits.map(_.totalLine).sum + + override def getTypes: Array[String] = this.fileSplits.map(_.`type`) + + override def 
getFileInfo(needToCountRowNumber: Int = 5000): Array[Pair[Int, Int]] = + fileSplits.map(_.getFileInfo(needToCountRowNumber)) + + override def limitBytes(limitBytes: Long): FileSource = { + fileSplits.foreach((fileSplit: FileSplit) => fileSplit.setLimitBytes(limitBytes)) + this + } + + override def limitColumnLength(limitColumnLength: Int): FileSource = { + fileSplits.foreach((fileSplit: FileSplit) => fileSplit.setLimitColumnLength(limitColumnLength)) + this + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala new file mode 100644 index 0000000000..4b0b593db6 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.source + +import org.apache.linkis.common.io._ +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE +import org.apache.linkis.storage.exception.StorageErrorException +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader} +import org.apache.linkis.storage.script.ScriptFsReader +import org.apache.linkis.storage.utils.StorageConfiguration + +import org.apache.commons.math3.util.Pair + +import java.io.{Closeable, InputStream} +import java.util + +trait FileSource extends Closeable { + + def shuffle(s: Record => Record): FileSource + + def page(page: Int, pageSize: Int): FileSource + + def collect(): Array[Pair[Object, util.ArrayList[Array[String]]]] + + def getFileInfo(needToCountRowNumber: Int = 5000): Array[Pair[Int, Int]] + + def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]): Unit + + def addParams(params: util.Map[String, String]): FileSource + + def addParams(key: String, value: String): FileSource + + def getParams: util.Map[String, String] + + def getTotalLine: Int + + def limitBytes(limitBytes: Long): FileSource + + def limitColumnLength(limitColumnLength: Int): FileSource + + def getTypes: Array[String] + + def getFileSplits: Array[FileSplit] +} + +object FileSource extends Logging { + + private val fileType = LinkisStorageConf.getFileTypeArr + private val suffixPredicate = (path: String, suffix: String) => path.endsWith(s".$suffix") + + def isResultSet(path: String): Boolean = { + suffixPredicate(path, fileType.head) + } + + def isResultSet(fsPath: FsPath): Boolean = { + isResultSet(fsPath.getPath) + } + + /** + * 目前只支持table多结果集 + * + * @param fsPaths + * @param fs + * @return + */ + def create(fsPaths: Array[FsPath], fs: Fs): FileSource = { + // 非table结果集的过滤掉 + val fileSplits = fsPaths.map(createResultSetFileSplit(_, 
fs)).filter(isTableResultSet) + new ResultsetFileSource(fileSplits) + } + + private def isTableResultSet(fileSplit: FileSplit): Boolean = + fileSplit.`type`.equals(ResultSetFactory.TABLE_TYPE) + + def isTableResultSet(fileSource: FileSource): Boolean = { + // 分片中全部为table结果集才返回true + fileSource.getFileSplits.forall(isTableResultSet) + } + + def create(fsPath: FsPath, fs: Fs): FileSource = { + if (!canRead(fsPath.getPath)) { + throw new StorageErrorException( + UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode, + UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc + ) + } + if (isResultSet(fsPath)) { + new ResultsetFileSource(Array(createResultSetFileSplit(fsPath, fs))) + } else { + new TextFileSource(Array(createTextFileSplit(fsPath, fs))) + } + } + + def create(fsPath: FsPath, is: InputStream): FileSource = { + if (!canRead(fsPath.getPath)) { + throw new StorageErrorException( + UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode, + UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc + ) + } + if (isResultSet(fsPath)) { + new ResultsetFileSource(Array(createResultSetFileSplit(fsPath, is))) + } else { + new TextFileSource(Array(createTextFileSplit(fsPath, is))) + } + } + + private def createResultSetFileSplit(fsPath: FsPath, is: InputStream): FileSplit = { + val resultset = ResultSetFactory.getInstance.getResultSetByPath(fsPath) + val resultsetReader = ResultSetReader.getResultSetReader(resultset, is) + new FileSplit(resultsetReader, resultset.resultSetType()) + } + + private def createResultSetFileSplit(fsPath: FsPath, fs: Fs): FileSplit = { + logger.info(s"try create result set file split with path:${fsPath.getPath}") + val resultset = ResultSetFactory.getInstance.getResultSetByPath(fsPath, fs) + val resultsetReader = ResultSetReader.getResultSetReader(resultset, fs.read(fsPath)) + new FileSplit(resultsetReader, resultset.resultSetType()) + } + + private def createTextFileSplit(fsPath: FsPath, is: InputStream): FileSplit = { + val scriptFsReader = ScriptFsReader.getScriptFsReader( + fsPath, + 
StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue, + is + ) + new FileSplit(scriptFsReader) + } + + private def createTextFileSplit(fsPath: FsPath, fs: Fs): FileSplit = { + val scriptFsReader = ScriptFsReader.getScriptFsReader( + fsPath, + StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue, + fs.read(fsPath) + ) + new FileSplit(scriptFsReader) + } + + private def canRead(path: String): Boolean = { + fileType.exists(suffixPredicate(path, _)) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala new file mode 100644 index 0000000000..f4163e8263 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.source + +import org.apache.linkis.common.io.{FsReader, FsWriter, MetaData, Record} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.domain.{Column, DataType} +import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord} +import org.apache.linkis.storage.script.{ScriptMetaData, VariableParser} +import org.apache.linkis.storage.script.reader.StorageScriptFsReader + +import org.apache.commons.io.IOUtils +import org.apache.commons.math3.util.Pair + +import java.io.Closeable +import java.util + +import scala.collection.JavaConverters._ + +class FileSplit( + var fsReader: FsReader[_ <: MetaData, _ <: Record], + var `type`: String = "script/text" +) extends Closeable { + + var start: Int = 0 + + var end: Int = -1 + + var count: Int = 0 + + var totalLine = 0 + + var shuffler: Record => Record = r => r + + var pageTrigger: Boolean = false + + var params: util.Map[String, String] = new util.HashMap[String, String] + + private var limitBytes = 0L + private var limitColumnLength = 0 + + def page(page: Int, pageSize: Int): Unit = { + if (!pageTrigger) { + start = (page - 1) * pageSize + end = pageSize * page - 1 + pageTrigger = true + } + } + + def addParams(params: util.Map[String, String]): Unit = { + this.params.putAll(params) + } + + def addParams(key: String, value: String): Unit = { + this.params.put(key, value) + } + + def `while`[M](m: MetaData => M, r: Record => Unit): M = { + val metaData = fsReader.getMetaData + val t = m(metaData) + if (pageTrigger) { + fsReader.skip(start) + } + count = start + var hasRemovedFlag = false + while (fsReader.hasNext && ifContinueRead) { + val record = fsReader.getRecord + var needRemoveFlag = false + if (hasRemovedFlag == false && fsReader.isInstanceOf[StorageScriptFsReader]) { + val parser = fsReader.asInstanceOf[StorageScriptFsReader].getScriptParser() + val meta = metaData.asInstanceOf[ScriptMetaData].getMetaData + if ( + meta 
!= null && meta.length > 0 + && parser != null && parser.getAnnotationSymbol().equals(record.toString) + ) { + needRemoveFlag = true + hasRemovedFlag = true + } + } + if (needRemoveFlag == false) { + r(shuffler(record)) + totalLine += 1 + count += 1 + } + } + t + } + + /** + * Get the colNumber and rowNumber of the row to be counted + * @param needToCountRowNumber + * @return + * colNumber, rowNumber + */ + def getFileInfo(needToCountRowNumber: Int = 5000): Pair[Int, Int] = { + val metaData = fsReader.getMetaData + val colNumber = metaData match { + case tableMetaData: TableMetaData => tableMetaData.columns.length + case _ => 1 + } + val rowNumber = if (needToCountRowNumber == -1) { + fsReader.skip(Int.MaxValue) + } else { + fsReader.skip(needToCountRowNumber) + } + new Pair(colNumber, rowNumber) + } + + def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]): Unit = { + `while`(fsWriter.addMetaData, fsWriter.addRecord) + } + + def collect(): Pair[Object, util.ArrayList[Array[String]]] = { + val record = new util.ArrayList[Array[String]] + var overFlag = false + var tmpBytes = 0L + + val metaData = `while`( + collectMetaData, + r => { + if (limitBytes > 0 && !overFlag) { + val resArr = collectRecord(r) + resArr.foreach(res => tmpBytes = tmpBytes + res.getBytes.length) + if (tmpBytes > limitBytes) { + overFlag = true + } + record.add(resArr) + } else { + record.add(collectRecord(r)) + } + } + ) + new Pair(metaData, record) + } + + def collectRecord(record: Record): Array[String] = { + record match { + case t: TableRecord => + if (limitColumnLength > 0) { + t.row.map { col => + val str = DataType.valueToString(col) + if (str.length > limitColumnLength) { + str.substring(0, limitColumnLength) + } else { + str + } + } + } else { + t.row.map(DataType.valueToString) + } + case l: LineRecord => Array(l.getLine) + } + } + + def collectMetaData(metaData: MetaData): Object = { + // script/text ,tableResultset,lineResultSet + metaData match { + case s: ScriptMetaData 
=> VariableParser.getMap(s.getMetaData) + case l: LineMetaData => l.getMetaData + case t: TableMetaData => t.columns.map(ColumnToMap) + } + } + + private def ColumnToMap(column: Column): java.util.Map[String, String] = { + Map[String, String]( + "columnName" -> column.columnName, + "comment" -> column.comment, + "dataType" -> column.dataType.typeName + ) + }.asJava + + // 如果不分页,则一直读,如果分页,则 count需要小于count + def ifContinueRead: Boolean = !pageTrigger || count <= end + + def ifStartRead: Boolean = !pageTrigger || count >= start + + def setLimitBytes(limitBytes: Long): Unit = { + this.limitBytes = limitBytes + } + + def setLimitColumnLength(limitColumnLength: Int): Unit = { + this.limitColumnLength = limitColumnLength + } + + override def close(): Unit = IOUtils.closeQuietly(fsReader) + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala new file mode 100644 index 0000000000..adbb596aa2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.source + +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.resultset.table.TableRecord +import org.apache.linkis.storage.utils.StorageUtils + +class ResultsetFileSource(fileSplits: Array[FileSplit]) extends AbstractFileSource(fileSplits) { + + shuffle({ + case t: TableRecord => + new TableRecord(t.row.map { rvalue => + { + rvalue match { + case null | "NULL" => + val nullValue = getParams.getOrDefault("nullValue", "NULL") + if (nullValue.equals(Dolphin.LINKIS_NULL)) { + rvalue + } else { + nullValue + } + case "" => + val nullValue = getParams.getOrDefault("nullValue", "") + if (nullValue.equals(Dolphin.LINKIS_NULL)) { + "" + } else { + nullValue + } + case value: Double => StorageUtils.doubleToString(value) + case _ => rvalue + } + } + }) + case record => record + }) + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/TextFileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/TextFileSource.scala new file mode 100644 index 0000000000..08ce8f51c7 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/TextFileSource.scala @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.source + +import org.apache.linkis.storage.LineRecord +import org.apache.linkis.storage.script.ScriptRecord + +import org.apache.commons.math3.util.Pair + +import java.util + +import scala.collection.JavaConverters._ + +class TextFileSource(fileSplits: Array[FileSplit]) extends AbstractFileSource(fileSplits) { + + shuffle({ + case s: ScriptRecord if "".equals(s.getLine) => new LineRecord("\n") + case record => record + }) + + override def collect(): Array[Pair[Object, util.ArrayList[Array[String]]]] = { + val collects: Array[Pair[Object, util.ArrayList[Array[String]]]] = super.collect() + if (!getParams.getOrDefault("ifMerge", "true").toBoolean) return collects + val snds: Array[util.ArrayList[Array[String]]] = collects.map(_.getSecond) + snds.foreach { snd => + val str = new StringBuilder + snd.asScala.foreach { + case Array("\n") => str.append("\n") + case Array(y) => str.append(y).append("\n") + } + snd.clear() + snd.add(Array(str.toString())) + } + collects + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala new file mode 100644 index 0000000000..9c344fa802 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala @@ -0,0 +1,171 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.utils + +import org.apache.linkis.common.io.FsPath +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.fs.impl.LocalFileSystem + +import java.io.IOException +import java.util + +object FileSystemUtils extends Logging { + + def copyFile(filePath: FsPath, origin: FsPath, user: String): Unit = { + val fileSystem = FSFactory.getFsByProxyUser(filePath, user).asInstanceOf[FileSystem] + Utils.tryFinally { + fileSystem.init(null) + if (!fileSystem.exists(filePath)) { + if (!fileSystem.exists(filePath.getParent)) { + fileSystem.mkdirs(filePath.getParent) + } + fileSystem.createNewFile(filePath) + } + fileSystem.copyFile(origin, filePath) + }(Utils.tryQuietly(fileSystem.close())) + } + + /** + * Create a new file(创建新文件) + * + * @param filePath + * @param createParentWhenNotExists + * Whether to recursively create a directory(是否递归创建目录) + */ + def createNewFile(filePath: FsPath, createParentWhenNotExists: Boolean): Unit = { + createNewFile(filePath, StorageUtils.getJvmUser, createParentWhenNotExists) + } + + def createNewFile(filePath: FsPath, user: String, createParentWhenNotExists: Boolean): Unit = { + val fileSystem = 
FSFactory.getFsByProxyUser(filePath, user).asInstanceOf[FileSystem] + Utils.tryFinally { + fileSystem.init(null) + createNewFileWithFileSystem(fileSystem, filePath, user, createParentWhenNotExists) + }(Utils.tryQuietly(fileSystem.close())) + } + + @deprecated("please use createNewFileAndSetOwnerWithFileSystem") + def createNewFileWithFileSystem( + fileSystem: FileSystem, + filePath: FsPath, + user: String, + createParentWhenNotExists: Boolean + ): Unit = { + if (!fileSystem.exists(filePath)) { + if (!fileSystem.exists(filePath.getParent)) { + if (!createParentWhenNotExists) { + throw new IOException("parent dir " + filePath.getParent.getPath + " dose not exists.") + } + mkdirs(fileSystem, filePath.getParent, user) + } + fileSystem.createNewFile(filePath) + fileSystem match { + case l: LocalFileSystem => fileSystem.setOwner(filePath, user) + case _ => logger.info(s"doesn't need to call setOwner") + } + } + } + + /** + * create new file and set file owner by FileSystem + * @param fileSystem + * @param filePath + * @param user + * @param createParentWhenNotExists + */ + def createNewFileAndSetOwnerWithFileSystem( + fileSystem: FileSystem, + filePath: FsPath, + user: String, + createParentWhenNotExists: Boolean + ): Unit = { + if (!fileSystem.exists(filePath)) { + if (!fileSystem.exists(filePath.getParent)) { + if (!createParentWhenNotExists) { + throw new IOException("parent dir " + filePath.getParent.getPath + " dose not exists.") + } + mkdirsAndSetOwner(fileSystem, filePath.getParent, user) + } + fileSystem.createNewFile(filePath) + fileSystem.setOwner(filePath, user) + } + } + + /** + * Recursively create a directory(递归创建目录) + * @param fileSystem + * @param dest + * @param user + * @throws + * @return + */ + @throws[IOException] + @deprecated("please use mkdirsAndSetOwner") + def mkdirs(fileSystem: FileSystem, dest: FsPath, user: String): Boolean = { + var parentPath = dest.getParent + val dirsToMake = new util.Stack[FsPath]() + dirsToMake.push(dest) + while 
(!fileSystem.exists(parentPath)) { + dirsToMake.push(parentPath) + parentPath = parentPath.getParent + } + if (!fileSystem.canExecute(parentPath)) { + throw new IOException("You have not permission to access path " + dest.getPath) + } + while (!dirsToMake.empty()) { + val path = dirsToMake.pop() + fileSystem.mkdir(path) + fileSystem match { + case l: LocalFileSystem => fileSystem.setOwner(path, user) + case _ => logger.info(s"doesn't need to call setOwner") + } + } + true + } + + /** + * Recursively create a directory(递归创建目录) 默认添加 Owner 信息 + * @param fileSystem + * @param dest + * @param user + * @throws + * @return + */ + @throws[IOException] + def mkdirsAndSetOwner(fileSystem: FileSystem, dest: FsPath, user: String): Boolean = { + var parentPath = dest.getParent + val dirsToMake = new util.Stack[FsPath]() + dirsToMake.push(dest) + while (!fileSystem.exists(parentPath)) { + dirsToMake.push(parentPath) + parentPath = parentPath.getParent + } + if (!fileSystem.canExecute(parentPath)) { + throw new IOException("You have not permission to access path " + dest.getPath) + } + while (!dirsToMake.empty()) { + val path = dirsToMake.pop() + fileSystem.mkdir(path) + fileSystem.setOwner(path, user) + } + true + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala new file mode 100644 index 0000000000..e73991db15 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.utils + +import org.apache.linkis.common.conf.{ByteType, CommonVars} + +object StorageConfiguration { + + val PROXY_USER = CommonVars("wds.linkis.storage.proxy.user", "${UM}") + + val STORAGE_ROOT_USER = CommonVars("wds.linkis.storage.root.user", "hadoop") + + val HDFS_ROOT_USER = CommonVars("wds.linkis.storage.hdfs.root.user", "hadoop") + + val LOCAL_ROOT_USER = CommonVars("wds.linkis.storage.local.root.user", "root") + + val STORAGE_USER_GROUP = CommonVars("wds.linkis.storage.fileSystem.group", "bdap") + + val STORAGE_RS_FILE_TYPE = CommonVars("wds.linkis.storage.rs.file.type", "utf-8") + + val STORAGE_RS_FILE_SUFFIX = CommonVars("wds.linkis.storage.rs.file.suffix", ".dolphin") + + val LINKIS_STORAGE_FS_LABEL = CommonVars("linkis.storage.default.fs.label", "linkis-storage") + + val ResultTypes = List("%TEXT", "%TABLE", "%HTML", "%IMG", "%ANGULAR", "%SVG") + + val STORAGE_RESULT_SET_PACKAGE = + CommonVars("wds.linkis.storage.result.set.package", "org.apache.linkis.storage.resultset") + + val STORAGE_RESULT_SET_CLASSES = CommonVars( + "wds.linkis.storage.result.set.classes", + "txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet" + ) + + val STORAGE_BUILD_FS_CLASSES = CommonVars( + "wds.linkis.storage.build.fs.classes", + 
"org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem" + ) + + val IS_SHARE_NODE = CommonVars("wds.linkis.storage.is.share.node", true) + + val ENABLE_IO_PROXY = CommonVars("wds.linkis.storage.enable.io.proxy", false) + + val IO_USER = CommonVars("wds.linkis.storage.io.user", "root") + val IO_FS_EXPIRE_TIME = CommonVars("wds.linkis.storage.io.fs.num", 1000 * 60 * 10) + + val IO_PROXY_READ_FETCH_SIZE = + CommonVars("wds.linkis.storage.io.read.fetch.size", new ByteType("100k")) + + val IO_PROXY_WRITE_CACHE_SIZE = + CommonVars("wds.linkis.storage.io.write.cache.size", new ByteType("64k")) + + val IO_DEFAULT_CREATOR = CommonVars("wds.linkis.storage.io.default.creator", "IDE") + val IO_FS_RE_INIT = CommonVars("wds.linkis.storage.io.fs.re.init", "re-init") + + val IO_INIT_RETRY_LIMIT = CommonVars("wds.linkis.storage.io.init.retry.limit", 10) + + val STORAGE_HDFS_GROUP = CommonVars("wds.linkis.storage.fileSystem.hdfs.group", "hadoop") + + val DOUBLE_FRACTION_LEN = CommonVars[Int]("wds.linkis.double.fraction.length", 30) + + val HDFS_PATH_PREFIX_CHECK_ON = + CommonVars[Boolean]("wds.linkis.storage.hdfs.prefix_check.enable", true) + + val HDFS_PATH_PREFIX_REMOVE = CommonVars[Boolean]("wds.linkis.storage.hdfs.prefxi.remove", true) + + val FS_CHECKSUM_DISBALE = + CommonVars[java.lang.Boolean]("linkis.fs.hdfs.impl.disable.checksum", false) + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageHelper.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageHelper.scala new file mode 100644 index 0000000000..7e5169fc55 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageHelper.scala @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.utils + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader} +import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord} + +/** + * 工具类,用于做storage jar包打出来做测试用 Tool class, which is used to print the storage jar package for testing + */ + +object StorageHelper extends Logging { + + def main(args: Array[String]): Unit = { + if (args.length < 2) logger.info("Usage method params eg:getTableResLines path") + val method = args(0) + val params = args.slice(1, args.length) + Thread.sleep(10000L) + + method match { + case "getTableResLines" => getTableResLines(params) + case "getTableRes" => getTableRes(params) + case "createNewFile" => createNewFile(params) + case _ => logger.info("There is no such method") + } + } + + /** + * Get the number of table result set file lines(获得表格结果集文件行数) + * + * @param args + */ + def getTableResLines(args: Array[String]): Unit = { + val resPath = StorageUtils.getFsPath(args(0)) + val resultSetFactory = ResultSetFactory.getInstance + val resultSet = resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE) + val fs = FSFactory.getFs(resPath) + fs.init(null) + val reader = 
ResultSetReader.getResultSetReader(resultSet, fs.read(resPath)) + val rmetaData = reader.getMetaData + rmetaData.asInstanceOf[TableMetaData].columns.foreach(column => logger.info(column.toString)) + var num = 0 + Thread.sleep(10000L) + while (reader.hasNext) { + reader.getRecord + num = num + 1 + } + logger.info(num.toString) + reader.close() + } + + def getTableRes(args: Array[String]): Unit = { + val len = Integer.parseInt(args(1)) + val max = len + 10 + val resPath = StorageUtils.getFsPath(args(0)) + val resultSetFactory = ResultSetFactory.getInstance + val resultSet = resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE) + val fs = FSFactory.getFs(resPath) + fs.init(null) + val reader = ResultSetReader.getResultSetReader(resultSet, fs.read(resPath)) + val rmetaData = reader.getMetaData + rmetaData.asInstanceOf[TableMetaData].columns.foreach(column => logger.info(column.toString)) + rmetaData + .asInstanceOf[TableMetaData] + .columns + .map(_.columnName + ",") + .foreach(column => logger.info(column)) + var num = 0 + while (reader.hasNext) { + num = num + 1 + if (num > max) return + if (num > len) { + val record = reader.getRecord + record.asInstanceOf[TableRecord].row.foreach { value => + logger.info(value.toString) + logger.info(",") + } + logger.info("\n") + } + } + } + + def createNewFile(args: Array[String]): Unit = { + val resPath = StorageUtils.getFsPath(args(0)) + val proxyUser = StorageUtils.getJvmUser + FileSystemUtils.createNewFile(resPath, proxyUser, true) + logger.info("success") + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala new file mode 100644 index 0000000000..4b9368c049 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala @@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.utils + +import org.apache.linkis.common.io.{Fs, FsPath} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.hadoop.common.conf.HadoopConf +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.CONFIGURATION_NOT_READ +import org.apache.linkis.storage.exception.StorageFatalException +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader, ResultSetWriter} + +import org.apache.commons.lang3.StringUtils + +import java.io.{Closeable, File, InputStream, OutputStream} +import java.lang.reflect.Method +import java.text.NumberFormat + +import scala.collection.mutable + +object StorageUtils extends Logging { + + val HDFS = "hdfs" + val FILE = "file" + + val FILE_SCHEMA = "file://" + val HDFS_SCHEMA = "hdfs://" + + private val nf = NumberFormat.getInstance() + nf.setGroupingUsed(false) + nf.setMaximumFractionDigits(StorageConfiguration.DOUBLE_FRACTION_LEN.getValue) + + def doubleToString(value: Double): String = { + if (value.isNaN) { + "NaN" + } else { + nf.format(value) + } + } + + def loadClass[T](classStr: String, op: T => String): Map[String, T] = { + val _classes = 
classStr.split(",") + val classes = mutable.LinkedHashMap[String, T]() + for (clazz <- _classes) { + Utils.tryAndError { + val obj = Utils.getClassInstance[T](clazz.trim) + classes += op(obj) -> obj + } + } + classes.toMap + } + + /** + * Get the corresponding class by passing in the subclass and package name(通过传入子类和包名获得对应的class) + * @param classStr:Class + * name(类名) + * @param pge:Class + * package name(类的包名) + * @param op:Get + * key value(获取键值) + * @tparam T + * @return + */ + def loadClasses[T]( + classStr: String, + pge: String, + op: Class[T] => String + ): Map[String, Class[T]] = { + val _classes: Array[String] = + if (StringUtils.isEmpty(pge)) classStr.split(",") + else classStr.split(",").map { value: String => pge + "." + value } + val classes = mutable.LinkedHashMap[String, Class[T]]() + for (clazz <- _classes) { + Utils.tryAndError({ + val _class = + Thread.currentThread.getContextClassLoader.loadClass(clazz.trim).asInstanceOf[Class[T]] + classes += op(_class) -> _class + }) + } + classes.toMap + } + + /** + * Get the suffix of the file name(获得文件名的后缀) + * @param path + * @return + */ + def pathToSuffix(path: String): String = { + val fileName = new File(path).getName + if ((fileName != null) && (fileName.length > 0)) { + val dot: Int = fileName.lastIndexOf('.') + if ((dot > -1) && (dot < (fileName.length - 1))) return fileName.substring(dot + 1) + } + fileName + } + + /** + * Reflection calling method(反射调用方法) + * @param obj + * @param method + * @param args + * @return + */ + def invoke(obj: Any, method: Method, args: Array[AnyRef]): Any = { + method.invoke(obj, args) + } + + /** + * Serialized string is a result set of type Text(序列化字符串为Text类型的结果集) + * @param value + * @return + */ + def serializerStringToResult(value: String): String = { + val resultSet = ResultSetFactory.getInstance.getResultSetByType(ResultSetFactory.TEXT_TYPE) + val writer = ResultSetWriter.getResultSetWriter(resultSet, Long.MaxValue, null) + val metaData = new LineMetaData() + val 
record = new LineRecord(value) + writer.addMetaData(metaData) + writer.addRecord(record) + val res = writer.toString() + Utils.tryQuietly(writer.close()) + res + } + + /** + * The result set of serialized text is a string(序列化text的结果集为字符串) + * @param result + * @return + */ + def deserializerResultToString(result: String): String = { + val resultSet = ResultSetFactory.getInstance.getResultSetByType(ResultSetFactory.TEXT_TYPE) + val reader = ResultSetReader.getResultSetReader(resultSet, result) + reader.getMetaData + val sb = new StringBuilder + while (reader.hasNext) { + val record = reader.getRecord.asInstanceOf[LineRecord] + sb.append(record.getLine) + } + val value = sb.toString() + Utils.tryQuietly(reader.close()) + value + } + + def close(outputStream: OutputStream): Unit = { + close(outputStream, null, null) + } + + def close(inputStream: InputStream): Unit = { + close(null, inputStream, null) + } + + def close(fs: Fs): Unit = { + close(null, null, fs) + } + + def close(outputStream: OutputStream, inputStream: InputStream, fs: Fs): Unit = { + Utils.tryFinally(if (outputStream != null) outputStream.close())() + Utils.tryFinally(if (inputStream != null) inputStream.close())() + Utils.tryFinally(if (fs != null) fs.close())() + } + + def close(closeable: Closeable): Unit = { + Utils.tryFinally(if (closeable != null) closeable.close())() + } + + def getJvmUser: String = System.getProperty("user.name") + + def isHDFSNode: Boolean = { + val confPath = new File(HadoopConf.hadoopConfDir) + // TODO IO-client mode need return false + if (!confPath.exists() || confPath.isFile) { + throw new StorageFatalException( + CONFIGURATION_NOT_READ.getErrorCode, + CONFIGURATION_NOT_READ.getErrorDesc + ) + } else true + } + + /** + * Returns the FsPath by determining whether the path is a schema. By default, the FsPath of the + * file is returned. 
通过判断path是否为schema来返回FsPath,默认返回file的FsPath + * @param path + * @return + */ + def getFsPath(path: String): FsPath = { + if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA)) new FsPath(path) + else { + new FsPath(FILE_SCHEMA + path) + } + } + + def readBytes(inputStream: InputStream, bytes: Array[Byte], len: Int): Int = { + var count = 0 + var readLen = 0 + // 当使用s3存储结果文件时时,com.amazonaws.services.s3.model.S3InputStream无法正确读取.dolphin文件。需要在循环条件添加: + // readLen >= 0 + // To resolve the issue when using S3 to store result files and + // com.amazonaws.services.s3.model.S3InputStream to read .dolphin files, you need to add the + // condition readLen >= 0 in the loop. + while (readLen < len && readLen >= 0) { + count = inputStream.read(bytes, readLen, len - readLen) + if (count == -1 && inputStream.available() < 1) return readLen + readLen += count + } + readLen + } + + def isIOProxy(): Boolean = { + StorageConfiguration.ENABLE_IO_PROXY.getValue + } + + def isHDFSPath(fsPath: FsPath): Boolean = { + HDFS.equals(fsPath.getFsType) + } + +} diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/LineMetaDataTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/LineMetaDataTest.java deleted file mode 100644 index 902e835ec2..0000000000 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/LineMetaDataTest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage; - -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -class LineMetaDataTest { - - @Test - public void testCloneMeta() { - LineMetaData origin = new LineMetaData("origin"); - LineMetaData copied = (LineMetaData) origin.cloneMeta(); - origin.setMetaData(origin.getMetaData().replace("o", "a")); - - System.out.println(origin.getMetaData()); - Assertions.assertThat(origin.getMetaData()).isEqualTo("arigin"); - System.out.println(copied.getMetaData()); - Assertions.assertThat(copied.getMetaData()).isEqualTo("origin"); - } -} diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java deleted file mode 100644 index e7e79a7058..0000000000 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; - -import java.io.*; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class StorageResultSetWriterFactoryTest { - - @Test - void testResultSetWrite() throws IOException { - // storage write - ResultSet resultSetByType = - ResultSetFactory.getInstance().getResultSetByType(ResultSetFactory.TEXT_TYPE); - - org.apache.linkis.common.io.resultset.ResultSetWriter - writer = ResultSetWriterFactory.getResultSetWriter(resultSetByType, 100L, null); - - String value = "value"; - LineMetaData metaData = new LineMetaData(null); - LineRecord record = new LineRecord(value); - writer.addMetaData(metaData); - writer.addRecord(record); - writer.flush(); - writer.close(); - String res = writer.toString(); - writer.close(); - Assertions.assertEquals("dolphin00000000010000000004NULL0000000005value", res); - } -} diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java index 2e1a30aa27..60a593665c 100644 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java +++ 
b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java @@ -25,6 +25,7 @@ import org.apache.linkis.storage.script.Variable; import org.apache.linkis.storage.script.VariableParser; import org.apache.linkis.storage.source.FileSource; +import org.apache.linkis.storage.source.FileSource$; import org.apache.commons.math3.util.Pair; @@ -37,6 +38,7 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -105,7 +107,7 @@ void TestSave() { Variable[] v = VariableParser.getVariables(params); List variableList = Arrays.stream(v) - .filter(var -> !StringUtils.isEmpty(var.getValue())) + .filter(var -> !StringUtils.isEmpty(var.value())) .collect(Collectors.toList()); try { @@ -136,8 +138,8 @@ void TestOpen() throws FileNotFoundException { InputStream inputStream = new FileInputStream(file); - FileSource fileSource = FileSource.create(new FsPath(fileName), inputStream); - Pair> collect = fileSource.collect()[0]; + FileSource fileSource = FileSource$.MODULE$.create(new FsPath(fileName), inputStream); + Pair> collect = fileSource.collect()[0]; String scriptRes = collect.getSecond().get(0)[0]; String metadataRes = new Gson().toJson(collect.getFirst()); diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java deleted file mode 100644 index 1210c64e03..0000000000 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.csv.CSVFsWriter; - -import java.io.IOException; -import java.io.OutputStream; - -import org.junit.jupiter.api.Test; - -class ResultsetFileSourceTest { - - @Test - public void testWriter() throws IOException { - String filePath = this.getClass().getResource("/result-read-test.dolphin").getFile().toString(); - FsPath sourceFsPath = new FsPath(filePath); - Fs sourceFs = FSFactory.getFs(sourceFsPath); - sourceFs.init(null); - - FsPath destFsPath = new FsPath(filePath + ".result"); - Fs destFs = FSFactory.getFs(destFsPath); - destFs.init(null); - OutputStream outputStream = destFs.write(destFsPath, true); - - CSVFsWriter cSVFsWriter = CSVFsWriter.getCSVFSWriter("UTF-8", ",", false, outputStream); - FileSource fileSource = FileSource.create(sourceFsPath, sourceFs); - fileSource.addParams("nullValue", "NULL").write(cSVFsWriter); - - cSVFsWriter.close(); - } -} diff --git a/linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin b/linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin deleted file mode 100644 index 0c48c045b0..0000000000 --- 
a/linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin +++ /dev/null @@ -1 +0,0 @@ -dolphin0000000002000000002900000000063,6,4,_c0bigintNULL000000001400000000022,55 \ No newline at end of file diff --git a/linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin b/linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin deleted file mode 100644 index b94e3d96fb..0000000000 --- a/linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin +++ /dev/null @@ -1 +0,0 @@ -dolphin00000000010000000004NULL0000000011hello world \ No newline at end of file diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala index 3bfc35b17c..e0d3d7efb6 100644 --- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala @@ -25,7 +25,7 @@ class DataTypeTest { @DisplayName("constTest") def constTest(): Unit = { - val nullvalue = DataType.NULL_VALUE + val nullvalue = Dolphin.NULL val lowcasenullvalue = DataType.LOWCASE_NULL_VALUE Assertions.assertEquals("NULL", nullvalue) @@ -60,4 +60,19 @@ class DataTypeTest { } + @Test + @DisplayName("toValueTest") + def toValueTest(): Unit = { + val dateType = DataType.toDataType("double") + val str = DataType.toValue(dateType, "NaN") + Assertions.assertNotNull(str) + } + + @Test + @DisplayName("decimalTest") + def decimalTest(): Unit = { + val dateType = DataType.toDataType("decimal(10, 8)") + Assertions.assertTrue(dateType.typeName.equals("decimal")) + } + } diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala index 
6534b25c6f..ecd5c89cf9 100644 --- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala @@ -46,7 +46,6 @@ class StorageConfigurationTest { val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue val hdfspathprefixcheckon = StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue val hdfspathprefixremove = StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue - val fscachedisable = StorageConfiguration.FS_CACHE_DISABLE.getValue val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE.getValue Assertions.assertEquals("hadoop", storagerootuser) @@ -62,8 +61,7 @@ class StorageConfigurationTest { storageresultsetclasses ) Assertions.assertEquals( - "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," + - "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem", + "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem", storagebuildfsclasses ) Assertions.assertTrue(issharenode) @@ -77,7 +75,6 @@ class StorageConfigurationTest { Assertions.assertTrue(30 == doublefractionlen) Assertions.assertTrue(hdfspathprefixcheckon) Assertions.assertTrue(hdfspathprefixremove) - Assertions.assertFalse(fscachedisable) Assertions.assertFalse(fschecksumdisbale) } diff --git a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala similarity index 73% rename from linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala rename to 
linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala index dbf3f5e3b5..6ae15782ee 100644 --- a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala @@ -15,21 +15,18 @@ * limitations under the License. */ -package org.apache.linkis.protocol.engine +package org.apache.linkis.storage.utils import org.junit.jupiter.api.{Assertions, DisplayName, Test} -class ResponseUserEngineKillTest { +class StorageUtilsTest { @Test - @DisplayName("constTest") - def constTest(): Unit = { + @DisplayName("doubleToStringTest") + def doubleToStringTest(): Unit = { + val str = StorageUtils.doubleToString(Double.NaN) + Assertions.assertEquals("NaN", str) - val success = ResponseUserEngineKill.Success - val error = ResponseUserEngineKill.Error - - Assertions.assertEquals("Success", success) - Assertions.assertEquals("Error", error) } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java index 07df8b79ad..cfa57d4e26 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java @@ -73,11 +73,16 @@ public static CliCtx buildCtx(String[] args) throws LinkisClientRuntimeException ParseResult result = parser.parse(args); ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); + parsedTplValidator.doValidation(result.getParsedTemplate()); Params params = result.getParams(); logger.debug("==========params============\n" + CliUtils.GSON.toJson(params)); + /* + VarAccess for sys_prop, sys_env + 
*/ + Map propertiesMap = new HashMap<>(); LoggerManager.getInformationLogger() diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java index 1fb21043a1..24ee3c8dcc 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java @@ -69,7 +69,7 @@ public static void main(String[] args) { CmdTemplate template = CmdTemplateFactory.getTemplateOri(e.getCmdType()); if (template != null) { HelpInfoModel model = new HelpInfoModel(); - model.buildModel(ctx.getTemplate()); + model.buildModel(template); new HelpPresenter().present(model); } LoggerManager.getInformationLogger().error("Failed to build CliCtx", e); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java index e497401bef..eee29c8e94 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java @@ -93,7 +93,7 @@ public void setValueWithStr(String value) throws IllegalArgumentException { } public T getValue() { - return this.value; + return this.value == null ? 
this.defaultValue : this.value; } public void setValue(T value) { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java index ee66a64463..47af30b0d0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java @@ -43,14 +43,9 @@ public Flag( @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("\t") - .append(StringUtils.join(paramNames, "|")) - .append(" <") - .append(this.getDefaultValue().getClass().getSimpleName()) - .append(">") - .append(System.lineSeparator()); + sb.append("\t").append(StringUtils.join(paramNames, "|")).append(System.lineSeparator()); - sb.append("\t\t").append(this.getDefaultValue()).append(System.lineSeparator()); + sb.append("\t\t").append(this.getDescription()).append(System.lineSeparator()); sb.append("\t\tdefault by: ").append(this.getDefaultValue()).append(System.lineSeparator()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java index e594d9cc23..629c466841 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java @@ -30,6 +30,9 @@ public class InteractiveJobDesc { private Map labelMap; private Map sourceMap; + // 需要加到header中的一些参数 + private Map headers; + public String getSubmitUser() { return submitUser; } @@ -101,4 +104,12 @@ public Map getLabelMap() { public void setLabelMap(Map labelMap) { this.labelMap = labelMap; } + + public Map getHeaders() { + return headers; + } + + public void setHeaders(Map headers) { + this.headers = headers; + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java index 0c8a3db539..2b0b20188a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java @@ -26,6 +26,7 @@ import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; import org.apache.commons.lang3.StringUtils; @@ -135,7 +136,12 @@ public static InteractiveJobDesc build(CliCtx ctx) { } if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = CliUtils.readFile(codePath); + try { + code = CliUtils.readFile(codePath); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to read file", e); + throw e; + } } 
executionMap.put(LinkisKeys.KEY_CODE, code); @@ -143,6 +149,9 @@ public static InteractiveJobDesc build(CliCtx ctx) { labelMap.put(LinkisKeys.KEY_CODETYPE, runType); labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); + if (ctx.getExtraMap().containsKey(CliKeys.VERSION)) { + sourceMap.put(LinkisKeys.CLI_VERSION, ctx.getExtraMap().get(CliKeys.VERSION)); + } runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); desc.setCreator(creator); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java index bfebf62c71..1c17fcd969 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java @@ -103,6 +103,7 @@ public LinkisOperResultAdapter submit(InteractiveJobDesc jobDesc) .setVariableMap(jobDesc.getParamVarsMap()) .setLabels(jobDesc.getLabelMap()) .setSource(jobDesc.getSourceMap()) + .setHeaders(jobDesc.getHeaders()) .build(); logger.info("Request info to Linkis: \n{}", CliUtils.GSON.toJson(jobSubmitAction)); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java index 9a54699165..c2d47e2b7a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java @@ -32,7 +32,6 @@ public static void writeToFile( String pathName, String fileName, String content, Boolean overWrite) { File dir = new File(pathName); - File file = new File(fileName); if (!dir.exists()) { try { @@ -47,6 +46,8 @@ public static void writeToFile( } } + File file = new File(dir.getAbsolutePath() + File.separator + fileName); + if (overWrite || !file.exists()) { try { file.createNewFile(); diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala index 9cc2863559..eff8411603 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala @@ -174,7 +174,7 @@ object LinkisJobBuilder { private var threadPool: ScheduledThreadPoolExecutor = Utils.defaultScheduler private var serverUrl: String = _ - private var authTokenValue: String = CommonVars[String]( + var authTokenValue: String = CommonVars[String]( "wds.linkis.client.test.common.tokenValue", "LINKIS_CLI_TEST" ).getValue // This is the default authToken, we usually suggest set different ones for users. 
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala index d44c479abb..80e8e7ad42 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala @@ -17,11 +17,37 @@ package org.apache.linkis.computation.client -import org.apache.linkis.computation.client.interactive.InteractiveJob -import org.apache.linkis.computation.client.once.OnceJob +import org.apache.linkis.bml.client.BmlClientFactory +import org.apache.linkis.computation.client.interactive.{InteractiveJob, InteractiveJobBuilder} +import org.apache.linkis.computation.client.once.{LinkisManagerClient, OnceJob} +import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SimpleOnceJobBuilder} +import org.apache.linkis.httpclient.dws.config.DWSClientConfig +import org.apache.linkis.ujes.client.UJESClientImpl import java.io.Closeable +class LinkisJobClient(clientConfig: DWSClientConfig) extends Closeable { + + private val ujseClient = new UJESClientImpl(clientConfig) + + private lazy val linkisManagerCLient = LinkisManagerClient(ujseClient) + + override def close(): Unit = { + if (null != linkisManagerCLient) { + linkisManagerCLient.close() + } + } + + def onceJobBuilder(): SimpleOnceJobBuilder = + SimpleOnceJob.builder(SimpleOnceJobBuilder.getBmlClient(clientConfig), linkisManagerCLient) + + def interactiveJobBuilder(): InteractiveJobBuilder = { + val builder = InteractiveJob.builder() + builder.setUJESClient(ujseClient) + } + +} + /** * This class is only used to provide a unified entry for user to build a 
LinkisJob conveniently and * simply. Please keep this class lightweight enough, do not set too many field to confuse user. diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala index 45f3f49bed..bc1bb75f55 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala @@ -24,7 +24,8 @@ import org.apache.linkis.computation.client.once.action.{ EngineConnOperateAction, GetEngineConnAction, KillEngineConnAction, - LinkisManagerAction + LinkisManagerAction, + ListEngineConnAction } import org.apache.linkis.computation.client.once.result.{ AskEngineConnResult, @@ -32,7 +33,8 @@ import org.apache.linkis.computation.client.once.result.{ EngineConnOperateResult, GetEngineConnResult, KillEngineConnResult, - LinkisManagerResult + LinkisManagerResult, + ListEngineConnResult } import org.apache.linkis.httpclient.dws.DWSHttpClient import org.apache.linkis.httpclient.request.Action @@ -50,6 +52,8 @@ trait LinkisManagerClient extends Closeable { def killEngineConn(killEngineConnAction: KillEngineConnAction): KillEngineConnResult + def listEngineConn(listEngineConnAction: ListEngineConnAction): ListEngineConnResult + def executeEngineConnOperation( engineConnOperateAction: EngineConnOperateAction ): EngineConnOperateResult @@ -104,4 +108,8 @@ class LinkisManagerClientImpl(ujesClient: UJESClient) extends LinkisManagerClien override def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult = execute(askEngineConnAction) + override def 
listEngineConn(listEngineConnAction: ListEngineConnAction): ListEngineConnResult = { + execute(listEngineConnAction) + } + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala new file mode 100644 index 0000000000..c76a5e78e3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.computation.client.once.action + +import org.apache.linkis.httpclient.request.GetAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +class ListEngineConnAction extends GetAction with LinkisManagerAction { + override def suffixURLs: Array[String] = Array("linkisManager", "listUserEngines") +} + +object ListEngineConnAction { + def newBuilder(): Builder = new Builder + + class Builder private[ListEngineConnAction] () { + + private var user: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def build(): ListEngineConnAction = { + if (user == null) throw new UJESClientBuilderException("user is needed!") + val listEngineConnAction = new ListEngineConnAction + listEngineConnAction.setUser(user) + listEngineConnAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala index e964cd714c..b20923de89 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala @@ -17,6 +17,7 @@ package org.apache.linkis.computation.client.once.result +import org.apache.linkis.common.ServiceInstance import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult import java.util @@ -32,4 +33,41 @@ class GetEngineConnResult extends LinkisManagerResult { def getNodeInfo: util.Map[String, Any] = engineConnNode + protected def getAs[T](map: util.Map[String, Any], key: String): T = + map.get(key).asInstanceOf[T] + + def 
getTicketId(): String = getAs(engineConnNode, "ticketId") + + def getServiceInstance(): ServiceInstance = + engineConnNode.get("serviceInstance") match { + case serviceInstance: util.Map[String, Any] => + ServiceInstance( + getAs(serviceInstance, "applicationName"), + getAs(serviceInstance, "instance") + ) + case _ => null + } + + def getNodeStatus(): String = getAs(engineConnNode, "nodeStatus") + + def getECMServiceInstance(): ServiceInstance = + engineConnNode.get("ecmServiceInstance") match { + case serviceInstance: util.Map[String, Any] => + ServiceInstance( + getAs(serviceInstance, "applicationName"), + getAs(serviceInstance, "instance") + ) + case _ => null + } + + def getManagerServiceInstance(): ServiceInstance = + engineConnNode.get("managerServiceInstance") match { + case serviceInstance: util.Map[String, Any] => + ServiceInstance( + getAs(serviceInstance, "applicationName"), + getAs(serviceInstance, "instance") + ) + case _ => null + } + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala new file mode 100644 index 0000000000..c31ccf481f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.computation.client.once.result + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult + +import java.util + +@DWSHttpMessageResult("/api/rest_j/v\\d+/linkisManager/listUserEngines") +class ListEngineConnResult extends LinkisManagerResult { + + private var engines: util.List[util.Map[String, AnyRef]] = _ + + def setEngines(engines: util.List[util.Map[String, AnyRef]]): Unit = { + this.engines = engines + } + + def getEngines: util.List[util.Map[String, AnyRef]] = engines + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala index 4992b17814..13d96c238a 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala @@ -17,13 +17,13 @@ package org.apache.linkis.computation.client.once.simple +import org.apache.linkis.bml.client.BmlClient import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.utils.Utils import org.apache.linkis.computation.client.LinkisJobMetrics import org.apache.linkis.computation.client.job.AbstractSubmittableLinkisJob import 
org.apache.linkis.computation.client.once.{LinkisManagerClient, OnceJob, SubmittableOnceJob} import org.apache.linkis.computation.client.once.action.CreateEngineConnAction -import org.apache.linkis.computation.client.once.result.CreateEngineConnResult import org.apache.linkis.computation.client.operator.OnceJobOperator import java.util.Locale @@ -109,15 +109,13 @@ class SubmittableSimpleOnceJob( with AbstractSubmittableLinkisJob { private var ecmServiceInstance: ServiceInstance = _ - private var createEngineConnResult: CreateEngineConnResult = _ def getECMServiceInstance: ServiceInstance = ecmServiceInstance - def getCreateEngineConnResult: CreateEngineConnResult = createEngineConnResult override protected def doSubmit(): Unit = { logger.info(s"Ready to create a engineConn: ${createEngineConnAction.getRequestPayload}.") - createEngineConnResult = linkisManagerClient.createEngineConn(createEngineConnAction) - lastNodeInfo = createEngineConnResult.getNodeInfo + val nodeInfo = linkisManagerClient.createEngineConn(createEngineConnAction) + lastNodeInfo = nodeInfo.getNodeInfo serviceInstance = getServiceInstance(lastNodeInfo) ticketId = getTicketId(lastNodeInfo) ecmServiceInstance = getECMServiceInstance(lastNodeInfo) @@ -160,6 +158,11 @@ object SimpleOnceJob { def builder(): SimpleOnceJobBuilder = new SimpleOnceJobBuilder + def builder( + bmlClient: BmlClient, + linkisManagerClient: LinkisManagerClient + ): SimpleOnceJobBuilder = new SimpleOnceJobBuilder(bmlClient, linkisManagerClient) + /** * Build a submitted SimpleOnceJob by id and user. 
* @param id diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala index dc4451ff0f..d7c4746188 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala @@ -20,6 +20,7 @@ package org.apache.linkis.computation.client.once.simple import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory} import org.apache.linkis.common.utils.Utils import org.apache.linkis.computation.client.LinkisJobBuilder +import org.apache.linkis.computation.client.LinkisJobBuilder.clientConfig import org.apache.linkis.computation.client.once.LinkisManagerClient import org.apache.linkis.computation.client.once.action.CreateEngineConnAction import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder._ @@ -28,6 +29,8 @@ import org.apache.linkis.governance.common.entity.job.OnceExecutorContent import org.apache.linkis.governance.common.utils.OnceExecutorContentUtils import org.apache.linkis.governance.common.utils.OnceExecutorContentUtils.BmlResource import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.ujes.client.exception.UJESJobException @@ -38,12 +41,19 @@ import java.util import 
scala.collection.convert.WrapAsJava._ import scala.collection.convert.WrapAsScala._ -class SimpleOnceJobBuilder private[simple] () extends LinkisJobBuilder[SubmittableSimpleOnceJob] { +class SimpleOnceJobBuilder private[simple] ( + private val bmlClient: BmlClient, + private val linkisManagerClient: LinkisManagerClient +) extends LinkisJobBuilder[SubmittableSimpleOnceJob] { private var createService: String = _ private var maxSubmitTime: Long = _ private var description: String = _ + def this() = { + this(null, null) + } + def setCreateService(createService: String): this.type = { this.createService = createService this @@ -69,10 +79,26 @@ class SimpleOnceJobBuilder private[simple] () extends LinkisJobBuilder[Submittab val contentMap = OnceExecutorContentUtils.contentToMap(onceExecutorContent) val bytes = DWSHttpClient.jacksonJson.writeValueAsBytes(contentMap) val response = - getBmlClient.uploadResource(executeUser, getFilePath, new ByteArrayInputStream(bytes)) + getThisBMLClient.uploadResource(executeUser, getFilePath, new ByteArrayInputStream(bytes)) OnceExecutorContentUtils.resourceToValue(BmlResource(response.resourceId, response.version)) } + protected def getThisBMLClient(): BmlClient = { + if (null == this.bmlClient) { + getBmlClient(LinkisJobBuilder.getDefaultClientConfig) + } else { + this.bmlClient + } + } + + protected def getThisLinkisManagerClient(): LinkisManagerClient = { + if (null == this.linkisManagerClient) { + getLinkisManagerClient + } else { + this.linkisManagerClient + } + } + override def build(): SubmittableSimpleOnceJob = { ensureNotNull(labels, "labels") ensureNotNull(jobContent, "jobContent") @@ -99,7 +125,7 @@ class SimpleOnceJobBuilder private[simple] () extends LinkisJobBuilder[Submittab .setMaxSubmitTime(maxSubmitTime) .setDescription(description) .build() - new SubmittableSimpleOnceJob(getLinkisManagerClient, createEngineConnAction) + new SubmittableSimpleOnceJob(getThisLinkisManagerClient, createEngineConnAction) } implicit def 
toMap(map: util.Map[String, Any]): util.Map[String, String] = map.map { @@ -128,10 +154,27 @@ object SimpleOnceJobBuilder { private var bmlClient: BmlClient = _ private var linkisManagerClient: LinkisManagerClient = _ - def getBmlClient: BmlClient = { + def getBmlClient(clientConfig: DWSClientConfig): BmlClient = { if (bmlClient == null) synchronized { if (bmlClient == null) { - bmlClient = BmlClientFactory.createBmlClient(LinkisJobBuilder.getDefaultClientConfig) + val newClientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(clientConfig.getServerUrl) + .connectionTimeout(clientConfig.getConnectTimeout) + .discoveryEnabled(clientConfig.isDiscoveryEnabled) + .loadbalancerEnabled(clientConfig.isLoadbalancerEnabled) + .maxConnectionSize(clientConfig.getMaxConnection) + .retryEnabled(clientConfig.isRetryEnabled) + .setRetryHandler(clientConfig.getRetryHandler) + .readTimeout( + clientConfig.getReadTimeout + ) // We think 90s is enough, if SocketTimeoutException is throw, just set a new clientConfig to modify it. 
+ .setAuthenticationStrategy(new TokenAuthenticationStrategy()) + .setAuthTokenKey(TokenAuthenticationStrategy.TOKEN_KEY) + .setAuthTokenValue(LinkisJobBuilder.authTokenValue) + .setDWSVersion(clientConfig.getDWSVersion) + .build() + bmlClient = BmlClientFactory.createBmlClient(newClientConfig) Utils.addShutdownHook(() => bmlClient.close()) } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala similarity index 51% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java rename to linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala index 280c35d660..3e7f675592 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala @@ -15,17 +15,32 @@ * limitations under the License. 
*/ -package org.apache.linkis.engineplugin.server.loader; +package org.apache.linkis.ujes.client -import org.apache.linkis.engineplugin.loader.loaders.DefaultEngineConnPluginLoader; +import org.apache.linkis.ujes.client.request.{ + CreateNewDirAction, + IsPathExistAction, + UploadFileAction +} +import org.apache.linkis.ujes.client.response.{ + CreateNewDirResult, + IsPathExistResult, + UploadFileResult +} + +class LinkisFSClient(client: UJESClient) { -public class EngineConnPluginsLoaderFactory { + def isPathExist(isPathExistAction: IsPathExistAction): Boolean = { + val result = client.executeUJESJob(isPathExistAction).asInstanceOf[IsPathExistResult] + result.isExist + } - private static final org.apache.linkis.engineplugin.loader.loaders.EngineConnPluginsLoader - engineConnPluginsLoader = new DefaultEngineConnPluginLoader(); + def createNewDir(makeDirAction: CreateNewDirAction): CreateNewDirResult = { + client.executeUJESJob(makeDirAction).asInstanceOf[CreateNewDirResult] + } - public static org.apache.linkis.engineplugin.loader.loaders.EngineConnPluginsLoader - getEngineConnPluginsLoader() { - return engineConnPluginsLoader; + def upload(uploadFileAction: UploadFileAction): UploadFileResult = { + client.executeUJESJob(uploadFileAction).asInstanceOf[UploadFileResult] } + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala index 6431c47ebf..19ac7343d8 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala @@ -17,6 +17,7 @@ package org.apache.linkis.ujes.client +import org.apache.linkis.common.utils.{Logging, Utils} 
import org.apache.linkis.httpclient.authentication.AuthenticationStrategy import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} @@ -24,11 +25,13 @@ import org.apache.linkis.httpclient.response.Result import org.apache.linkis.ujes.client.request._ import org.apache.linkis.ujes.client.request.JobExecIdAction.JobServiceType import org.apache.linkis.ujes.client.response._ +import org.apache.linkis.ujes.client.utils.UJESClientUtils import java.io.Closeable +import java.util import java.util.concurrent.TimeUnit -abstract class UJESClient extends Closeable { +abstract class UJESClient extends Closeable with Logging { def execute(jobExecuteAction: JobExecuteAction): JobExecuteResult = executeUJESJob( jobExecuteAction @@ -37,7 +40,7 @@ abstract class UJESClient extends Closeable { def submit(jobSubmitAction: JobSubmitAction): JobSubmitResult = executeUJESJob(jobSubmitAction).asInstanceOf[JobSubmitResult] - protected[client] def executeUJESJob(ujesJobAction: UJESJobAction): Result + def executeUJESJob(ujesJobAction: UJESJobAction): Result private def executeJobExecIdAction[T]( jobExecuteResult: JobExecuteResult, @@ -52,12 +55,37 @@ abstract class UJESClient extends Closeable { executeUJESJob(jobExecIdAction).asInstanceOf[T] } + /** + * only get the status of the cache Task status should be based on getJobInfo + * @param jobExecuteResult + * @return + */ def status(jobExecuteResult: JobExecuteResult): JobStatusResult = executeJobExecIdAction(jobExecuteResult, JobServiceType.JobStatus) + /** + * IF exception return null progress result + * @param jobExecuteResult + * @return + */ def progress(jobExecuteResult: JobExecuteResult): JobProgressResult = - executeJobExecIdAction(jobExecuteResult, JobServiceType.JobProgress) - + Utils.tryCatch( + executeJobExecIdAction(jobExecuteResult, JobServiceType.JobProgress) + .asInstanceOf[JobProgressResult] + ) { t => + 
logger.warn("Failed to get progress, return empty progress.", t) + val result = new JobProgressResult + result.setProgress(0) + result + } + + /** + * If exception return null log + * @param jobExecuteResult + * @param fromLine + * @param size + * @return + */ def log(jobExecuteResult: JobExecuteResult, fromLine: Int, size: Int): JobLogResult = { val jobLogAction = JobLogAction .builder() @@ -66,13 +94,19 @@ abstract class UJESClient extends Closeable { .setFromLine(fromLine) .setSize(size) .build() - executeUJESJob(jobLogAction).asInstanceOf[JobLogResult] - } - def list(jobListAction: JobListAction): JobListResult = { - executeUJESJob(jobListAction).asInstanceOf[JobListResult] + Utils.tryCatch(executeUJESJob(jobLogAction).asInstanceOf[JobLogResult]) { t => + logger.warn("Failed to get Log, return empty log.", t) + null + } } + /** + * If exception return null log + * @param jobExecuteResult + * @param jobLogResult + * @return + */ def log(jobExecuteResult: JobExecuteResult, jobLogResult: JobLogResult): JobLogResult = { val jobLogAction = JobLogAction .builder() @@ -80,13 +114,21 @@ abstract class UJESClient extends Closeable { .setUser(jobExecuteResult.getUser) .setFromLine(jobLogResult.getFromLine) .build() - executeUJESJob(jobLogAction).asInstanceOf[JobLogResult] + + Utils.tryCatch(executeUJESJob(jobLogAction).asInstanceOf[JobLogResult]) { t => + logger.warn("Failed to get Log, return empty log.", t) + null + } } def openLog(openLogAction: OpenLogAction): OpenLogResult = { executeUJESJob(openLogAction).asInstanceOf[OpenLogResult] } + def list(jobListAction: JobListAction): JobListResult = { + executeUJESJob(jobListAction).asInstanceOf[JobListResult] + } + def kill(jobExecuteResult: JobExecuteResult): JobKillResult = executeJobExecIdAction(jobExecuteResult, JobServiceType.JobKill) diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala 
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala index b173f53d55..0feabaafda 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala @@ -26,7 +26,7 @@ import org.apache.linkis.ujes.client.request.UJESJobAction class UJESClientImpl(clientConfig: DWSClientConfig) extends UJESClient { private val dwsHttpClient = new DWSHttpClient(clientConfig, "Linkis-Job-Execution-Thread") - override protected[client] def executeUJESJob(ujesJobAction: UJESJobAction): Result = + override def executeUJESJob(ujesJobAction: UJESJobAction): Result = ujesJobAction match { case action: Action => dwsHttpClient.execute(action) } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala new file mode 100644 index 0000000000..561bfc07d1 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.request.POSTAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +class CreateNewDirAction extends POSTAction with UJESJobAction { + + override def suffixURLs: Array[String] = Array("filesystem", "createNewDir") + + override def getRequestPayload: String = + DWSHttpClient.jacksonJson.writeValueAsString(getRequestPayloads) + +} + +object CreateNewDirAction { + def builder(): Builder = new Builder + + class Builder private[CreateNewDirAction] () { + private var user: String = _ + private var path: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setPath(path: String): Builder = { + this.path = path + this + } + + def build(): CreateNewDirAction = { + val makeDirAction = new CreateNewDirAction + if (user == null) throw new UJESClientBuilderException("user is needed!") + if (path == null) throw new UJESClientBuilderException("path is needed!") + makeDirAction.setUser(user) + makeDirAction.addRequestPayload("path", path) + makeDirAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala new file mode 100644 index 0000000000..e9e74edd16 --- /dev/null +++ 
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.request.GetAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +class IsPathExistAction extends GetAction with UJESJobAction { + + override def suffixURLs: Array[String] = Array("filesystem", "isExist") +} + +object IsPathExistAction { + def builder(): Builder = new Builder + + class Builder private[IsPathExistAction] () { + private var user: String = _ + private var path: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setPath(path: String): Builder = { + this.path = path + this + } + + def build(): IsPathExistAction = { + val isPathExistAction = new IsPathExistAction + if (user == null) throw new UJESClientBuilderException("user is needed!") + if (path == null) throw new UJESClientBuilderException("path is needed!") + isPathExistAction.setUser(user) + isPathExistAction.setParameter("path", path) + isPathExistAction + } + + } + +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala index f96c6227fe..aba26c619f 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala @@ -25,6 +25,8 @@ import org.apache.linkis.ujes.client.exception.UJESClientBuilderException import java.util +import scala.collection.JavaConverters.mapAsScalaMapConverter + class JobSubmitAction private () extends POSTAction with UJESJobAction { override def suffixURLs: Array[String] = Array("entrance", "submit") @@ -52,6 +54,8 @@ object JobSubmitAction { private var source: util.Map[String, AnyRef] = _ + private var headers: util.Map[String, String] = _ + def addExecuteCode(executeCode: String): Builder = { if (null == executionContent) executionContent = new util.HashMap[String, AnyRef]() executionContent.put("code", executeCode) @@ -129,6 +133,11 @@ object JobSubmitAction { this } + def setHeaders(headers: util.Map[String, String]): Builder = { + this.headers = headers + this + } + def build(): JobSubmitAction = { val submitAction = new JobSubmitAction submitAction.setUser(user) @@ -145,6 +154,11 @@ object JobSubmitAction { if (this.labels == null) this.labels = new util.HashMap[String, AnyRef]() submitAction.addRequestPayload(TaskConstant.LABELS, this.labels) + + if (this.headers == null) this.headers = new util.HashMap[String, String]() + this.headers.asScala.foreach { case (k, v) => + if (k != null && v != null) submitAction.addHeader(k, v) + } submitAction } diff --git 
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala index 9eb748691e..708689089a 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala @@ -38,6 +38,10 @@ object ResultSetAction { // default value is :org.apache.linkis.storage.domain.Dolphin.LINKIS_NULL private var nullValue: String = "LINKIS_NULL" + private var enableLimit: Option[Boolean] = None + private var columnPage: Int = _ + private var columnPageSize: Int = _ + def setUser(user: String): Builder = { this.user = user this @@ -68,6 +72,21 @@ object ResultSetAction { this } + def setEnableLimit(enableLimit: Boolean): Builder = { + this.enableLimit = Some(enableLimit) + this + } + + def setColumnPage(columnPage: Int): Builder = { + this.columnPage = columnPage + this + } + + def setColumnPageSize(columnPageSize: Int): Builder = { + this.columnPageSize = columnPageSize + this + } + def build(): ResultSetAction = { if (user == null) throw new UJESClientBuilderException("user is needed!") if (path == null) throw new UJESClientBuilderException("path is needed!") @@ -76,7 +95,18 @@ object ResultSetAction { if (page > 0) resultSetAction.setParameter("page", page) if (pageSize > 0) resultSetAction.setParameter("pageSize", pageSize) resultSetAction.setParameter("charset", charset) + if (enableLimit.isDefined) resultSetAction.setParameter("enableLimit", true) resultSetAction.setParameter("nullValue", nullValue) + if (columnPage > 0) { + resultSetAction.setParameter("columnPage", columnPage) + } else { + 
resultSetAction.setParameter("columnPage", null) + } + if (columnPageSize > 0) { + resultSetAction.setParameter("columnPageSize", columnPageSize) + } else { + resultSetAction.setParameter("columnPageSize", null) + } resultSetAction.setUser(user) resultSetAction } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala new file mode 100644 index 0000000000..4248a9c7c6 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.request.{BinaryBody, GetAction, UploadAction} +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +import org.apache.http.entity.ContentType + +import java.io.{File, FileInputStream} +import java.util + +import scala.collection.JavaConverters._ + +class UploadFileAction extends GetAction with UploadAction with UJESJobAction { + override def suffixURLs: Array[String] = Array("filesystem", "upload") + + override val files: util.Map[String, String] = new util.HashMap[String, String]() + + override val binaryBodies: util.List[BinaryBody] = new util.ArrayList[BinaryBody](0) + +} + +object UploadFileAction { + def builder(): Builder = new Builder + + class Builder private[UploadFileAction] { + private var user: String = _ + private var path: String = _ + private var uploadFiles: util.List[File] = new util.ArrayList[File](0) + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setPath(path: String): Builder = { + this.path = path + this + } + + def addFile(file: File): Builder = { + this.uploadFiles.add(file) + this + } + + def build(): UploadFileAction = { + val uploadFileAction = new UploadFileAction + if (user == null) throw new UJESClientBuilderException("user is needed!") + if (path == null) throw new UJESClientBuilderException("path is needed!") + + uploadFileAction.setUser(user) + uploadFileAction.setParameter("path", path) + uploadFiles.asScala.foreach { file => + println(String.format("=============== upload file ========== %s ", file.getAbsolutePath)) + uploadFileAction.binaryBodies.add( + BinaryBody + .apply("file", new FileInputStream(file), file.getName, ContentType.MULTIPART_FORM_DATA) + ) + } + + uploadFileAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala 
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala new file mode 100644 index 0000000000..0871f4042e --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult +import org.apache.linkis.ujes.client.request.UserAction + +@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/createNewDir") +class CreateNewDirResult extends DWSResult with UserAction {} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala new file mode 100644 index 0000000000..c87cd7d2c7 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult +import org.apache.linkis.ujes.client.request.UserAction + +import scala.beans.BeanProperty + +@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/isExist") +class IsPathExistResult extends DWSResult with UserAction { + @BeanProperty var isExist: Boolean = _ +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala index 973573f494..9051748c36 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala @@ -20,6 +20,9 @@ package org.apache.linkis.ujes.client.response import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult import org.apache.linkis.httpclient.dws.response.DWSResult import org.apache.linkis.ujes.client.request.UserAction +import org.apache.linkis.ujes.client.utils.UJESClientUtils.evaluate + +import java.util import scala.beans.BeanProperty @@ -28,6 +31,31 @@ class ResultSetResult extends DWSResult with UserAction { private var `type`: String = _ + private var metadataList: util.List[util.Map[String, String]] = _ + + private var fileContentList: util.List[util.ArrayList[_]] = _ + + def getMetadataList: util.List[util.Map[String, String]] = { + metadata.asInstanceOf[util.List[util.Map[String, String]]] + } + + def getRowList: util.List[util.ArrayList[Any]] = { + val metaData = metadata.asInstanceOf[util.List[util.Map[String, String]]] + val fileContentList = 
fileContent.asInstanceOf[util.List[util.ArrayList[Any]]] + for (metaDataColnum <- 1 to metaData.size()) { + val col = metaData.get(metaDataColnum - 1) + if (!col.get("dataType").equals("string")) { + for (cursor <- 1 to fileContentList.size()) { + val colDataList = fileContentList.get(cursor - 1) + var colData = colDataList.get(metaDataColnum - 1) + colData = evaluate(col.get("dataType"), colData.toString) + colDataList.set(metaDataColnum - 1, colData) + } + } + } + fileContentList + } + def setType(`type`: String): Unit = this.`type` = `type` def getType: String = `type` diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala similarity index 69% rename from linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala rename to linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala index 9137001c14..837399f2d9 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala @@ -15,13 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.protocol.engine +package org.apache.linkis.ujes.client.response -case class EngineStateTransitionRequest(engineInstance: String, state: String) +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult +import org.apache.linkis.ujes.client.request.UserAction -case class EngineStateTransitionResponse( - engineInstance: String, - state: String, - result: Boolean, - message: String -) +@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/upload") +class UploadFileResult extends DWSResult with UserAction {} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/utils/UJESClientUtils.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/utils/UJESClientUtils.scala index 9615a89bc0..e75929ea8f 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/utils/UJESClientUtils.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/utils/UJESClientUtils.scala @@ -17,10 +17,19 @@ package org.apache.linkis.ujes.client.utils +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException import org.apache.linkis.ujes.client.request.JobExecuteAction.{EngineType, RunType} +import org.apache.linkis.ujes.client.response.ResultSetResult + +import java.util +import java.util.Locale + +import com.google.gson.{Gson, JsonObject} object UJESClientUtils { + val gson: Gson = new Gson() + def toEngineType(engineType: String): EngineType = engineType match { case "spark" => EngineType.SPARK case "hive" => EngineType.HIVE @@ -48,4 +57,31 @@ object UJESClientUtils { case _ => EngineType.SPARK.SQL } + def evaluate(dataType: String, value: String): Any = { + if (value == null || value.equals("null") || 
value.equals("NULL") || value.equals("Null")) { + dataType.toLowerCase(Locale.getDefault) match { + case "string" | "char" | "varchar" | "nvarchar" => value + case _ => null + } + } else { + dataType.toLowerCase(Locale.getDefault) match { + case null => throw new UJESClientBuilderException("data is empty") + case "char" | "varchar" | "nvarchar" | "string" => value + case "short" => value.toShort + case "int" => value.toInt + case "long" => value.toLong + case "float" => value.toFloat + case "double" => value.toDouble + case "boolean" => value.toBoolean + case "byte" => value.toByte + case "bigint" => value.toLong + case "decimal" => value.toDouble + case "array" => gson.fromJson(value, classOf[util.ArrayList[Object]]) + case "map" => gson.fromJson(value, classOf[util.HashMap[Object, Object]]) + case "struct" => gson.fromJson(value, classOf[JsonObject]) + case _ => value + } + } + } + } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/IExcelRowDeal.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java similarity index 76% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/IExcelRowDeal.java rename to linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java index 4051041747..4c914bc3f4 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/IExcelRowDeal.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java @@ -15,12 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.excel; +package org.apache.linkis.governance.common.constant; -import org.apache.poi.hssf.record.BoundSheetRecord; - -import java.util.List; - -interface IExcelRowDeal { - void dealRow(BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist); +public class CodeConstants { + // will auto append at end of scala code; make sure the last line is not a comment + public static String SCALA_CODE_AUTO_APPEND_CODE = "val linkisVar=123"; } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java new file mode 100644 index 0000000000..13cbac5577 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.governance.common.entity; + +public class TemplateConfKey { + + private String templateUuid; + + private String key; + + private String templateName; + + private String configValue; + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + @Override + public String toString() { + return "TemplateKey{" + + "templateUuid='" + + templateUuid + + '\'' + + ", key='" + + key + + '\'' + + ", templateName='" + + templateName + + '\'' + + ", configValue='" + + configValue + + '\'' + + '}'; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java index d5d97aa364..46fa8a69ef 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java @@ -49,6 +49,9 @@ public class JobRequest { /** result location */ private String resultLocation; + /** Task status updates is ordered, if false, not checked */ + private Boolean updateOrderFlag = true; + private String observeInfo; private Map metrics = new HashMap<>(); @@ -205,6 +208,14 @@ public void 
setObserveInfo(String observeInfo) { this.observeInfo = observeInfo; } + public Boolean getUpdateOrderFlag() { + return updateOrderFlag; + } + + public void setUpdateOrderFlag(Boolean updateOrderFlag) { + this.updateOrderFlag = updateOrderFlag; + } + @Override public String toString() { return "JobRequest{" diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java similarity index 55% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java rename to linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java index 62f7aea661..e8b566cda1 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java @@ -15,39 +15,38 @@ * limitations under the License. 
*/ -package org.apache.linkis.engineplugin.server.service; +package org.apache.linkis.governance.common.protocol.conf; -import org.apache.linkis.protocol.message.RequestMethod; import org.apache.linkis.protocol.message.RequestProtocol; -public abstract class EngineConnResourceRequest implements RequestProtocol, RequestMethod { +public class TemplateConfRequest implements RequestProtocol { - private String engineConnType; - private String version; + private String templateUuid; - private boolean force; + private String templateName; - public String getEngineConnType() { - return engineConnType; + public TemplateConfRequest(String templateUuid, String templateName) { + this.templateUuid = templateUuid; + this.templateName = templateName; } - public void setEngineConnType(String engineConnType) { - this.engineConnType = engineConnType; + public TemplateConfRequest(String templateUuid) { + this.templateUuid = templateUuid; } - public String getVersion() { - return version; + public String getTemplateUuid() { + return templateUuid; } - public void setVersion(String version) { - this.version = version; + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; } - public boolean getForce() { - return force; + public String getTemplateName() { + return templateName; } - public void setForce(boolean force) { - this.force = force; + public void setTemplateName(String templateName) { + this.templateName = templateName; } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/ManagerLabelService.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java similarity index 68% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/ManagerLabelService.java rename to 
linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java index 01cbfa9ac0..8822fe988d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/ManagerLabelService.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java @@ -15,18 +15,22 @@ * limitations under the License. */ -package org.apache.linkis.manager.service.common.label; +package org.apache.linkis.governance.common.protocol.conf; -import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.governance.common.entity.TemplateConfKey; +import java.util.ArrayList; import java.util.List; -public interface ManagerLabelService { +public class TemplateConfResponse { - boolean isEngine(ServiceInstance serviceInstance); + private List list = new ArrayList<>(); - boolean isEngine(List> labels); + public List getList() { + return list; + } - boolean isEM(ServiceInstance serviceInstance); + public void setList(List list) { + this.list = list; + } } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java deleted file mode 100644 index 739c256af1..0000000000 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.governance.common.utils; - -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.time.DateFormatUtils; - -import java.io.File; -import java.nio.file.Paths; - -public class ECPathUtils { - - public static String getECWOrkDirPathSuffix(String user, String ticketId, String engineType) { - String engineTypeRes = ""; - if (StringUtils.isNotBlank(engineType)) { - engineTypeRes = engineType; - } - File file = - Paths.get( - user, DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd"), engineTypeRes) - .toFile(); - return file.getPath() + File.separator + ticketId; - } - - public static String getECLogDirSuffix( - EngineTypeLabel engineTypeLabel, UserCreatorLabel userCreatorLabel, String ticketId) { - if (null == engineTypeLabel || null == userCreatorLabel) { - return ""; - } - String ecwOrkDirPathSuffix = - ECPathUtils.getECWOrkDirPathSuffix( - userCreatorLabel.getUser(), ticketId, engineTypeLabel.getEngineType()); - return ecwOrkDirPathSuffix + File.separator + "logs"; - } -} diff --git 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala similarity index 81% rename from linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala rename to linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala index a4671eaa17..b0c9dda666 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala @@ -18,7 +18,7 @@ package org.apache.linkis.governance.common.conf import org.apache.linkis.common.conf.{CommonVars, Configuration} -import org.apache.linkis.manager.label.conf.LabelCommonConfig +import org.apache.linkis.governance.common.constant.ec.ECConstants object GovernanceCommonConf { @@ -26,11 +26,9 @@ object GovernanceCommonConf { val WILDCARD_CONSTANT = "*" - val SPARK_ENGINE_VERSION = - CommonVars("wds.linkis.spark.engine.version", LabelCommonConfig.SPARK_ENGINE_VERSION.getValue) + val SPARK_ENGINE_VERSION = CommonVars("wds.linkis.spark.engine.version", "2.4.3") - val HIVE_ENGINE_VERSION = - CommonVars("wds.linkis.hive.engine.version", LabelCommonConfig.HIVE_ENGINE_VERSION.getValue) + val HIVE_ENGINE_VERSION = CommonVars("wds.linkis.hive.engine.version", "1.2.1") val PYTHON_ENGINE_VERSION = CommonVars("wds.linkis.python.engine.version", "python2") @@ -46,9 +44,7 @@ object GovernanceCommonConf { val ENGINE_APPLICATION_MANAGER_SPRING_NAME = 
CommonVars("wds.linkis.application.manager.name", "linkis-cg-linkismanager") - val ENGINE_CONN_PORT_RANGE = CommonVars("linkis.engineconn.port.range", "-") - - val ENGINE_CONN_DEBUG_PORT_RANGE = CommonVars("linkis.engineconn.debug.port.range", "-") + val ENGINE_CONN_PORT_RANGE = CommonVars("wds.linkis.engineconn.port.range", "-") val MANAGER_SERVICE_NAME = CommonVars( @@ -90,10 +86,13 @@ object GovernanceCommonConf { val EC_APP_MANAGE_MODE = CommonVars("linkis.ec.app.manage.mode", "attach") - val SCALA_PARSE_APPEND_CODE_ENABLED = - CommonVars("linkis.scala.parse.append.code.enable", true).getValue - - val SCALA_PARSE_APPEND_CODE = - CommonVars("linkis.scala.parse.append.code", "val linkisVar=1").getValue + /** + * DEFAULT_LOGPATH_PREFIX is the prefix that represents the default log storage path + * DEFAULT_LOGPATH_PREFIX 是表示默认的日志存储路径的前缀 和 结果集的前缀 + */ + val DEFAULT_LOGPATH_PREFIX = CommonVars[String]( + "wds.linkis.entrance.config.log.path", + CommonVars[String]("wds.linkis.filesystem.hdfs.root.path").getValue + ).getValue } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala index a94eadf422..c418201f43 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala @@ -74,4 +74,8 @@ object ECConstants { val EC_OPERATE_STATUS = "status" val YARN_APP_RESULT_LIST_KEY = "yarnAppResultList" + + val HIVE_OPTS = "HIVE_OPTS" + + val SPARK_SUBMIT_OPTS = "SPARK_SUBMIT_OPTS" } diff --git 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala index 8741c4297f..110b02b8fe 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala @@ -17,11 +17,6 @@ package org.apache.linkis.governance.common.constant.job -/** - * @date - * 2021/3/17 - * @description - */ object JobRequestConstants { val JOB_ID = "jobId" @@ -34,4 +29,11 @@ object JobRequestConstants { val JOB_DETAIL_LIST = "jobDetailList" + val JOB_SOURCE_TAGS = "job.source.tags" + + val LINKIS_JDBC_DEFAULT_DB = "linkis.jdbc.default.db" + + val ENABLE_DIRECT_PUSH = "enableDirectPush" + + val DIRECT_PUSH_FETCH_SIZE = "direct_push_fetch_size" } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala index 544dfcdab6..4168308bd6 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala @@ -17,15 +17,14 @@ package org.apache.linkis.governance.common.exception -import org.apache.linkis.common.exception.{ErrorException, ExceptionLevel, 
LinkisRuntimeException} +import org.apache.linkis.common.exception.ErrorException class GovernanceErrorException(errorCode: Int, errorMsg: String) - extends LinkisRuntimeException(errorCode, errorMsg) { + extends ErrorException(errorCode, errorMsg) { def this(errorCode: Int, errorMsg: String, cause: Throwable) = { this(errorCode, errorMsg) initCause(cause) } - override def getLevel: ExceptionLevel = ExceptionLevel.ERROR } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala index 87576d5e48..d5669ad428 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala @@ -19,6 +19,7 @@ package org.apache.linkis.governance.common.paser import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.governance.common.constant.CodeConstants import org.apache.linkis.governance.common.paser.CodeType.CodeType import org.apache.commons.lang3.StringUtils @@ -86,6 +87,11 @@ abstract class CombinedEngineCodeParser extends CodeParser { } +/** + * Scala is no longer using Parser but instead using EmptyParser. 
If there is a comment at the end, + * it will cause the task to become stuck + */ +@deprecated class ScalaCodeParser extends SingleCodeParser with Logging { override val codeType: CodeType = CodeType.Scala @@ -109,11 +115,9 @@ class ScalaCodeParser extends SingleCodeParser with Logging { case _ => } if (statementBuffer.nonEmpty) codeBuffer.append(statementBuffer.mkString("\n")) - - // Append code `val linkisVar=1` in ends to prevent bugs that do not exit tasks for a long time - if (GovernanceCommonConf.SCALA_PARSE_APPEND_CODE_ENABLED) { - codeBuffer.append(GovernanceCommonConf.SCALA_PARSE_APPEND_CODE) - } + // Make sure the last line is not a comment + codeBuffer.append("\n") + codeBuffer.append(CodeConstants.SCALA_CODE_AUTO_APPEND_CODE) codeBuffer.toArray } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala new file mode 100644 index 0000000000..43d3c86b13 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.protocol.conf + +import org.apache.linkis.protocol.message.RequestProtocol + +trait AcrossClusterConf extends RequestProtocol + +case class AcrossClusterRequest(username: String) extends AcrossClusterConf + +case class AcrossClusterResponse(clusterName: String, queueName: String) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala new file mode 100644 index 0000000000..dbfe3f7b74 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.protocol.conf + +import org.apache.linkis.protocol.message.RequestProtocol + +trait DepartmentConf extends RequestProtocol + +case class DepartmentRequest(user: String) extends DepartmentConf + +case class DepartmentResponse(user: String, departmentId: String, departmentName: String) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala index e8d129453e..948501e772 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala @@ -23,4 +23,13 @@ trait TenantConf extends RequestProtocol case class TenantRequest(user: String, creator: String) extends TenantConf -case class TenantResponse(user: String, creator: String, tenant: String) +case class TenantResponse(user: String, creator: String, isValid: String, tenant: String) + +case class DepartTenantRequest(creator: String, departmentId: String) extends TenantConf + +case class DepartTenantResponse( + creator: String, + departmentId: String, + isValid: String, + tenant: String +) diff --git 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala index 2e44739787..829a967aab 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala @@ -18,6 +18,7 @@ package org.apache.linkis.governance.common.protocol.job import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.protocol.RetryableProtocol import org.apache.linkis.protocol.message.RequestProtocol import java.util @@ -25,7 +26,7 @@ import java.util.Date import scala.beans.BeanProperty -trait JobReq extends RequestProtocol +trait JobReq extends RequestProtocol with RetryableProtocol case class JobReqInsert(jobReq: JobRequest) extends JobReq @@ -51,3 +52,10 @@ class RequestOneJob extends JobReq { } case class RequestAllJob(instance: String) extends JobReq + +case class RequestFailoverJob( + reqMap: util.Map[String, java.lang.Long], + statusList: util.List[String], + startTimestamp: Long, + limit: Int = 10 +) extends JobReq diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala index b136c61099..f59761dc43 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala @@ -24,14 +24,11 @@ import org.apache.linkis.protocol.message.RequestProtocol import java.util -case class ResponseTaskExecute(execId: String) - case class ResponseTaskProgress( execId: String, progress: Float, progressInfo: Array[JobProgressInfo] -) extends RetryableProtocol - with RequestProtocol +) extends RequestProtocol case class ResponseEngineLock(lock: String) @@ -42,34 +39,7 @@ case class EngineConcurrentInfo( failedTasks: Int ) -case class EngineOverloadInfo(maxMemory: Long, usedMemory: Long, systemCPUUsed: Float) - -case class ResponseEngineStatusChanged( - instance: String, - fromStatus: ExecutionNodeStatus, - toStatus: ExecutionNodeStatus, - overload: EngineOverloadInfo, - concurrent: EngineConcurrentInfo -) extends BroadcastProtocol - -case class ResponseEngineInfo( - createEntranceInstance: String, - creator: String, - user: String, - properties: util.Map[String, String] -) - -case class ResponseEngineStatus( - instance: String, - status: ExecutionNodeStatus, - overload: EngineOverloadInfo, - concurrent: EngineConcurrentInfo, - engineInfo: ResponseEngineInfo -) - -case class ResponseTaskLog(execId: String, log: String) - extends RetryableProtocol - with RequestProtocol +case class ResponseTaskLog(execId: String, log: String) extends RequestProtocol case class ResponseTaskError(execId: String, errorMsg: String) extends RetryableProtocol diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala new file mode 100644 index 0000000000..236046f3d4 --- /dev/null +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.utils + +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} + +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.time.DateFormatUtils + +import java.io.File +import java.nio.file.Paths + +object ECPathUtils { + + def getECWOrkDirPathSuffix( + user: String, + ticketId: String, + engineType: String, + timeStamp: Long = System.currentTimeMillis() + ): String = { + val suffix = if (StringUtils.isBlank(engineType)) { + Paths + .get(user, DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd")) + .toFile + .getPath + } else { + Paths + .get(user, DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd"), engineType) + .toFile + .getPath + } + suffix + File.separator + ticketId + } + + def getECLogDirSuffix( + engineTypeLabel: EngineTypeLabel, + userCreatorLabel: UserCreatorLabel, + ticketId: String + ): String = { + if (null == engineTypeLabel || null == userCreatorLabel) { + return "" + } + val 
suffix = ECPathUtils.getECWOrkDirPathSuffix( + userCreatorLabel.getUser, + ticketId, + engineTypeLabel.getEngineType + ) + suffix + File.separator + "logs" + } + +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala index 684dd371a1..31d9b1e9b2 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala @@ -145,9 +145,13 @@ class DefaultEngineConnArgumentsParser extends EngineConnArgumentsParser { val options = ArrayBuffer[String]() def write(confMap: Map[String, String], optionType: String): Unit = confMap.foreach { case (key, value) => - if (StringUtils.isNotEmpty(key) && StringUtils.isNotEmpty(value)) { + var realValue = value + if (key.startsWith("label") && StringUtils.isEmpty(realValue)) { + realValue = "true" + } + if (StringUtils.isNotEmpty(key) && StringUtils.isNotEmpty(realValue)) { options += optionType - options += (key + "=" + value) + options += (key + "=" + realValue) } } write(engineConnArguments.getEngineConnConfMap, ENGINE_CONN_CONF) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala index ddcb17a3b2..ae83749ecb 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala @@ -24,11 +24,14 @@ import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.commons.lang3.StringUtils import java.io.File +import java.text.SimpleDateFormat import java.util -import java.util.{ArrayList, List} +import java.util.{ArrayList, Date, List} object GovernanceUtils extends Logging { + val LINKIS_DEFAULT_RES_CREATOR = "linkis_default" + def killProcess(pid: String, desc: String, isSudo: Boolean): Unit = { val subProcessKillScriptPath = Configuration.getLinkisHome() + "/sbin/kill-process-by-pid.sh" if ( @@ -121,4 +124,26 @@ object GovernanceUtils extends Logging { } } + /** + * get result path parentPath: resPrefix + dateStr + result + creator subPath: parentPath + + * executeUser + taskid + filename + * + * @param creator + * @return + */ + def getResultParentPath(creator: String): String = { + val resPrefix = GovernanceCommonConf.DEFAULT_LOGPATH_PREFIX + val resStb = new StringBuilder() + if (resStb.endsWith("/")) { + resStb.append(resPrefix) + } else { + resStb.append(resPrefix).append("/") + } + val dateFormat = new SimpleDateFormat("yyyy-MM-dd") + val date = new Date(System.currentTimeMillis) + val dateString = dateFormat.format(date) + resStb.append("result").append("/").append(dateString).append("/").append(creator) + resStb.toString() + } + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala index d328ebb63a..8c6522cdbb 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala @@ -19,8 +19,6 @@ package org.apache.linkis.governance.common.utils import org.apache.linkis.governance.common.constant.job.JobRequestConstants -import org.apache.commons.collections.MapUtils - import java.util; object JobUtils { @@ -36,7 +34,33 @@ object JobUtils { } def getJobIdFromStringMap(map: util.Map[String, String]): String = { - if (MapUtils.isNotEmpty(map)) map.getOrDefault(JobRequestConstants.JOB_ID, null) else null + if (null != map && map.containsKey(JobRequestConstants.JOB_ID)) { + val value = map.get(JobRequestConstants.JOB_ID) + if (null != value) { + return value + } + } + null + } + + def getJobSourceTagsFromStringMap(map: util.Map[String, String]): String = { + if (null != map && map.containsKey(JobRequestConstants.JOB_SOURCE_TAGS)) { + val value = map.get(JobRequestConstants.JOB_SOURCE_TAGS) + if (null != value) { + return value + } + } + null + } + + def getJobSourceTagsFromObjectMap(map: util.Map[String, Object]): String = { + if (null != map && map.containsKey(JobRequestConstants.JOB_SOURCE_TAGS)) { + val value = map.get(JobRequestConstants.JOB_SOURCE_TAGS) + if (null != value) { + return value.toString + } + } + null } } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala index 2c426339b0..dd4b9bcffa 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala @@ -58,14 
+58,14 @@ object OnceExecutorContentUtils { def mapToContent(contentMap: util.Map[String, Object]): OnceExecutorContent = { val onceExecutorContent = new OnceExecutorContent - def getOrNull(key: String): util.Map[String, Object] = contentMap.get(key) match { + implicit def getOrNull(key: String): util.Map[String, Object] = contentMap.get(key) match { case map: util.Map[String, Object] => map case _ => null } - onceExecutorContent.setJobContent(getOrNull(TaskConstant.JOB_CONTENT)) - onceExecutorContent.setRuntimeMap(getOrNull(TaskConstant.PARAMS_CONFIGURATION_RUNTIME)) - onceExecutorContent.setSourceMap(getOrNull(TaskConstant.SOURCE)) - onceExecutorContent.setVariableMap(getOrNull(TaskConstant.PARAMS_VARIABLE)) + onceExecutorContent.setJobContent(TaskConstant.JOB_CONTENT) + onceExecutorContent.setRuntimeMap(TaskConstant.PARAMS_CONFIGURATION_RUNTIME) + onceExecutorContent.setSourceMap(TaskConstant.SOURCE) + onceExecutorContent.setVariableMap(TaskConstant.PARAMS_VARIABLE) onceExecutorContent } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala index db7045baec..04adf3446c 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala @@ -29,7 +29,8 @@ class ScalaCodeParserTest { "val codeBuffer = new ArrayBuffer[String]()\n val statementBuffer = new ArrayBuffer[String]()" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(scalaCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.size == 3) + } @Test @@ -40,7 
+41,7 @@ class ScalaCodeParserTest { " def addInt( a:Int, b:Int )\n var sum:Int = 0\n sum = a + b\n return sum\n }" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(abnormalCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.length == 3) } @@ -53,7 +54,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(importCode) - Assertions.assertTrue(array.length == 3) + Assertions.assertTrue(array.length == 4) } @@ -67,7 +68,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val arrayResult1 = scalaCodeParser.parse(specialCodeExp1) - Assertions.assertTrue(arrayResult1.length == 3) + Assertions.assertTrue(arrayResult1.length == 4) val specialCodeExp2 = " @BeanProperty\n var id: Long = _\n @BeanProperty\n var status: Int = 0\n " + @@ -78,7 +79,7 @@ class ScalaCodeParserTest { ".append(data, that.data)\n .isEquals\n }" val arrayResult2 = scalaCodeParser.parse(specialCodeExp2) - Assertions.assertTrue(arrayResult2.length == 2) + Assertions.assertTrue(arrayResult2.length == 3) } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala index 672c0e8acb..cc79e24d4f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala @@ -42,7 +42,6 @@ import org.apache.commons.lang3.StringUtils import java.io.{File, InputStream, OutputStream} import scala.collection.JavaConverters._ -import 
scala.collection.mutable trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { @@ -102,12 +101,7 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { case HIVE_CONF_DIR => putIfExists(HIVE_CONF_DIR) case JAVA_HOME => putIfExists(JAVA_HOME) case RANDOM_PORT => - environment.put( - RANDOM_PORT.toString, - PortUtils - .findAvailPortByRange(GovernanceCommonConf.ENGINE_CONN_DEBUG_PORT_RANGE.getValue) - .toString - ) + environment.put(RANDOM_PORT.toString, PortUtils.findAvailPort().toString) case PREFER_IP_ADDRESS => environment.put(PREFER_IP_ADDRESS.toString, Configuration.PREFER_IP_ADDRESS.toString) case ENGINECONN_ENVKEYS => @@ -157,27 +151,30 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { def getPid(): Option[String] = None protected def getCommandArgs: Array[String] = { - val recordMap: mutable.Map[String, String] = mutable.Map() - request.creationDesc.properties.asScala.foreach { case (k, v) => - if (k.contains(" ") || (v != null && v.contains(" "))) recordMap.put(k, v) - } - if (recordMap.size > 0) { - val keyAndValue = new StringBuilder - for (kv <- recordMap) { - keyAndValue.append(s"${kv._1}->${kv._2};") - } + if ( + request.creationDesc.properties.asScala.exists { case (k, v) => + k.contains(" ") || (v != null && v.contains(" ")) + } + ) { throw new ErrorException( 30000, - s"Startup parameters contain spaces! 
The key and value values of all its parameters are(启动参数中包含空格!其所有参数的key和value值分别为):${keyAndValue.toString()}" - ) + "Startup parameters contain spaces!(启动参数中包含空格!)" + ) // TODO exception } val arguments = EngineConnArgumentsBuilder.newBuilder() engineConnPort = PortUtils .findAvailPortByRange(GovernanceCommonConf.ENGINE_CONN_PORT_RANGE.getValue) .toString - var springConf = Map("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") - + var springConf = + Map[String, String]("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") + val properties = + PortUtils.readFromProperties(Configuration.getLinkisHome + "/conf/version.properties") + if (StringUtils.isNotBlank(properties.getProperty("version"))) { + springConf += ("eureka.instance.metadata-map.linkis.app.version" -> properties.getProperty( + "version" + )) + } request.creationDesc.properties.asScala.filter(_._1.startsWith("spring.")).foreach { case (k, v) => springConf = springConf + (k -> v) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala index b3715a8910..8552020493 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala @@ -17,15 +17,16 @@ package org.apache.linkis.ecm.core.utils -import org.apache.linkis.common.utils.Utils +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils -import java.io.IOException +import java.io.{BufferedReader, FileReader, IOException} 
import java.net.ServerSocket +import java.util.Properties -object PortUtils { +object PortUtils extends Logging { /** * portRange: '-' is the separator @@ -43,7 +44,7 @@ object PortUtils { val ports = portRange.split(separator, -1) if (!ports(0).isEmpty) start = ports(0).toInt if (!ports(1).isEmpty) end = ports(1).toInt - val availablePort = start to end find { port => + val availablePort = start until end find { port => try { new ServerSocket(port).close() true @@ -62,4 +63,23 @@ object PortUtils { Utils.tryFinally(socket.getLocalPort)(IOUtils.closeQuietly(socket)) } + def readFromProperties(propertiesFile: String): Properties = { + val properties: Properties = new Properties + var reader: BufferedReader = null; + try { + reader = new BufferedReader(new FileReader(propertiesFile)) + properties.load(reader) + } catch { + case e: Exception => + logger.warn(s"loading vsersion faild with path $propertiesFile error:$e") + } finally { + try if (reader != null) reader.close + catch { + case e: Exception => + logger.warn(s"try to close buffered reader with error:${e.getMessage}") + } + } + properties + } + } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml index 41022d30da..99458c8afc 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml @@ -51,6 +51,12 @@ linkis-rpc ${project.version} provided + + + com.google.guava + guava + + diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml index da4e71662b..f24c7e21a1 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml @@ -244,9 +244,6 @@ org.slf4j:jul-to-slf4j:jar org.slf4j:slf4j-api:jar org.springframework.boot:spring-boot:jar - org.springframework.boot:spring-boot-actuator:jar - org.springframework.boot:spring-boot-actuator-autoconfigure:jar - org.springframework.boot:spring-boot-autoconfigure:jar org.springframework.boot:spring-boot-starter:jar org.springframework.boot:spring-boot-starter-actuator:jar org.springframework.boot:spring-boot-starter-aop:jar diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java index ca4412824d..0c745ef64d 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java @@ -33,7 +33,15 @@ public enum EngineconnServerErrorCodeSummary implements LinkisErrorCode { 11110, "the parameters of engineConnInstance and ticketId are both not exists.(engineConnInstance 和ticketId 的参数都不存在.)"), LOG_IS_NOT_EXISTS(11110, "Log directory {0} does not exists.(日志目录 {0} 不存在.)"), - FAILED_TO_DOWNLOAD(911115, "failed to downLoad(下载失败)"); + FAILED_TO_DOWNLOAD(911115, "failed to downLoad(下载失败)"), + FILE_IS_OVERSIZE(911116, "Download file has exceeded 100MB(下载文件已超过100M)"), + PARAMETER_NOT_NULL(911117, "Parameter {0} cannot be 
empty (参数 {0} 不能为空)"), + LOGTYPE_ERROR( + 911118, + "logType only supports stdout, stderr, gc, yarnApp(logType仅支持stdout,stderr,gc,yarnApp)"), + NOT_PERMISSION( + 911119, "You {0} have no permission to download Log in ECM {1}(用户 {0} 无权限下载 ECM {1} 日志)"), + ; /** (errorCode)错误码 */ private final int errorCode; diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java new file mode 100644 index 0000000000..2de878a24c --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.restful; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.Consts; + +import org.springframework.web.bind.annotation.*; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.attribute.FileOwnerAttributeView; +import java.nio.file.attribute.UserPrincipal; +import java.text.MessageFormat; + +import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.*; + +@Api(tags = "ECM") +@RequestMapping(path = "/engineconnManager") +@RestController +public class ECMRestfulApi { + + private final Logger logger = LoggerFactory.getLogger(ECMRestfulApi.class); + + /** + * * Reason for using the get method: Added gateway forwarding rules, which only support get + * requests + * + * @param req + * @param response + * @param emInstance + * @param instance + * @param logDirSuffix + * @param logType + * @throws IOException + */ + @ApiOperation( + value = "downloadEngineLog", + notes = "download engine log", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "emInstance", + required = true, + dataType = "String", + example = "xxx0002:9102"), + @ApiImplicitParam( + name = "instance", + required = true, + dataType = "String", + example = "xxx0002:35873"), + @ApiImplicitParam(name = "logDirSuffix", required = 
true, dataType = "String"), + @ApiImplicitParam(name = "logType", required = true, dataType = "String") + }) + @ApiOperationSupport(ignoreParameters = {"json"}) + @RequestMapping(path = "/downloadEngineLog", method = RequestMethod.GET) + public Message downloadEngineLog( + HttpServletRequest req, + HttpServletResponse response, + @RequestParam(value = "emInstance") String emInstance, + @RequestParam(value = "instance") String instance, + @RequestParam(value = "logDirSuffix") String logDirSuffix, + @RequestParam(value = "logType") String logType) + throws IOException { + String userName = ModuleUserUtils.getOperationUser(req, "downloadEngineLog"); + if (StringUtils.isBlank(instance)) { + return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "instance")); + } + if (StringUtils.isBlank(logDirSuffix)) { + return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "logDirSuffix")); + } + if (StringUtils.isBlank(logType)) { + return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "logType")); + } else if (!logType.equals("stdout") + && !logType.equals("stderr") + && !logType.equals("gc") + && !logType.equals("yarnApp")) { + return Message.error(LOGTYPE_ERROR.getErrorDesc()); + } + // 获取文件的权限归属者 + FileOwnerAttributeView ownerView = + Files.getFileAttributeView( + Paths.get(logDirSuffix + "/" + logType), FileOwnerAttributeView.class); + UserPrincipal owner = ownerView.getOwner(); + if (!owner.getName().equals(userName) + && Configuration.isNotAdmin(userName) + && Configuration.isNotJobHistoryAdmin(userName)) { + return Message.error( + MessageFormat.format(NOT_PERMISSION.getErrorDesc(), userName, emInstance)); + } + File inputFile = new File(logDirSuffix, logType); + if (!inputFile.exists()) { + return Message.error(MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc(), logDirSuffix)); + } else { + long fileSizeInBytes = inputFile.length(); + long fileSizeInMegabytes = fileSizeInBytes / (1024 * 1024); + if 
(fileSizeInMegabytes > 100) { + return Message.error(MessageFormat.format(FILE_IS_OVERSIZE.getErrorDesc(), logDirSuffix)); + } + ServletOutputStream outputStream = null; + FileInputStream inputStream = null; + BufferedInputStream fis = null; + PrintWriter writer = null; + try { + inputStream = new FileInputStream(inputFile); + fis = new BufferedInputStream(inputStream); + byte[] buffer = new byte[1024]; + int bytesRead = 0; + response.setCharacterEncoding(Consts.UTF_8.toString()); + java.nio.file.Path source = Paths.get(inputFile.getPath()); + response.addHeader("Content-Type", Files.probeContentType(source)); + // filename eg:xxx002_11529_stdout.txt + response.addHeader( + "Content-Disposition", + "attachment;filename=" + instance.replace(":", "_") + "_" + logType + ".txt"); + response.addHeader("Content-Length", fileSizeInBytes + ""); + outputStream = response.getOutputStream(); + while ((bytesRead = fis.read(buffer, 0, 1024)) != -1) { + outputStream.write(buffer, 0, bytesRead); + } + } catch (IOException e) { + logger.error("Download EngineLog Failed Msg :", e); + response.reset(); + response.setCharacterEncoding(Consts.UTF_8.toString()); + response.setContentType("text/plain; charset=utf-8"); + writer = response.getWriter(); + writer.append("error(错误):" + e.getMessage()); + writer.flush(); + } finally { + if (outputStream != null) { + outputStream.flush(); + } + IOUtils.closeQuietly(outputStream); + IOUtils.closeQuietly(fis); + IOUtils.closeQuietly(inputStream); + } + return Message.ok(); + } + } +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java index 111ad896e0..409f0d7e98 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java @@ -46,7 +46,7 @@ public void run() { cmdlist.add("sh"); cmdlist.add(shellPath + "linkis-ec-clear.sh"); try { - Utils.exec(cmdlist.toArray(new String[0]), 3000L); + Utils.exec(cmdlist.toArray(new String[0]), 1800000L); } catch (Exception e) { logger.warn("Shell linkis-ec-clear.sh execution failed, msg:" + e.getMessage()); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java deleted file mode 100644 index c6f8368a1c..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.operator; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.ecm.server.conf.ECMConfiguration; -import org.apache.linkis.ecm.server.exception.ECMErrorException; -import org.apache.linkis.manager.common.operator.Operator; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.io.input.ReversedLinesFileReader; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang3.tuple.Triple; - -import java.io.File; -import java.io.IOException; -import java.io.RandomAccessFile; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.text.MessageFormat; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.*; - -public class EngineConnLogOperator implements Operator { - private static final Logger logger = LoggerFactory.getLogger(EngineConnLogOperator.class); - - public static final String OPERATOR_NAME = "engineConnLog"; - public static final CommonVars LOG_FILE_NAME = - CommonVars.apply("linkis.engineconn.log.filename", "stdout"); - public static final CommonVars MAX_LOG_FETCH_SIZE = - CommonVars.apply("linkis.engineconn.log.fetch.lines.max", 5000); - public static final CommonVars MAX_LOG_TAIL_START_SIZE = - CommonVars.apply("linkis.engineconn.log.tail.start.size"); - public static final CommonVars MULTILINE_PATTERN = - CommonVars.apply( - "linkis.engineconn.log.multiline.pattern", - "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}"); - public static final CommonVars MULTILINE_MAX = - CommonVars.apply("linkis.engineconn.log.multiline.max", 500); - - @Override - public String[] getNames() { - return new String[] 
{OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - File logPath = getLogPath(parameters); - int lastRows = getAs(parameters, "lastRows", 0); - int pageSize = getAs(parameters, "pageSize", 100); - int fromLine = getAs(parameters, "fromLine", 1); - boolean enableTail = getAs(parameters, "enableTail", false); - if (lastRows > EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue()) { - throw new ECMErrorException( - CANNOT_FETCH_MORE_THAN.getErrorCode(), - MessageFormat.format( - CANNOT_FETCH_MORE_THAN.getErrorDesc(), - EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue().toString())); - } else if (lastRows > 0) { - String logs = Utils.exec(new String[] {"tail", "-n", lastRows + "", logPath.getPath()}, 5000); - Map stringObjectHashMap = new HashMap<>(); - stringObjectHashMap.put("logs", logs.split("\n")); - stringObjectHashMap.put("rows", logs.length()); - return stringObjectHashMap; - } - - String ignoreKeywords = getAs(parameters, "ignoreKeywords", ""); - String[] ignoreKeywordList = - StringUtils.isNotEmpty(ignoreKeywords) ? ignoreKeywords.split(",") : new String[0]; - - String onlyKeywords = getAs(parameters, "onlyKeywords", ""); - String[] onlyKeywordList = - StringUtils.isNotEmpty(onlyKeywords) ? 
onlyKeywords.split(",") : new String[0]; - - RandomAccessFile randomReader = null; - ReversedLinesFileReader reversedReader = null; - try { - if (enableTail) { - logger.info("enable log operator from tail to read"); - reversedReader = new ReversedLinesFileReader(logPath, Charset.defaultCharset()); - } else { - randomReader = new RandomAccessFile(logPath, "r"); - } - - ArrayList logs = new ArrayList<>(pageSize); - int readLine = 0, skippedLine = 0, lineNum = 0; - boolean rowIgnore = false; - int ignoreLine = 0; - Pattern linePattern = Pattern.compile(EngineConnLogOperator.MULTILINE_PATTERN.getValue()); - - int maxMultiline = MULTILINE_MAX.getValue(); - String line = randomAndReversedReadLine(randomReader, reversedReader); - - while (readLine < pageSize && line != null) { - lineNum += 1; - if (skippedLine < fromLine - 1) { - skippedLine += 1; - } else { - if (rowIgnore) { - Matcher matcher = linePattern.matcher(line); - if (matcher.matches()) { - ignoreLine = 0; - rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList); - } else { - ignoreLine += 1; - if (ignoreLine >= maxMultiline) { - rowIgnore = false; - } - } - if (!matcher.matches()) { - rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList); - } - } else { - rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList); - } - if (!rowIgnore) { - logs.add(line); - readLine += 1; - } - } - line = randomAndReversedReadLine(randomReader, reversedReader); - } - - if (enableTail) { - Collections.reverse(logs); - } - - Map resultMap = new HashMap<>(); - resultMap.put("logPath", logPath.getPath()); - resultMap.put("logs", logs); - resultMap.put("endLine", lineNum); - resultMap.put("rows", readLine); - return resultMap; - } catch (IOException e) { - logger.info("EngineConnLogOperator apply failed", e); - throw new ECMErrorException( - LOG_IS_NOT_EXISTS.getErrorCode(), LOG_IS_NOT_EXISTS.getErrorDesc()); - } finally { - IOUtils.closeQuietly(randomReader); - IOUtils.closeQuietly(reversedReader); 
- } - } - - private String randomAndReversedReadLine( - RandomAccessFile randomReader, ReversedLinesFileReader reversedReader) throws IOException { - if (randomReader != null) { - String line = randomReader.readLine(); - if (line != null) { - return new String(line.getBytes(StandardCharsets.ISO_8859_1), Charset.defaultCharset()); - } else { - return null; - } - } else { - return reversedReader.readLine(); - } - } - - protected File getLogPath(Map parameters) { - String logType = getAs(parameters, "logType", EngineConnLogOperator.LOG_FILE_NAME.getValue()); - - Triple engineConnInfo = getEngineConnInfo(parameters); - String engineConnLogDir = engineConnInfo.getLeft(); - String engineConnInstance = engineConnInfo.getMiddle(); - String ticketId = engineConnInfo.getRight(); - - File logPath = new File(engineConnLogDir, logType); - if (!logPath.exists() || !logPath.isFile()) { - throw new ECMErrorException( - LOGFILE_IS_NOT_EXISTS.getErrorCode(), - MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc(), logPath.toString())); - } - logger.info( - String.format( - "Try to fetch EngineConn(id: %s, instance: %s) logs from %s.", - ticketId, engineConnInstance, logPath.getPath())); - return logPath; - } - - protected Triple getEngineConnInfo(Map parameters) { - String logDIrSuffix = getAs(parameters, "logDirSuffix", ""); - String engineConnLogDir = - ECMConfiguration.ENGINECONN_ROOT_DIR() + File.separator + logDIrSuffix; - String ticketId = getAs(parameters, "ticketId", ""); - String engineConnInstance = ""; - return Triple.of(engineConnLogDir, engineConnInstance, ticketId); - } - - private boolean includeLine(String line, String[] onlyKeywordList, String[] ignoreKeywordList) { - boolean accept = - ignoreKeywordList.length == 0 || !Arrays.stream(ignoreKeywordList).anyMatch(line::contains); - if (accept) { - accept = - onlyKeywordList.length == 0 || Arrays.stream(onlyKeywordList).anyMatch(line::contains); - } - return accept; - } -} diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java deleted file mode 100644 index 6d3548274c..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.operator; - -import org.apache.linkis.common.exception.WarnException; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary; -import org.apache.linkis.ecm.server.exception.ECMErrorException; -import org.apache.linkis.ecm.server.exception.ECMWarnException; - -import org.apache.commons.lang3.tuple.Triple; - -import java.io.File; -import java.text.MessageFormat; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.LOG_IS_NOT_EXISTS; - -public class EngineConnYarnLogOperator extends EngineConnLogOperator { - private static final Logger logger = LoggerFactory.getLogger(EngineConnYarnLogOperator.class); - - private static final String YARN_LOG_OPERATOR_NAME = "engineConnYarnLog"; - - @Override - public String[] getNames() { - return new String[] {EngineConnYarnLogOperator.YARN_LOG_OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - Map result = new HashMap<>(); - try { - result = super.apply(parameters); - } finally { - Object logPath = result.get("logPath"); - if (logPath instanceof String) { - File logFile = new File((String) logPath); - if (logFile.exists() && logFile.getName().startsWith(".")) { - // If is a temporary file, drop it - logger.info(String.format("Delete the temporary yarn log file: [%s]", logPath)); - if (!logFile.delete()) { - logger.warn(String.format("Fail to delete the temporary yarn log file: [%s]", logPath)); - } - } - } - } - return result; - } - - @Override - public File getLogPath(Map parameters) { - String ticketId, engineConnInstance, engineConnLogDir; - Triple engineConnInfo = getEngineConnInfo(parameters); - ticketId = engineConnInfo.getRight(); - engineConnInstance = engineConnInfo.getMiddle(); - engineConnLogDir = 
engineConnInfo.getLeft(); - - File rootLogDir = new File(engineConnLogDir); - if (!rootLogDir.exists() || !rootLogDir.isDirectory()) { - throw new ECMWarnException( - LOG_IS_NOT_EXISTS.getErrorCode(), - MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc(), rootLogDir)); - } - - String creator = getAsThrow(parameters, "creator"); - String applicationId = getAsThrow(parameters, "yarnApplicationId"); - File logPath = new File(engineConnLogDir, "yarn_" + applicationId); - if (!logPath.exists()) { - String tempLogFile = - String.format( - ".yarn_%s_%d_%d", - applicationId, System.currentTimeMillis(), Thread.currentThread().getId()); - try { - String command = - String.format( - "yarn logs -applicationId %s >> %s/%s", applicationId, rootLogDir, tempLogFile); - logger.info(String.format("Fetch yarn logs to temporary file: [%s]", command)); - - ProcessBuilder processBuilder = new ProcessBuilder(sudoCommands(creator, command)); - processBuilder.environment().putAll(System.getenv()); - processBuilder.redirectErrorStream(false); - Process process = processBuilder.start(); - boolean waitFor = process.waitFor(5, TimeUnit.SECONDS); - logger.trace(String.format("waitFor: %b, result: %d", waitFor, process.exitValue())); - if (waitFor && process.waitFor() == 0) { - command = - String.format( - "mv %s/%s %s/yarn_%s", rootLogDir, tempLogFile, rootLogDir, applicationId); - logger.info(String.format("Move and save yarn logs: [%s]", command)); - Utils.exec(sudoCommands(creator, command)); - } else { - logPath = new File(engineConnLogDir, tempLogFile); - if (!logPath.exists()) { - throw new WarnException( - -1, - String.format( - "Fetch yarn logs timeout, log aggregation has not completed or is not enabled")); - } - } - } catch (Exception e) { - throw new WarnException( - -1, - String.format( - "Fail to fetch yarn logs application: %s, message: %s", - applicationId, e.getMessage())); - } - } - if (!logPath.exists() || !logPath.isFile()) { - throw new ECMErrorException( - 
EngineconnServerErrorCodeSummary.LOGFILE_IS_NOT_EXISTS.getErrorCode(), - MessageFormat.format( - EngineconnServerErrorCodeSummary.LOGFILE_IS_NOT_EXISTS.getErrorDesc(), logPath)); - } - logger.info( - String.format( - "Try to fetch EngineConn(id: %s, instance: %s) yarn logs from %s in application id: %s", - ticketId, engineConnInstance, logPath.getPath(), applicationId)); - - return logPath; - } - - private String[] sudoCommands(String creator, String command) { - return new String[] { - "/bin/bash", - "-c", - "sudo su " + creator + " -c \"source ~/.bashrc 2>/dev/null; " + command + "\"" - }; - } -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java index a6a932a578..2e351b00df 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java @@ -21,6 +21,7 @@ import org.apache.linkis.common.utils.Utils; import org.apache.linkis.ecm.server.conf.ECMConfiguration; import org.apache.linkis.ecm.server.service.EngineConnKillService; +import org.apache.linkis.ecm.utils.ECMCacheUtils; import org.apache.linkis.engineconn.common.conf.EngineConnConf; import org.apache.linkis.governance.common.utils.GovernanceUtils; import org.apache.linkis.manager.common.constant.AMConstant; @@ -61,6 +62,7 @@ public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest String pid = null; if 
(AMConstant.PROCESS_MARK.equals(engineStopRequest.getIdentifierType()) && StringUtils.isNotBlank(engineStopRequest.getIdentifier())) { + ECMCacheUtils.putStopECToCache(engineStopRequest.getServiceInstance(), engineStopRequest); pid = engineStopRequest.getIdentifier(); } logger.info("dealEngineConnStop return pid: {}", pid); @@ -189,12 +191,11 @@ private String getYarnAppRegexByEngineType(String engineType) { case "sqoop": regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); break; + case "seatunnel": + case "flink": case "hive": regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); break; - case "seatunnel": - regex = EngineConnConf.SEATUNNEL_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; default: regex = ""; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java similarity index 53% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java rename to linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java index 8e5b57678a..52d140055d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java @@ -15,37 +15,26 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.label.score; +package org.apache.linkis.ecm.utils; import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.manager.common.entity.node.ScoreServiceInstance; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; -public class LabelScoreServiceInstance implements ScoreServiceInstance { +import java.util.concurrent.TimeUnit; - private double score = 0d; - private ServiceInstance serviceInstance; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; - public LabelScoreServiceInstance(ServiceInstance instance) { - this.serviceInstance = instance; - } - - @Override - public double getScore() { - return score; - } - - @Override - public void setScore(double score) { - this.score = score; - } +public class ECMCacheUtils { + private static Cache ecStopRequestCache = + CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.SECONDS).build(); - @Override - public ServiceInstance getServiceInstance() { - return serviceInstance; + public static void putStopECToCache( + ServiceInstance serviceInstance, EngineStopRequest engineStopRequest) { + ecStopRequestCache.put(serviceInstance, engineStopRequest); } - @Override - public void setServiceInstance(ServiceInstance serviceInstance) { - this.serviceInstance = serviceInstance; + public static EngineStopRequest getStopEC(ServiceInstance serviceInstance) { + return ecStopRequestCache.getIfPresent(serviceInstance); } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala index 8e5f9adfbb..dbd24e3562 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala @@ -20,7 +20,6 @@ package org.apache.linkis.ecm.server.conf import org.apache.linkis.common.conf.{CommonVars, TimeType} import org.apache.linkis.common.utils.ByteTimeUtils import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.manager.common.conf.RMConfiguration import java.io.File import java.util.concurrent.TimeUnit @@ -58,10 +57,9 @@ object ECMConfiguration { val ECM_MAX_CREATE_INSTANCES: Int = CommonVars[Integer]("wds.linkis.ecm.engineconn.instances.max", 50).getValue - val ECM_PROTECTED_MEMORY: Long = CommonVars[Long]( - "wds.linkis.ecm.protected.memory", - ByteTimeUtils.byteStringAsBytes("4g") - ).getValue + val ECM_PROTECTED_MEMORY: Long = ByteTimeUtils.byteStringAsBytes( + CommonVars[String]("wds.linkis.ecm.protected.memory", "10g").getValue + ) val ECM_PROTECTED_CPU_LOAD: Double = CommonVars[Double]("wds.linkis.ecm.protected.cpu.load", 0.98d).getValue @@ -81,7 +79,7 @@ object ECMConfiguration { GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue val ECM_HEALTH_REPORT_PERIOD: Long = - CommonVars("wds.linkis.ecm.health.report.period", 30).getValue + CommonVars("wds.linkis.ecm.health.report.period", 10).getValue val ECM_HEALTH_REPORT_DELAY: Long = CommonVars("wds.linkis.ecm.health.report.delay", 10).getValue @@ -117,16 +115,8 @@ object ECMConfiguration { val ECM_PROCESS_SCRIPT_KILL: Boolean = CommonVars[Boolean]("wds.linkis.ecm.script.kill.engineconn", true).getValue - val ECM_YARN_CLUSTER_NAME: String = - CommonVars( - "wds.linkis.ecm.yarn.cluster.name", - RMConfiguration.DEFAULT_YARN_CLUSTER_NAME.getValue - ).getValue - - val ECM_YARN_CLUSTER_TYPE: String = - CommonVars( - 
"wds.linkis.ecm.yarn.cluster.type", - RMConfiguration.DEFAULT_YARN_TYPE.getValue - ).getValue - + val EC_CAN_RETRY_EXIT_CODES: Array[Int] = + CommonVars[String]("linkis.ecm.can.retry.exit.codes", "143").getValue + .split(",") + .map(_.toInt); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala index 1b397d04e5..e50b2ff84f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala @@ -17,22 +17,15 @@ package org.apache.linkis.ecm.server.exception -import org.apache.linkis.common.exception.{ - ErrorException, - ExceptionLevel, - LinkisRuntimeException, - WarnException -} +import org.apache.linkis.common.exception.{ErrorException, WarnException} -class ECMErrorException(errorCode: Int, desc: String) - extends LinkisRuntimeException(errorCode, desc) { +class ECMErrorException(errorCode: Int, desc: String) extends ErrorException(errorCode, desc) { def this(errorCode: Int, desc: String, t: Throwable) { this(errorCode, desc) this.initCause(t) } - override def getLevel: ExceptionLevel = ExceptionLevel.ERROR } class ECMWarnException(errCode: Int, desc: String) extends WarnException(errCode, desc) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala index 39efe74cc8..26cccfc9c3 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala @@ -113,6 +113,7 @@ class JarUDFLoadECMHook extends ECMHook with Logging { ) } } + // LaunchConstants.addPathToClassPath(pel.environment, udfDir + File.separator + "*") val udfJars = fileNameSet.map(udfDir + File.separator + _) pel.environment.put(Environment.UDF_JARS.toString, udfJars.mkString(",")); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala new file mode 100644 index 0000000000..66327dadcf --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ecm.server.operator + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ +import org.apache.linkis.ecm.server.conf.ECMConfiguration +import org.apache.linkis.ecm.server.exception.ECMErrorException +import org.apache.linkis.ecm.server.service.LocalDirsHandleService +import org.apache.linkis.manager.common.operator.Operator + +import org.apache.commons.io.IOUtils +import org.apache.commons.io.input.ReversedLinesFileReader +import org.apache.commons.lang3.StringUtils + +import java.io.{File, RandomAccessFile} +import java.nio.charset.{Charset, StandardCharsets} +import java.text.MessageFormat +import java.util +import java.util.Collections + +import scala.util.matching.Regex + +class EngineConnLogOperator extends Operator with Logging { + + override def getNames: Array[String] = Array(EngineConnLogOperator.OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + val logPath = getLogPath + val lastRows = getAs("lastRows", 0) + val pageSize = getAs("pageSize", 100) + val fromLine = getAs("fromLine", 1) + val enableTail = getAs("enableTail", false) + if (lastRows > EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue) { + throw new ECMErrorException( + CANNOT_FETCH_MORE_THAN.getErrorCode, + MessageFormat.format( + CANNOT_FETCH_MORE_THAN.getErrorDesc, + EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue.toString + ) + ) + } else if (lastRows > 0) { + val logs 
= Utils.exec(Array("tail", "-n", lastRows + "", logPath.getPath), 5000).split("\n") + return Map("logs" -> logs, "rows" -> logs.length) + } + + val ignoreKeywords = getAs("ignoreKeywords", "") + val ignoreKeywordList = + if (StringUtils.isNotEmpty(ignoreKeywords)) ignoreKeywords.split(",") + else Array.empty[String] + val onlyKeywords = getAs("onlyKeywords", "") + val onlyKeywordList = + if (StringUtils.isNotEmpty(onlyKeywords)) onlyKeywords.split(",") else Array.empty[String] + var randomReader: RandomAccessFile = null + var reversedReader: ReversedLinesFileReader = null + if (enableTail) { + logger.info("enable log operator from tail to read") + reversedReader = new ReversedLinesFileReader(logPath, Charset.defaultCharset()) + } else { + randomReader = new RandomAccessFile(logPath, "r") + } + def randomAndReversedReadLine(): String = { + if (null != randomReader) { + val line = randomReader.readLine() + if (line != null) { + new String(line.getBytes(StandardCharsets.ISO_8859_1), Charset.defaultCharset()) + } else null + } else { + reversedReader.readLine() + } + } + val logs = new util.ArrayList[String](pageSize) + var readLine, skippedLine, lineNum = 0 + var rowIgnore = false + var ignoreLine = 0 + val linePattern = getLinePattern + val maxMultiline = EngineConnLogOperator.MULTILINE_MAX.getValue + Utils.tryFinally { + var line = randomAndReversedReadLine() + while (readLine < pageSize && line != null) { + lineNum += 1 + if (skippedLine < fromLine - 1) { + skippedLine += 1 + } else { + if (rowIgnore) { + linePattern match { + case reg: Regex => + if (reg.findFirstIn(line).isDefined) { + ignoreLine = 0 + rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList) + } else { + ignoreLine += 1 + if (ignoreLine >= maxMultiline) { + rowIgnore = false + } + } + case _ => rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList) + } + } else { + rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList) + } + if (!rowIgnore) { + logs.add(line) + 
readLine += 1 + } + } + line = randomAndReversedReadLine() + } + } { + IOUtils.closeQuietly(randomReader) + IOUtils.closeQuietly(reversedReader) + } + if (enableTail) Collections.reverse(logs) + Map("logPath" -> logPath.getPath, "logs" -> logs, "endLine" -> lineNum, "rows" -> readLine) + } + + protected def getLogPath(implicit parameters: Map[String, Any]): File = { + val (ticketId, engineConnInstance, engineConnLogDir) = getEngineConnInfo(parameters) + val logPath = + new File(engineConnLogDir, getAs("logType", EngineConnLogOperator.LOG_FILE_NAME.getValue)); + if (!logPath.exists() || !logPath.isFile) { + throw new ECMErrorException( + LOGFILE_IS_NOT_EXISTS.getErrorCode, + MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath) + ) + } + logger.info( + s"Try to fetch EngineConn(id: $ticketId, instance: $engineConnInstance) logs from ${logPath.getPath}." + ) + logPath + } + + protected def getLinePattern: Regex = { + Option(EngineConnLogOperator.MULTILINE_PATTERN.getValue) match { + case Some(pattern) => pattern.r + case _ => null + } + } + + protected def getEngineConnInfo(implicit + parameters: Map[String, Any] + ): (String, String, String) = { + val logDIrSuffix = getAs("logDirSuffix", "") + val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix + val ticketId = getAs("ticketId", "") + (ticketId, "", ecLogPath) + } + + private def includeLine( + line: String, + onlyKeywordList: Array[String], + ignoreKeywordList: Array[String] + ): Boolean = { + var accept: Boolean = ignoreKeywordList.isEmpty || !ignoreKeywordList.exists(line.contains) + if (accept) { + accept = onlyKeywordList.isEmpty || onlyKeywordList.exists(line.contains) + } + accept + } + +} + +object EngineConnLogOperator { + val OPERATOR_NAME = "engineConnLog" + val LOG_FILE_NAME = CommonVars("linkis.engineconn.log.filename", "stdout") + val MAX_LOG_FETCH_SIZE = CommonVars("linkis.engineconn.log.fetch.lines.max", 5000) + + val MAX_LOG_TAIL_START_SIZE = 
CommonVars("linkis.engineconn.log.tail.start.size", 20000) + + // yyyy-MM-dd HH:mm:ss.SSS + val MULTILINE_PATTERN = CommonVars( + "linkis.engineconn.log.multiline.pattern", + "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}" + ) + + val MULTILINE_MAX = CommonVars("linkis.engineconn.log.multiline.max", 500) +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala new file mode 100644 index 0000000000..7d8b5fc671 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala @@ -0,0 +1,194 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.server.operator + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.io.FsPath +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ +import org.apache.linkis.ecm.server.exception.ECMErrorException +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils + +import org.apache.commons.lang3.StringUtils + +import java.io.File +import java.text.MessageFormat +import java.util +import java.util.concurrent.{Callable, ConcurrentHashMap, ExecutorService, Future, TimeUnit} + +import scala.collection.JavaConverters._ +import scala.util.matching.Regex + +class EngineConnYarnLogOperator extends EngineConnLogOperator { + + private implicit val fs: FileSystem = + FSFactory.getFs(StorageUtils.FILE).asInstanceOf[FileSystem] + + /** + * Yarn log fetchers + */ + private def yarnLogFetchers: ConcurrentHashMap[String, Future[String]] = + new ConcurrentHashMap[String, Future[String]]() + + override def getNames: Array[String] = Array(EngineConnYarnLogOperator.OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + var result: Map[String, Any] = Map() + Utils.tryFinally { + result = super.apply(parameters) + result + } {} + } + + override def getLogPath(implicit parameters: Map[String, Any]): File = { + val applicationId = getAsThrow[String]("yarnApplicationId", parameters) + val (ticketId, engineConnInstance, engineConnLogDir) = getEngineConnInfo(parameters) + val rootLogPath = EngineConnYarnLogOperator.YARN_LOG_STORAGE_PATH.getValue match { + case storePath if StringUtils.isNotBlank(storePath) => + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + storePath + "/" + applicationId) + // Invoke to create directory + fs.mkdir(logPath) + // 777 permission + fs.setPermission(logPath, "rwxrwxrwx") + logPath + case _ => new 
FsPath(StorageUtils.FILE_SCHEMA + engineConnLogDir) + } + if (!fs.exists(rootLogPath) || !rootLogPath.toFile.isDirectory) { + throw new ECMErrorException( + LOG_IS_NOT_EXISTS.getErrorCode, + MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc, rootLogPath.getPath) + ) + } + val creator = getAsThrow[String]("creator", parameters) + var logPath = new FsPath( + StorageUtils.FILE_SCHEMA + rootLogPath.getPath + "/yarn_" + applicationId + ) + if (!fs.exists(logPath)) { + val fetcher = yarnLogFetchers.computeIfAbsent( + applicationId, + new util.function.Function[String, Future[String]] { + override def apply(v1: String): Future[String] = + requestToFetchYarnLogs(creator, applicationId, rootLogPath.getPath) + } + ) + // Just wait 5 seconds + Option(fetcher.get(5, TimeUnit.SECONDS)) match { + case Some(path) => logPath = new FsPath(StorageUtils.FILE_SCHEMA + path) + case _ => + } + + } + if (!fs.exists(logPath) || logPath.toFile.isDirectory) { + throw new ECMErrorException( + LOGFILE_IS_NOT_EXISTS.getErrorCode, + MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath.getPath) + ) + } + logger.info( + s"Try to fetch EngineConn(id: $ticketId, instance: $engineConnInstance) yarn logs from ${logPath.getPath} in application id: $applicationId" + ) + logPath.toFile + } + + /** + * Not support line pattern in yarn log + * @return + */ + override protected def getLinePattern: Regex = null + + /** + * Request the log fetcher + * + * @param creator + * creator + * @param applicationId + * application id + * @param logPath + * log path + * @return + */ + private def requestToFetchYarnLogs( + creator: String, + applicationId: String, + yarnLogDir: String + ): Future[String] = { + EngineConnYarnLogOperator.YARN_LOG_FETCH_SCHEDULER.submit(new Callable[String] { + override def call(): String = { + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + yarnLogDir + "/yarn_" + applicationId) + if (!fs.exists(logPath)) { + val tempLogFile = + 
s".yarn_${applicationId}_${System.currentTimeMillis()}_${Thread.currentThread().getId}" + Utils.tryCatch { + var command = + s"yarn logs -applicationId $applicationId >> $yarnLogDir/$tempLogFile" + logger.info(s"Fetch yarn logs to temporary file: [$command]") + val processBuilder = new ProcessBuilder(sudoCommands(creator, command): _*) + processBuilder.environment.putAll(sys.env.asJava) + processBuilder.redirectErrorStream(false) + val process = processBuilder.start() + val exitCode = process.waitFor() + logger.trace(s"Finish to fetch yan logs to temporary file, result: ${exitCode}") + if (exitCode == 0) { + command = s"mv $yarnLogDir/$tempLogFile $yarnLogDir/yarn_$applicationId" + logger.info(s"Move and save yarn logs(${applicationId}): [$command]") + Utils.exec(sudoCommands(creator, command)) + } + } { e: Throwable => + logger.error( + s"Fail to fetch yarn logs application: $applicationId, message: ${e.getMessage}" + ) + } + val tmpFile = new File(yarnLogDir, tempLogFile) + if (tmpFile.exists()) { + logger.info(s"Delete temporary file: [${tempLogFile}] in yarn logs fetcher") + tmpFile.delete() + } + } + // Remove future + yarnLogFetchers.remove(applicationId) + if (fs.exists(logPath)) logPath.getPath else null + } + }) + } + + private def sudoCommands(creator: String, command: String): Array[String] = { + Array( + "/bin/bash", + "-c", + "sudo su " + creator + " -c \"source ~/.bashrc 2>/dev/null; " + command + "\"" + ) + } + +} + +object EngineConnYarnLogOperator { + val OPERATOR_NAME = "engineConnYarnLog" + + // Specific the path to store the yarn logs + val YARN_LOG_STORAGE_PATH: CommonVars[String] = + CommonVars("linkis.engineconn.log.yarn.storage-path", "") + + val YARN_LOG_FETCH_THREAD: CommonVars[Int] = + CommonVars("linkis.engineconn.log.yarn.fetch.thread-num", 5) + + val YARN_LOG_FETCH_SCHEDULER: ExecutorService = + Utils.newFixedThreadPool(YARN_LOG_FETCH_THREAD.getValue + 1, "yarn_logs_fetch", false) + +} diff --git 
a/linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala similarity index 91% rename from linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java rename to linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala index 11a592924a..62ae7c1fea 100644 --- a/linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala @@ -15,6 +15,6 @@ * limitations under the License. */ -package org.apache.linkis.cs.client.test.restful; +package org.apache.linkis.ecm.server.service -public class ListenerTest {} +trait ECMMetricsService {} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala index 390822df0d..df00ed4960 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala @@ -121,11 +121,10 @@ abstract class 
AbstractEngineConnLaunchService extends EngineConnLaunchService w Sender .getSender(MANAGER_SERVICE_NAME) .send( - new EngineConnStatusCallbackToAM( + EngineConnStatusCallbackToAM( conn.getServiceInstance, NodeStatus.Failed, - " wait init failed , reason " + ExceptionUtils.getRootCauseMessage(t), - true + " wait init failed , reason " + ExceptionUtils.getRootCauseMessage(t) ) ) conn.setStatus(NodeStatus.Failed) @@ -147,12 +146,10 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w throw t } LoggerUtils.removeJobIdMDC() - val label = LabelUtil.getEngingeConnRuntimeModeLabel(request.labels) val isYarnClusterMode: Boolean = if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) true else false - val engineNode = new AMEngineNode() engineNode.setLabels(conn.getLabels) engineNode.setServiceInstance(conn.getServiceInstance) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala index e334a8ac29..ed1490294c 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala @@ -120,6 +120,8 @@ class BmlResourceLocalizationService extends ResourceLocalizationService with Lo } } + private val bmlResourceSuffix = ".zip" + private def createDirIfNotExit(noSchemaPath: String): String = { val fsPath = new FsPath(schema + noSchemaPath) if (!fs.exists(fsPath)) { diff --git 
a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/IRServiceGroupProtocol.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala similarity index 71% rename from linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/IRServiceGroupProtocol.scala rename to linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala index 675dc0c830..5d7da6b91e 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/IRServiceGroupProtocol.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala @@ -15,13 +15,11 @@ * limitations under the License. */ -package org.apache.linkis.protocol +package org.apache.linkis.ecm.server.service.impl -trait IRServiceGroupProtocol extends IRProtocol with InstanceProtocol { - val userWithCreator: UserWithCreator +import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} +import org.apache.linkis.ecm.server.service.ECMMetricsService - def user: String = userWithCreator.user - def creator: String = userWithCreator.creator +class DefaultECMMetricsService extends ECMMetricsService with ECMEventListener { + override def onEvent(event: ECMEvent): Unit = {} } - -case class UserWithCreator(user: String, creator: String) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala index 
d88f270862..4c7807dad1 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala @@ -17,7 +17,6 @@ package org.apache.linkis.ecm.server.service.impl -import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} import org.apache.linkis.ecm.server.conf.ECMConfiguration._ @@ -31,7 +30,6 @@ import org.apache.linkis.manager.common.protocol.em.{ StopEMRequest } import org.apache.linkis.manager.label.constant.LabelKeyConstant -import org.apache.linkis.manager.label.entity.SerializableLabel import org.apache.linkis.rpc.Sender import java.util @@ -52,25 +50,17 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener request } - def getLabelsFromArgs(params: Array[String]): util.Map[String, AnyRef] = { + private def getLabelsFromArgs(params: Array[String]): util.Map[String, AnyRef] = { import scala.collection.JavaConverters._ - val labelRegex = """label\.(.+)\.(.+)=(.+)""".r val labels = new util.HashMap[String, AnyRef]() - // TODO: magic labels.asScala += LabelKeyConstant.SERVER_ALIAS_KEY -> Collections.singletonMap( "alias", ENGINE_CONN_MANAGER_SPRING_NAME ) - - if (Configuration.IS_MULTIPLE_YARN_CLUSTER.getValue.asInstanceOf[Boolean]) { - labels.asScala += LabelKeyConstant.YARN_CLUSTER_KEY -> - (ECM_YARN_CLUSTER_TYPE + "_" + ECM_YARN_CLUSTER_NAME) - } - // TODO: group by key labels } - def getEMRegiterResourceFromConfiguration: NodeResource = { + private def getEMRegiterResourceFromConfiguration: NodeResource = { val maxResource = new LoadInstanceResource( ECMUtils.inferDefaultMemory(), ECM_MAX_CORES_AVAILABLE, @@ 
-107,11 +97,9 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener logger.info("start register ecm") val response = Sender.getSender(MANAGER_SERVICE_NAME).ask(request) response match { - case registerEMResponse: RegisterEMResponse => - if (!registerEMResponse.getIsSuccess) { - logger.error( - s"Failed to register info to linkis manager, reason: ${registerEMResponse.getMsg}" - ) + case RegisterEMResponse(isSuccess, msg) => + if (!isSuccess) { + logger.error(s"Failed to register info to linkis manager, reason: $msg") System.exit(1) } case _ => diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala index 12b813d67f..3a541df8b9 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala @@ -27,34 +27,26 @@ import org.apache.commons.lang3.exception.ExceptionUtils import org.springframework.stereotype.Service -import java.util +import scala.collection.JavaConverters.mapAsScalaMapConverter @Service class DefaultOperateService extends OperateService with Logging { @Receiver override def executeOperation(ecmOperateRequest: ECMOperateRequest): ECMOperateResponse = { - val parameters = ecmOperateRequest.getParameters() - val operator = Utils.tryCatch(OperatorFactory.apply().getOperatorRequest(parameters)) { t => - logger.error(s"Get operator failed, parameters is ${ecmOperateRequest.getParameters}.", t) - return new ECMOperateResponse( - new 
util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + val parameters = ecmOperateRequest.parameters.asScala.toMap + val operator = Utils.tryCatch(OperatorFactory().getOperatorRequest(parameters)) { t => + logger.error(s"Get operator failed, parameters is ${ecmOperateRequest.parameters}.", t) + return ECMOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) } logger.info( - s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${ecmOperateRequest.getParameters}." + s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${ecmOperateRequest.parameters}." ) val result = Utils.tryCatch(operator(parameters)) { t => logger.error(s"Execute ${operator.getClass.getSimpleName} failed.", t) - return new ECMOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + return ECMOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) } - new ECMOperateResponse(result) + ECMOperateResponse(result) } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala index 91f31e5548..764a704887 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala @@ -17,7 +17,6 @@ package org.apache.linkis.ecm.server.service.impl -import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.utils.Logging import 
org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} import org.apache.linkis.ecm.server.listener.EngineConnStopEvent diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala index 360bca269d..b20590f04a 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala @@ -26,6 +26,7 @@ import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME import org.apache.linkis.ecm.server.listener.EngineConnStopEvent import org.apache.linkis.ecm.server.service.LocalDirsHandleService +import org.apache.linkis.ecm.utils.ECMCacheUtils import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.enumeration.NodeStatus @@ -90,20 +91,36 @@ abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchSe ) } if (exitCode != 0) { - val canRetry = if (errorMsg.isEmpty) true else false - logger.warn( - s"Failed to start ec ${engineConn.getServiceInstance}, status shutting down exit code ${exitCode}, canRetry ${canRetry}, logPath ${logPath}" - ) - Sender - .getSender(MANAGER_SERVICE_NAME) - .send( - new EngineConnStatusCallbackToAM( - engineConn.getServiceInstance, - NodeStatus.ShuttingDown, - "Failed to start EngineConn, reason: " + 
errorMsg + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", - canRetry + val stopRequest = ECMCacheUtils.getStopEC(engineConn.getServiceInstance) + if ( + null != stopRequest && engineConn.getPid != null && engineConn.getPid.equals( + stopRequest.getIdentifier ) + ) { + logger.info( + s"EC ${engineConn.getServiceInstance} exit should by kill stop request $stopRequest, do not report status" + ) + } else { + val canRetry = + if (errorMsg.isEmpty || ECMConfiguration.EC_CAN_RETRY_EXIT_CODES.contains(exitCode)) { + true + } else { + false + } + logger.warn( + s"Failed to start ec ${engineConn.getServiceInstance}, status shutting down exit code ${exitCode}, canRetry ${canRetry}, logPath ${logPath}" ) + Sender + .getSender(MANAGER_SERVICE_NAME) + .send( + EngineConnStatusCallbackToAM( + engineConn.getServiceInstance, + NodeStatus.ShuttingDown, + "Failed to start EngineConn, reason: " + errorMsg + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", + canRetry + ) + ) + } engineConn.setStatus(NodeStatus.ShuttingDown) } else { engineConn.setStatus(NodeStatus.Success) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala index 08addb94c1..2a50b40405 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala @@ -102,8 +102,7 @@ object ECMUtils extends Logging { // if enable estimate actual memory if (ECM_STIMATE_ACTUAL_MEMORY_ENABLE) { - // 90% - val totalByte = 
(HardwareUtils.getMaxMemory() * 0.9).asInstanceOf[Long] + val totalByte = HardwareUtils.getMaxMemory() val resultMemory = math.max(totalByte, ECM_PROTECTED_MEMORY) // max of PhysicalMemory or ECM_PROTECTED_MEMORY diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java deleted file mode 100644 index f65fd68780..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl; - -import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.manager.common.protocol.em.RegisterEMRequest; -import org.apache.linkis.manager.label.constant.LabelKeyConstant; -import org.apache.linkis.rpc.serializer.ProtostuffSerializeUtil; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.junit.jupiter.api.Test; - -import static org.apache.linkis.manager.label.conf.LabelCommonConfig.ENGINE_CONN_MANAGER_SPRING_NAME; - -public class DefaultECMRegisterServiceTest { - @Test - void testECM() { - DefaultECMRegisterService defaultECMRegisterService = new DefaultECMRegisterService(); - RegisterEMRequest request = new RegisterEMRequest(); - ServiceInstance instance = new ServiceInstance(); - instance.setInstance("127.0.0.1:9001"); - instance.setApplicationName("ecm"); - request.setUser("hadoop"); - request.setServiceInstance(instance); - request.setAlias(instance.getApplicationName()); - - Map labels = new HashMap<>(); - labels.put( - LabelKeyConstant.SERVER_ALIAS_KEY, - Collections.singletonMap("alias", ENGINE_CONN_MANAGER_SPRING_NAME)); - request.setLabels(defaultECMRegisterService.getLabelsFromArgs(null)); - // the ECMUtils.inferDefaultMemory() will throw error disable the test - // request.setNodeResource(defaultECMRegisterService.getEMRegiterResourceFromConfiguration()); - String res = ProtostuffSerializeUtil.serialize(request); - ProtostuffSerializeUtil.deserialize(res, RegisterEMRequest.class); - } -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java deleted file mode 100644 index 
3f98e6a988..0000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineconn.once.executor.operator; - -import org.apache.linkis.engineconn.common.exception.EngineConnException; -import org.apache.linkis.engineconn.once.executor.OnceExecutor; -import org.apache.linkis.engineconn.once.executor.OperableOnceExecutor; -import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager$; -import org.apache.linkis.manager.common.operator.Operator; -import org.apache.linkis.manager.common.operator.OperatorFactory; -import org.apache.linkis.protocol.engine.JobProgressInfo; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class OperableOnceEngineConnOperator implements Operator { - - public static final String PROGRESS_OPERATOR_NAME = "engineConnProgress"; - - public static final String METRICS_OPERATOR_NAME = "engineConnMetrics"; - - public static final String DIAGNOSIS_OPERATOR_NAME = "engineConnDiagnosis"; - - @Override - public String[] getNames() { - return new String[] {PROGRESS_OPERATOR_NAME, METRICS_OPERATOR_NAME, DIAGNOSIS_OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - String operatorName = OperatorFactory.apply().getOperatorName(parameters); - OnceExecutor reportExecutor = OnceExecutorManager$.MODULE$.getInstance().getReportExecutor(); - if (reportExecutor instanceof OperableOnceExecutor) { - OperableOnceExecutor operableOnceExecutor = (OperableOnceExecutor) reportExecutor; - switch (operatorName) { - case PROGRESS_OPERATOR_NAME: - List> progressInfoMap = new ArrayList<>(); - JobProgressInfo[] progressInfoList = operableOnceExecutor.getProgressInfo(); - if (progressInfoList != null && progressInfoList.length != 0) { - for (JobProgressInfo progressInfo : progressInfoList) { - Map infoMap = new HashMap<>(); - infoMap.put("id", progressInfo.id()); - infoMap.put("totalTasks", progressInfo.totalTasks()); - infoMap.put("runningTasks", progressInfo.runningTasks()); - infoMap.put("failedTasks", 
progressInfo.failedTasks()); - infoMap.put("succeedTasks", progressInfo.succeedTasks()); - progressInfoMap.add(infoMap); - } - } - Map resultMap = new HashMap<>(); - resultMap.put("progress", operableOnceExecutor.getProgress()); - resultMap.put("progressInfo", progressInfoMap); - return resultMap; - case METRICS_OPERATOR_NAME: - return new HashMap() { - { - put("metrics", operableOnceExecutor.getMetrics()); - } - }; - case DIAGNOSIS_OPERATOR_NAME: - return new HashMap() { - { - put("diagnosis", operableOnceExecutor.getDiagnosis()); - } - }; - default: - throw new EngineConnException( - 20308, "This engineConn don't support " + operatorName + " operator."); - } - } else { - throw new EngineConnException( - 20308, "This engineConn don't support " + operatorName + " operator."); - } - } -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala index 5d66212069..c25d2f580c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala @@ -22,7 +22,7 @@ import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.executor.ExecutorExecutionContext import org.apache.linkis.governance.common.entity.job.OnceExecutorContent -import 
org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriterFactory} +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriter} class OnceExecutorExecutionContext( engineCreationContext: EngineCreationContext, @@ -40,14 +40,14 @@ class OnceExecutorExecutionContext( ): ResultSet[_ <: MetaData, _ <: Record] = resultSetFactory.getResultSetByType(resultSetType) - override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType()(0) + override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType(0) override protected def newResultSetWriter( resultSet: ResultSet[_ <: MetaData, _ <: Record], resultSetPath: FsPath, alias: String - ): org.apache.linkis.common.io.resultset.ResultSetWriter[_ <: MetaData, _ <: Record] = - ResultSetWriterFactory.getResultSetWriter( + ): ResultSetWriter[_ <: MetaData, _ <: Record] = + ResultSetWriter.getResultSetWriter( resultSet, 0, resultSetPath, diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala new file mode 100644 index 0000000000..ae1828d1c0 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.once.executor.operator + +import org.apache.linkis.engineconn.common.exception.EngineConnException +import org.apache.linkis.engineconn.once.executor.OperableOnceExecutor +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager +import org.apache.linkis.manager.common.operator.{Operator, OperatorFactory} + +class OperableOnceEngineConnOperator extends Operator { + + import OperableOnceEngineConnOperator._ + + override def getNames: Array[String] = + Array(PROGRESS_OPERATOR_NAME, METRICS_OPERATOR_NAME, DIAGNOSIS_OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + val operatorName = OperatorFactory().getOperatorName(parameters) + OnceExecutorManager.getInstance.getReportExecutor match { + case operableOnceExecutor: OperableOnceExecutor => + operatorName match { + case PROGRESS_OPERATOR_NAME => + val progressInfo = operableOnceExecutor.getProgressInfo + val progressInfoMap = if (progressInfo != null && progressInfo.nonEmpty) { + progressInfo.map(progressInfo => + Map( + "id" -> progressInfo.id, + "totalTasks" -> progressInfo.totalTasks, + "runningTasks" -> progressInfo.runningTasks, + "failedTasks" -> progressInfo.failedTasks, + "succeedTasks" -> progressInfo.succeedTasks + ) + ) + } else Array.empty[Map[String, Any]] + Map("progress" -> operableOnceExecutor.getProgress, 
"progressInfo" -> progressInfoMap) + case METRICS_OPERATOR_NAME => + Map("metrics" -> operableOnceExecutor.getMetrics) + case DIAGNOSIS_OPERATOR_NAME => + Map("diagnosis" -> operableOnceExecutor.getDiagnosis) + case _ => + throw EngineConnException( + 20308, + s"This engineConn don't support $operatorName operator." + ) + } + case _ => + throw EngineConnException(20308, s"This engineConn don't support $operatorName operator.") + } + } + +} + +object OperableOnceEngineConnOperator { + val PROGRESS_OPERATOR_NAME = "engineConnProgress" + val METRICS_OPERATOR_NAME = "engineConnMetrics" + val DIAGNOSIS_OPERATOR_NAME = "engineConnDiagnosis" +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java index a84f581153..21d28e2d9e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java @@ -55,7 +55,7 @@ public class TimingMonitorService implements InitializingBean, Runnable { @Override public void afterPropertiesSet() throws Exception { - if ((Boolean) AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM().getValue()) { + if ((Boolean) (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM().getValue())) { Utils.defaultScheduler() .scheduleAtFixedRate( this, 3 * 60 * 1000, MONITOR_INTERVAL.getValue().toLong(), TimeUnit.MILLISECONDS); @@ -77,7 +77,7 @@ public void run() { } } if (null == concurrentExecutor) { - LOG.warn("shell executor 
can not is null"); + LOG.warn("Executor can not is null"); return; } isAvailable = true; @@ -96,7 +96,7 @@ public void run() { } else { if (concurrentExecutor.isIdle()) synchronized (EXECUTOR_STATUS_LOCKER) { - LOG.info("monitor turn to executor status from busy to unlock"); + LOG.info("monitor turn to executor status from unlock to busy"); concurrentExecutor.transition(NodeStatus.Busy); } } diff --git a/linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextValue.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java similarity index 61% rename from linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextValue.java rename to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java index 0ad3b675fb..4d1fbbfe40 100644 --- a/linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextValue.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java @@ -15,33 +15,23 @@ * limitations under the License. 
*/ -package org.apache.linkis.cs.client.test.bean; +package org.apache.linkis.engineconn.computation.executor.exception; -import org.apache.linkis.cs.common.entity.source.ContextValue; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; -public class ClientTestContextValue implements ContextValue { +public class HookExecuteException extends LinkisRuntimeException { - private Object value; - - private String keywords; - - @Override - public String getKeywords() { - return this.keywords; + public HookExecuteException(int errCode, String desc) { + super(errCode, desc); } - @Override - public void setKeywords(String keywords) { - this.keywords = keywords; - } - - @Override - public Object getValue() { - return this.value; + public HookExecuteException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); } @Override - public void setValue(Object value) { - this.value = value; + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala index cf8b9c00b8..9af394da80 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala @@ -20,7 +20,10 @@ package org.apache.linkis.engineconn.computation.executor.async 
import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskResponseErrorEvent +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.{ @@ -29,13 +32,16 @@ import org.apache.linkis.engineconn.computation.executor.execute.{ } import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor import org.apache.linkis.engineconn.executor.listener.{ EngineConnSyncListenerBus, ExecutorListenerBusContext } import org.apache.linkis.governance.common.entity.ExecutionNodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.listener.JobListener @@ -48,6 +54,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.util import java.util.concurrent.ConcurrentHashMap +import DataWorkCloudApplication.getApplicationContext + abstract class AsyncConcurrentComputationExecutor(override 
val outputPrintLimit: Int = 1000) extends ComputationExecutor(outputPrintLimit) with ConcurrentExecutor @@ -97,9 +105,6 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: }) } { e => logger.info("failed to do with hook", e) - engineExecutionContext.appendStdout( - LogUtils.generateWarn(s"failed execute hook: ${ExceptionUtils.getStackTrace(e)}") - ) } if (hookedCode.length > 100) { logger.info(s"hooked after code: ${hookedCode.substring(0, 100)} ....") @@ -207,6 +212,7 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: s"Executor is busy but still got new task ! Running task num : ${getRunningTask}" ) } + runningTasks.increase() if (getRunningTask >= getConcurrentLimit) synchronized { if (getRunningTask >= getConcurrentLimit && NodeStatus.isIdle(getStatus)) { logger.info( @@ -215,13 +221,25 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: transition(NodeStatus.Busy) } } - runningTasks.increase() } override def onJobCompleted(job: Job): Unit = { + runningTasks.decrease() job match { case asyncEngineConnJob: AsyncEngineConnJob => + val jobId = JobUtils.getJobIdFromMap(asyncEngineConnJob.getEngineConnTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) + + if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized { + if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { + logger.info( + s"running task($getRunningTask) < concurrent limit $getConcurrentLimit, now to mark engine to Unlock " + ) + transition(NodeStatus.Unlock) + } + } + job.getState match { case Succeed => succeedTasks.increase() @@ -241,22 +259,42 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: } removeJob(asyncEngineConnJob.getEngineConnTask.getTaskId) clearTaskCache(asyncEngineConnJob.getEngineConnTask.getTaskId) - + // execute once should try to shutdown + if ( + 
asyncEngineConnJob.getEngineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel]) + ) { + if (!hasTaskRunning()) { + logger.warn( + s"engineConnTask(${asyncEngineConnJob.getEngineConnTask.getTaskId}) is execute once, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } + // unhealthy node should try to shutdown + if (!hasTaskRunning() && currentEngineIsUnHealthy()) { + logger.info( + s"engineConnTask(${asyncEngineConnJob.getEngineConnTask.getTaskId}) is unHealthy, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + LoggerUtils.setJobIdMDC(jobId) case _ => } - if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized { - if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { - logger.info( - s"running task($getRunningTask) < concurrent limit $getConcurrentLimit, now to mark engine to Unlock " - ) - transition(NodeStatus.Unlock) - } - } } override def hasTaskRunning(): Boolean = { getRunningTask > 0 } + override def transition(toStatus: NodeStatus): Unit = { + if (getRunningTask >= getConcurrentLimit && NodeStatus.Unlock == toStatus) { + logger.info( + s"running task($getRunningTask) > concurrent limit:$getConcurrentLimit, can not to mark EC to Unlock" + ) + return + } + super.transition(toStatus) + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala index 3959eb942b..98f04daaa2 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.computation.executor.bml import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory} import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook @@ -62,7 +63,7 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { ): String = { val props = engineExecutionContext.getProperties if (null != props && props.containsKey(GovernanceConstant.TASK_RESOURCES_STR)) { - val workDir = ComputationEngineUtils.getCurrentWorkDir +// val workDir = ComputationEngineUtils.getCurrentWorkDir val jobId = engineExecutionContext.getJobId props.get(GovernanceConstant.TASK_RESOURCES_STR) match { case resources: util.List[Object] => @@ -71,9 +72,9 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { val fileName = resource.get(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR).toString val resourceId = resource.get(GovernanceConstant.TASK_RESOURCE_ID_STR).toString val version = resource.get(GovernanceConstant.TASK_RESOURCE_VERSION_STR).toString - val fullPath = - if (workDir.endsWith(seperator)) pathType + workDir + fileName - else pathType + workDir + seperator + fileName + val fullPath = fileName +// if (workDir.endsWith(seperator)) pathType + workDir + fileName +// else pathType + workDir + seperator + fileName val response = Utils.tryCatch { bmlClient.downloadShareResource(processUser, resourceId, version, fullPath, true) } { diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala index c072c32794..a3f7bb49e3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala @@ -33,6 +33,12 @@ object ComputationExecutorConf { "Maximum number of tasks executed by the synchronization EC" ) + val PRINT_TASK_PARAMS_SKIP_KEYS = CommonVars( + "linkis.engineconn.print.task.params.skip.keys", + "jobId", + "skip to print params key at job logs" + ) + val ENGINE_PROGRESS_FETCH_INTERVAL = CommonVars( "wds.linkis.engineconn.progresss.fetch.interval-in-seconds", @@ -54,7 +60,7 @@ object ComputationExecutorConf { ).getValue val ENGINE_CONCURRENT_THREAD_NUM = CommonVars( - "wds.linkis.engineconn.concurrent.thread.num", + "linkis.engineconn.concurrent.thread.num", 20, "Maximum thread pool of the concurrent EC" ) @@ -124,4 +130,10 @@ object ComputationExecutorConf { val ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH = CommonVars("linkis.ec.send.log.entrance.limit.length", 2000) + val ENGINE_KERBEROS_AUTO_REFRESH_ENABLED = + CommonVars("linkis.ec.kerberos.auto.refresh.enabled", false).getValue + + val CLOSE_RS_OUTPUT_WHEN_RESET_BY_DEFAULT_ENABLED = + CommonVars("linkis.ec.rs.close.when.reset.enabled", true).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala index f59adaadef..fe98e3328e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala @@ -17,7 +17,10 @@ package org.apache.linkis.engineconn.computation.executor.cs +import org.apache.linkis.common.utils.Logging import org.apache.linkis.cs.client.service.CSResourceService +import org.apache.linkis.engineconn.common.conf.EngineConnConf +import org.apache.linkis.governance.common.utils.GovernanceConstant import org.apache.commons.lang3.StringUtils @@ -27,7 +30,7 @@ import java.util.regex.Pattern import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer -class CSResourceParser { +class CSResourceParser extends Logging { private val pb = Pattern.compile("cs://[^\\s\"]+[$\\s]{0,1}", Pattern.CASE_INSENSITIVE) @@ -47,7 +50,6 @@ class CSResourceParser { nodeNameStr: String ): String = { - // TODO getBMLResource val bmlResourceList = CSResourceService.getInstance().getUpstreamBMLResource(contextIDValueStr, nodeNameStr) @@ -56,23 +58,25 @@ class CSResourceParser { val preFixNames = new ArrayBuffer[String]() val parsedNames = new ArrayBuffer[String]() + val prefixName = System.currentTimeMillis().toString + "_" preFixResourceNames.foreach { preFixResourceName => val resourceName = preFixResourceName.replace(PREFIX, "").trim val bmlResourceOption = bmlResourceList.asScala.find(_.getDownloadedFileName.equals(resourceName)) if (bmlResourceOption.isDefined) { + val replacementName = EngineConnConf.getEngineTmpDir + prefixName + resourceName val bmlResource = bmlResourceOption.get val map = 
new util.HashMap[String, Object]() - map.put("resourceId", bmlResource.getResourceId) - map.put("version", bmlResource.getVersion) - map.put("fileName", resourceName) + map.put(GovernanceConstant.TASK_RESOURCE_ID_STR, bmlResource.getResourceId) + map.put(GovernanceConstant.TASK_RESOURCE_VERSION_STR, bmlResource.getVersion) + map.put(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR, replacementName) parsedResources.add(map) preFixNames.append(preFixResourceName) - parsedNames.append(resourceName) + parsedNames.append(replacementName) + logger.warn(s"Replace cs file from {$preFixResourceName} to {$replacementName}") } - } - props.put("resources", parsedResources) + props.put(GovernanceConstant.TASK_RESOURCES_STR, parsedResources) StringUtils.replaceEach(code, preFixNames.toArray, parsedNames.toArray) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala index 8b9f95d66d..cc899cece4 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala @@ -88,7 +88,7 @@ object CSTableRegister extends Logging { val csColumns = columns.map { column => val csColumn = new CSColumn csColumn.setName(column.columnName) - csColumn.setType(column.dataType.getTypeName) + csColumn.setType(column.dataType.typeName) csColumn.setComment(column.comment) csColumn } diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala index 891f37f27e..163d7cf57e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala @@ -67,7 +67,7 @@ class CSTableResultSetWriter( val csColumns = getMetaData.asInstanceOf[TableMetaData].columns.map { column => val csColumn = new CSColumn csColumn.setName(column.columnName) - csColumn.setType(column.dataType.getTypeName) + csColumn.setType(column.dataType.typeName) csColumn.setComment(column.comment) csColumn } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala index 940973be61..592d225a34 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala @@ -21,13 +21,17 @@ import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import 
org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager import org.apache.linkis.engineconn.acessible.executor.listener.event.{ + TaskLogUpdateEvent, TaskResponseErrorEvent, TaskStatusChangedEvent } +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask +import org.apache.linkis.engineconn.computation.executor.exception.HookExecuteException import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.computation.executor.metrics.ComputationEngineConnMetrics import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent @@ -35,12 +39,20 @@ import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor} import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext +import org.apache.linkis.governance.common.constant.job.JobRequestConstants import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.paser.CodeParser import org.apache.linkis.governance.common.protocol.task.{EngineConcurrentInfo, RequestTask} import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel +import 
org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.engine.{ + CodeLanguageLabel, + EngineType, + EngineTypeLabel, + RunType, + UserCreatorLabel +} +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer._ @@ -50,6 +62,9 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger +import scala.collection.JavaConverters._ + +import DataWorkCloudApplication.getApplicationContext import com.google.common.cache.{Cache, CacheBuilder} abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) @@ -60,6 +75,7 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) private val listenerBusContext = ExecutorListenerBusContext.getExecutorListenerBusContext() + // private val taskMap: util.Map[String, EngineConnTask] = new ConcurrentHashMap[String, EngineConnTask](8) private val taskCache: Cache[String, EngineConnTask] = CacheBuilder .newBuilder() .expireAfterAccess(EngineConnConf.ENGINE_TASK_EXPIRE_TIME.getValue, TimeUnit.MILLISECONDS) @@ -74,13 +90,21 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) protected val runningTasks: Count = new Count + protected val pendingTasks: Count = new Count + protected val succeedTasks: Count = new Count protected val failedTasks: Count = new Count - private var lastTask: EngineConnTask = _ + protected var lastTask: EngineConnTask = _ - private val MAX_TASK_EXECUTE_NUM = ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue + private val MAX_TASK_EXECUTE_NUM = if (null != EngineConnObject.getEngineCreationContext) { + ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue( + EngineConnObject.getEngineCreationContext.getOptions + ) + } else { + ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue + } private val 
CLOSE_LOCKER = new Object @@ -132,6 +156,12 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) override def close(): Unit = { if (null != lastTask) CLOSE_LOCKER.synchronized { + listenerBusContext.getEngineConnSyncListenerBus.postToAll( + TaskLogUpdateEvent( + lastTask.getTaskId, + LogUtils.generateERROR("EC exits unexpectedly and actively kills the task") + ) + ) killTask(lastTask.getTaskId) } else { @@ -140,6 +170,8 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) super.close() } + // override def getName: String = ComputationExecutorConf.DEFAULT_COMPUTATION_NAME + protected def ensureOp[A](f: => A): A = if (!isEngineInitialized) { f } else ensureIdle(f) @@ -150,6 +182,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) engineConnTask: EngineConnTask, executeResponse: ExecuteResponse ): Unit = { + Utils.tryAndWarn { + ComputationExecutorHook.getComputationExecutorHooks.foreach { hook => + hook.afterExecutorExecute(engineConnTask, executeResponse) + } + } val executorNumber = getSucceedNum + getFailedNum if ( MAX_TASK_EXECUTE_NUM > 0 && runningTasks @@ -158,6 +195,13 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) logger.error(s"Task has reached max execute number $MAX_TASK_EXECUTE_NUM, now tryShutdown. 
") ExecutorManager.getInstance.getReportExecutor.tryShutdown() } + + // unhealthy node should try to shutdown + if (runningTasks.getCount() == 0 && currentEngineIsUnHealthy) { + logger.info("no task running and ECNode is unHealthy, now to mark engine to Finished.") + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } def toExecuteTask( @@ -169,25 +213,47 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) Utils.tryFinally { transformTaskStatus(engineConnTask, ExecutionNodeStatus.Running) val engineExecutionContext = createEngineExecutionContext(engineConnTask) + + val engineCreationContext = EngineConnObject.getEngineCreationContext + var hookedCode = engineConnTask.getCode Utils.tryCatch { - val engineCreationContext = EngineConnObject.getEngineCreationContext ComputationExecutorHook.getComputationExecutorHooks.foreach(hook => { hookedCode = hook.beforeExecutorExecute(engineExecutionContext, engineCreationContext, hookedCode) }) - }(e => logger.info("failed to do with hook", e)) + } { e => + e match { + case hookExecuteException: HookExecuteException => + failedTasks.increase() + logger.error("failed to do with hook", e) + return ErrorExecuteResponse("hook execute failed task will be failed", e) + case _ => logger.info("failed to do with hook", e) + } + } if (hookedCode.length > 100) { logger.info(s"hooked after code: ${hookedCode.substring(0, 100)} ....") } else { logger.info(s"hooked after code: $hookedCode ") } + + // task params log + // spark engine: at org.apache.linkis.engineplugin.spark.executor.SparkEngineConnExecutor.executeLine log special conf + Utils.tryAndWarn { + val engineType = LabelUtil.getEngineType(engineCreationContext.getLabels()) + EngineType.mapStringToEngineType(engineType) match { + case EngineType.HIVE | EngineType.TRINO => printTaskParamsLog(engineExecutionContext) + case _ => + } + } + val localPath = EngineConnConf.getLogDir engineExecutionContext.appendStdout( LogUtils.generateInfo( s"EngineConn 
local log path: ${DataWorkCloudApplication.getServiceInstance.toString} $localPath" ) ) + var response: ExecuteResponse = null val incomplete = new StringBuilder val codes = @@ -202,11 +268,13 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) } val code = codes(index) engineExecutionContext.setCurrentParagraph(index + 1) + response = Utils.tryCatch(if (incomplete.nonEmpty) { executeCompletely(engineExecutionContext, code, incomplete.toString()) } else executeLine(engineExecutionContext, code)) { t => ErrorExecuteResponse(ExceptionUtils.getRootCauseMessage(t), t) } + incomplete ++= code response match { case e: ErrorExecuteResponse => @@ -244,6 +312,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) case s: SuccessExecuteResponse => succeedTasks.increase() s + case incompleteExecuteResponse: IncompleteExecuteResponse => + ErrorExecuteResponse( + s"The task cannot be an incomplete response ${incompleteExecuteResponse.message}", + null + ) case _ => response } response @@ -271,12 +344,14 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) TaskResponseErrorEvent(engineConnTask.getTaskId, errorExecuteResponse.message) ) transformTaskStatus(engineConnTask, ExecutionNodeStatus.Failed) + case _ => logger.warn(s"task get response is $executeResponse") } executeResponse } Utils.tryAndWarn(afterExecute(engineConnTask, response)) logger.info(s"Finished to execute task ${engineConnTask.getTaskId}") + // lastTask = null response } { LoggerUtils.removeJobIdMDC() @@ -319,6 +394,12 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) engineConnTask.getProperties.get(RequestTask.RESULT_SET_STORE_PATH).toString ) } + if (engineConnTask.getProperties.containsKey(JobRequestConstants.ENABLE_DIRECT_PUSH)) { + engineExecutionContext.setEnableDirectPush( + engineConnTask.getProperties.get(JobRequestConstants.ENABLE_DIRECT_PUSH).toString.toBoolean + ) + logger.info(s"Enable direct push in engineTask 
${engineConnTask.getTaskId}.") + } logger.info(s"StorePath : ${engineExecutionContext.getStorePath.orNull}.") engineExecutionContext.setJobId(engineConnTask.getTaskId) engineExecutionContext.getProperties.putAll(engineConnTask.getProperties) @@ -335,6 +416,30 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) } } + /** + * job task log print task params info + * + * @param engineExecutorContext + * @return + * Unit + */ + + def printTaskParamsLog(engineExecutorContext: EngineExecutionContext): Unit = { + val sb = new StringBuilder + + EngineConnObject.getEngineCreationContext.getOptions.asScala.foreach({ case (key, value) => + // skip log jobId because it corresponding jobid when the ec created + if (!ComputationExecutorConf.PRINT_TASK_PARAMS_SKIP_KEYS.getValue.contains(key)) { + sb.append(s"${key}=${value}\n") + } + }) + + sb.append("\n") + engineExecutorContext.appendStdout( + LogUtils.generateInfo(s"Your job exec with configs:\n${sb.toString()}\n") + ) + } + def transformTaskStatus(task: EngineConnTask, newStatus: ExecutionNodeStatus): Unit = { val oriStatus = task.getStatus logger.info(s"task ${task.getTaskId} from status $oriStatus to new status $newStatus") diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala index f192a45254..9584dd847c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala @@ -17,9 +17,15 @@ package org.apache.linkis.engineconn.computation.executor.execute +import org.apache.linkis.DataWorkCloudApplication.getApplicationContext +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.scheduler.executer.ExecuteResponse abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int = 1000) @@ -42,9 +48,7 @@ abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int transition(NodeStatus.Busy) } } - logger.info( - s"engineConnTask(${engineConnTask.getTaskId}) running task is ($getRunningTask) status ${getStatus}" - ) + logger.info(s"engineConnTask(${engineConnTask.getTaskId}) running task is ($getRunningTask) ") val response = super.execute(engineConnTask) if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { EXECUTOR_STATUS_LOCKER.synchronized { @@ -64,10 +68,47 @@ abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int override def afterExecute( engineConnTask: EngineConnTask, executeResponse: ExecuteResponse - ): Unit = {} + ): Unit = { + // execute once should try to shutdown 
+ if (engineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel])) { + if (!hasTaskRunning()) { + logger.warn( + s"engineConnTask(${engineConnTask.getTaskId}) is execute once, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } + // unhealthy node should try to shutdown + if (!hasTaskRunning() && currentEngineIsUnHealthy) { + logger.info("no task running and ECNode is unHealthy, now to mark engine to Finished.") + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } override def hasTaskRunning(): Boolean = { getRunningTask > 0 } + override def transition(toStatus: NodeStatus): Unit = { + if (getRunningTask >= getConcurrentLimit && NodeStatus.Unlock == toStatus) { + logger.info( + s"running task($getRunningTask) > concurrent limit:$getConcurrentLimit, can not to mark EC to Unlock" + ) + return + } + super.transition(toStatus) + } + + override def getConcurrentLimit: Int = { + var maxTaskNum = ComputationExecutorConf.ENGINE_CONCURRENT_THREAD_NUM.getValue - 5 + if (maxTaskNum <= 0) { + logger.error( + s"max task num cannot ${maxTaskNum} < 0, should set linkis.engineconn.concurrent.thread.num > 6" + ) + maxTaskNum = 1 + } + logger.info(s"max task num $maxTaskNum") + maxTaskNum + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala index 377c32c193..55e2b1248b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala @@ -21,12 +21,14 @@ import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.cs.client.utils.ContextServiceUtils +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.listener.event.{ TaskLogUpdateEvent, TaskProgressUpdateEvent, TaskResultCreateEvent, TaskResultSizeCreatedEvent } +import org.apache.linkis.engineconn.acessible.executor.log.LogHelper import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.cs.CSTableResultSetWriter import org.apache.linkis.engineconn.executor.ExecutorExecutionContext @@ -40,7 +42,7 @@ import org.apache.linkis.governance.common.exception.engineconn.EngineConnExecut import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse} import org.apache.linkis.storage.{LineMetaData, LineRecord} -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriterFactory} +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriter} import org.apache.linkis.storage.resultset.table.TableResultSet import org.apache.commons.io.IOUtils @@ -55,8 +57,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String private val resultSetFactory = ResultSetFactory.getInstance - private var defaultResultSetWriter - : org.apache.linkis.common.io.resultset.ResultSetWriter[_ <: MetaData, _ <: Record] = _ + private var defaultResultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record] = _ private var resultSize 
= 0 @@ -67,6 +68,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String private var totalParagraph = 0 private var currentParagraph = 0 + private var enableDirectPush = false def getTotalParagraph: Int = totalParagraph @@ -76,6 +78,11 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String def setCurrentParagraph(currentParagraph: Int): Unit = this.currentParagraph = currentParagraph + def setEnableDirectPush(enable: Boolean): Unit = + this.enableDirectPush = enable + + def isEnableDirectPush: Boolean = enableDirectPush + def pushProgress(progress: Float, progressInfo: Array[JobProgressInfo]): Unit = if (!executor.isInternalExecute) { val listenerBus = getEngineSyncListenerBus @@ -88,12 +95,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String * Note: the writer will be closed at the end of the method * @param resultSetWriter */ - def sendResultSet( - resultSetWriter: org.apache.linkis.common.io.resultset.ResultSetWriter[ - _ <: MetaData, - _ <: Record - ] - ): Unit = { + def sendResultSet(resultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record]): Unit = { logger.info("Start to send res to entrance") val fileName = new File(resultSetWriter.toFSPath.getPath).getName val index = if (fileName.indexOf(".") < 0) fileName.length else fileName.indexOf(".") @@ -148,13 +150,13 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String ): ResultSet[_ <: MetaData, _ <: Record] = resultSetFactory.getResultSetByType(resultSetType) - override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType()(0) + override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType(0) def newResultSetWriter( resultSet: ResultSet[_ <: MetaData, _ <: Record], resultSetPath: FsPath, alias: String - ): org.apache.linkis.common.io.resultset.ResultSetWriter[_ <: MetaData, _ <: Record] = { + ): ResultSetWriter[_ <: MetaData, _ 
<: Record] = { // update by 20200402 resultSet match { case result: TableResultSet => @@ -172,7 +174,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String csWriter.setProxyUser(executorUser) csWriter } else { - ResultSetWriterFactory.getResultSetWriter( + ResultSetWriter.getResultSetWriter( resultSet, ComputationExecutorConf.ENGINE_RESULT_SET_MAX_CACHE.getValue.toLong, resultSetPath, @@ -180,7 +182,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String ) } case _ => - ResultSetWriterFactory.getResultSetWriter( + ResultSetWriter.getResultSetWriter( resultSet, ComputationExecutorConf.ENGINE_RESULT_SET_MAX_CACHE.getValue.toLong, resultSetPath, @@ -194,15 +196,20 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String logger.info(log) } else { var taskLog = log + val limitLength = ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue if ( ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_ENABLED.getValue && - log.length > ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue + log.length > limitLength ) { - taskLog = - s"${log.substring(0, ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue)}..." + taskLog = s"${log.substring(0, limitLength)}..." 
+ logger.info("The log is too long and will be intercepted,log limit length : {}", limitLength) + } + if (!AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { + LogHelper.cacheLog(taskLog) + } else { + val listenerBus = getEngineSyncListenerBus + getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, taskLog))) } - val listenerBus = getEngineSyncListenerBus - getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, taskLog))) } override def close(): Unit = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala index c23e28a3a3..f9164b2cf3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala @@ -19,7 +19,9 @@ package org.apache.linkis.engineconn.computation.executor.hook import org.apache.linkis.common.utils.{ClassUtils, Logging, Utils} import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext +import org.apache.linkis.scheduler.executer.ExecuteResponse import scala.collection.JavaConverters.asScalaSetConverter import scala.collection.mutable.ArrayBuffer @@ -36,6 +38,11 @@ trait ComputationExecutorHook { codeBeforeHook: String ): String = codeBeforeHook + def afterExecutorExecute( + engineConnTask: 
EngineConnTask, + executeResponse: ExecuteResponse + ): Unit = {} + } object ComputationExecutorHook extends Logging { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala index 78eb007d2c..0cb33cf3d7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala @@ -33,7 +33,6 @@ import org.apache.commons.io.FileUtils import org.apache.commons.lang3.StringUtils import java.io.File -import java.nio.charset.StandardCharsets import java.util abstract class InitSQLHook extends EngineConnHook with Logging { @@ -92,7 +91,7 @@ abstract class InitSQLHook extends EngineConnHook with Logging { logger.info("read file: " + path) val file = new File(path) if (file.exists()) { - FileUtils.readFileToString(file, StandardCharsets.UTF_8) + FileUtils.readFileToString(file) } else { logger.info("file: [" + path + "] doesn't exist, ignore it.") "" diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala new file mode 100644 index 0000000000..34928d8525 --- /dev/null +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.common.conf.Configuration.IS_VIEW_FS_ENV +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf +import org.apache.linkis.engineconn.computation.executor.execute.{ + ComputationExecutor, + EngineExecutionContext +} +import org.apache.linkis.engineconn.core.engineconn.EngineConnManager +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.apache.linkis.rpc.Sender +import org.apache.linkis.udf.UDFClientConfiguration +import org.apache.linkis.udf.api.rpc.{RequestPythonModuleProtocol, ResponsePythonModuleProtocol} +import org.apache.linkis.udf.entity.PythonModuleInfoVO + +import org.apache.commons.lang3.StringUtils + +import java.util + +import 
scala.collection.JavaConverters._ +import scala.collection.mutable + +/** + * The PythonModuleLoad class is designed to load Python modules into the execution environment + * dynamically. This class is not an extension of UDFLoad, but shares a similar philosophy of + * handling dynamic module loading based on user preferences and system configurations. + */ +abstract class PythonModuleLoad extends Logging { + + /** Abstract properties to be defined by the subclass */ + protected val engineType: String + protected val runType: RunType + + protected def getEngineType(): String = engineType + + protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String + + private def queryPythonModuleRpc( + userName: String, + engineType: String + ): java.util.List[PythonModuleInfoVO] = { + val infoList = Sender + .getSender(UDFClientConfiguration.UDF_SERVICE_NAME.getValue) + .ask(RequestPythonModuleProtocol(userName, engineType)) + .asInstanceOf[ResponsePythonModuleProtocol] + .getModulesInfo() + infoList + } + + protected def getLoadPythonModuleCode: Array[String] = { + val engineCreationContext = + EngineConnManager.getEngineConnManager.getEngineConn.getEngineCreationContext + val user = engineCreationContext.getUser + + var infoList: util.List[PythonModuleInfoVO] = + Utils.tryAndWarn(queryPythonModuleRpc(user, getEngineType())) + if (infoList == null) { + logger.info("rpc get info is empty.") + infoList = new util.ArrayList[PythonModuleInfoVO]() + } + + // 替换Viewfs + if (IS_VIEW_FS_ENV.getValue) { + infoList.asScala.foreach { info => + val path = info.getPath + logger.info(s"python path: ${path}") + if (path.startsWith("hdfs") || path.startsWith("viewfs")) { + info.setPath(path.replace("hdfs://", "viewfs://")) + } else { + info.setPath("viewfs://" + path) + } + } + } else { + + infoList.asScala.foreach { info => + val path = info.getPath + logger.info(s"hdfs python path: ${path}") + if (!path.startsWith("hdfs")) { + info.setPath("hdfs://" + path) + } + } + } + + 
logger.info(s"${user} load python modules: ") + infoList.asScala.foreach(l => logger.info(s"module name:${l.getName}, path:${l.getPath}\n")) + + // 创建加载code + val codes: mutable.Buffer[String] = infoList.asScala + .filter { info => StringUtils.isNotEmpty(info.getPath) } + .map(constructCode) + // 打印codes + val str: String = codes.mkString("\n") + logger.info(s"python codes: $str") + codes.toArray + } + + private def executeFunctionCode(codes: Array[String], executor: ComputationExecutor): Unit = { + if (null == codes || null == executor) { + return + } + codes.foreach { code => + logger.info("Submit function registration to engine, code: " + code) + Utils.tryCatch(executor.executeLine(new EngineExecutionContext(executor), code)) { + t: Throwable => + logger.error("Failed to load python module", t) + null + } + } + } + + /** + * Generate and execute the code necessary for loading Python modules. + * + * @param executor + * An object capable of executing code in the current engine context. 
+ */ +  protected def loadPythonModules(labels: Array[Label[_]]): Unit = { + +    val codes = getLoadPythonModuleCode +    logger.info(s"codes length: ${codes.length}") +    if (null != codes && codes.nonEmpty) { +      val executor = ExecutorManager.getInstance.getExecutorByLabels(labels) +      if (executor != null) { +        val className = executor.getClass.getName +        logger.info(s"executor class: ${className}") +      } else { +        logger.error(s"Failed to load python, executor is null") +      } + +      executor match { +        case computationExecutor: ComputationExecutor => +          executeFunctionCode(codes, computationExecutor) +        case _ => +      } +    } +    logger.info(s"Successful to load python, engineType : ${engineType}") +  } + +} + +// Note: The actual implementation of methods like `executeFunctionCode` and `constructCode` is provided above or by concrete subclasses. diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala new file mode 100644 index 0000000000..80eaa888b8 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel + +abstract class PythonModuleLoadEngineConnHook + extends PythonModuleLoad + with EngineConnHook + with Logging { + + override def afterExecutionExecute( + engineCreationContext: EngineCreationContext, + engineConn: EngineConn + ): Unit = { + Utils.tryAndWarnMsg { + val codeLanguageLabel = new CodeLanguageLabel + codeLanguageLabel.setCodeType(runType.toString) + logger.info(s"engineType: ${engineType}") + val labels = Array[Label[_]](codeLanguageLabel) + loadPythonModules(labels) + }(s"Failed to load Python Modules: ${engineType}") + + } + + override def afterEngineServerStartFailed( + engineCreationContext: EngineCreationContext, + throwable: Throwable + ): Unit = { + logger.error(s"Failed to start Engine Server: ${throwable.getMessage}", throwable) + } + + override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = { + logger.info("Preparing to load Python Module...") + } + + override def beforeExecutionExecute( + engineCreationContext: EngineCreationContext, + engineConn: EngineConn + ): Unit = { + logger.info(s"Before executing command on load Python Module.") + } + 
+} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala new file mode 100644 index 0000000000..0fe554f93d --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.apache.linkis.udf.entity.PythonModuleInfoVO + +/** + * 定义一个用于Spark引擎的Python模块加载与执行挂钩的类 + */ +class PythonSparkEngineHook extends PythonModuleLoadEngineConnHook { + + // 设置engineType属性为"spark",表示此挂钩适用于Spark数据处理引擎 + override val engineType: String = "spark" + + // 设置runType属性为RunType.PYSPARK,表示此挂钩将执行PySpark类型的代码 + override protected val runType: RunType = RunType.PYSPARK + + // 重写constructCode方法,用于根据Python模块信息构造加载模块的代码 + override protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String = { + // 使用pythonModuleInfo的path属性,构造SparkContext.addPyFile的命令字符串 + // 这个命令在PySpark环境中将模块文件添加到所有worker上,以便在代码中可以使用 + val path: String = pythonModuleInfo.getPath + val loadCode = s"sc.addPyFile('${path}')" + logger.info(s"pythonLoadCode: ${loadCode}") + loadCode + } + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala index 6f31bd25e8..91af2811a6 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala @@ -42,7 +42,6 @@ import org.apache.commons.io.{FileUtils, IOUtils} import org.apache.commons.lang3.StringUtils import java.io.File -import java.nio.charset.StandardCharsets import scala.collection.JavaConverters.asScalaBufferConverter 
import scala.collection.mutable.ArrayBuffer @@ -119,7 +118,7 @@ abstract class UDFLoad extends Logging { logger.info("read file: " + path) val file = new File(path) if (file.exists()) { - FileUtils.readFileToString(file, StandardCharsets.UTF_8) + FileUtils.readFileToString(file, java.nio.charset.StandardCharsets.UTF_8) } else { logger.info("udf file: [" + path + "] doesn't exist, ignore it.") "" diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala index 9ddde065cc..d6887218b7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala @@ -34,7 +34,7 @@ import org.apache.commons.lang3.StringUtils abstract class UseDatabaseEngineHook extends EngineConnHook with Logging { - private val USE_DEFAULT_DB_ENABLE = CommonVars("linkis.bdp.use.default.db.enable", false) + private val USE_DEFAULT_DB_ENABLE = CommonVars("wds.linkis.bdp.use.default.db.enable", true) override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala index 77616944d2..9469c00685 100--- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala @@ -28,8 +28,11 @@ import org.apache.linkis.engineconn.computation.executor.execute.EngineExecution import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext +import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel +import java.util + class ExecuteOnceHook extends ComputationExecutorHook with ExecutorLockListener with Logging { private var executeOnce = false @@ -47,6 +50,11 @@ class ExecuteOnceHook extends ComputationExecutorHook with ExecutorLockListener codeBeforeHook: String ): String = { executeOnce = engineExecutionContext.getLabels.exists(_.isInstanceOf[ExecuteOnceLabel]) + val creationLabelList: util.List[Label[_]] = engineCreationContext.getLabels() + if (creationLabelList != null) { + executeOnce = + executeOnce || creationLabelList.toArray().exists(_.isInstanceOf[ExecuteOnceLabel]) + } if (executeOnce && !isRegister) { isRegister = true asyncListenerBusContext.addListener(this) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala index f96896f557..4446bdc672 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala @@ -91,4 +91,12 @@ object ComputationEngineConnMetrics { getTotalBusyTimeMills(nodeStatus) + getTotalIdleTimeMills(nodeStatus) def getUnlockToShutdownDurationMills(): Long = unlockToShutdownDurationMills.get() + + def getLastUnlockTimestamp(nodeStatus: NodeStatus): Long = { + nodeStatus match { + case NodeStatus.Unlock => lastUnlockTimeMills + case _ => 0 + } + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala index 1f8c491ced..55b88f520b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala @@ -19,7 +19,8 @@ package org.apache.linkis.engineconn.computation.executor.rs import org.apache.linkis.common.io.{MetaData, Record} import org.apache.linkis.common.io.resultset.ResultSetWriter -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.storage.LineRecord @@ 
-45,6 +46,9 @@ class RsOutputStream extends OutputStream with Logging { } def reset(engineExecutionContext: EngineExecutionContext): Unit = { + if (ComputationExecutorConf.CLOSE_RS_OUTPUT_WHEN_RESET_BY_DEFAULT_ENABLED) { + Utils.tryQuietly(close()) + } writer = engineExecutionContext.createDefaultResultSetWriter() writer.addMetaData(null) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala index 010ced97fd..e5d74282de 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala @@ -66,6 +66,10 @@ class DefaultNodeHeartbeatMsgManager extends NodeHeartbeatMsgManager with Loggin ECConstants.EC_TOTAL_LOCK_TIME_MILLS_KEY, ComputationEngineConnMetrics.getTotalLockTimeMills(status).asInstanceOf[Object] ) + msgMap.put( + ECConstants.EC_LAST_UNLOCK_TIMESTAMP, + ComputationEngineConnMetrics.getLastUnlockTimestamp(status).asInstanceOf[Object] + ) case _ => } val engineParams = EngineConnObject.getEngineCreationContext.getOptions diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala index 
bc738d5498..28df29db31 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala @@ -50,6 +50,7 @@ import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineconn.executor.listener.event.EngineConnSyncEvent +import org.apache.linkis.engineconn.launch.EngineConnServer import org.apache.linkis.governance.common.constant.ec.ECConstants import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.exception.engineconn.{ @@ -58,11 +59,13 @@ import org.apache.linkis.governance.common.exception.engineconn.{ } import org.apache.linkis.governance.common.protocol.task._ import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.hadoop.common.utils.KerberosUtils import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.resource.{ ResponseTaskRunningInfo, ResponseTaskYarnResource } +import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.protocol.message.RequestProtocol @@ -106,19 +109,12 @@ class TaskExecutionServiceImpl private lazy val executorManager = ExecutorManager.getInstance private val taskExecutedNum = new AtomicInteger(0) private var lastTask: EngineConnTask = _ - private var lastTaskFuture: Future[_] = _ + private var syncLastTaskThread: Thread = _ 
private var lastTaskDaemonFuture: Future[_] = _ - // for concurrent executor - private var consumerThread: Thread = _ - private var concurrentTaskQueue: BlockingQueue[EngineConnTask] = _ - @Autowired private var lockService: LockService = _ - private val asyncListenerBusContext = - ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus - private val syncListenerBus = ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnSyncListenerBus @@ -133,18 +129,19 @@ class TaskExecutionServiceImpl "ConcurrentEngineConnThreadPool" ) - private val CONCURRENT_TASK_LOCKER = new Object - private val taskAsyncSubmitExecutor: ExecutionContextExecutorService = Utils.newCachedExecutionContext( ComputationExecutorConf.TASK_ASYNC_MAX_THREAD_SIZE, - "TaskExecution-Thread-" + ComputationEngineConstant.TASK_EXECUTION_THREAD ) @PostConstruct def init(): Unit = { LogHelper.setLogListener(this) syncListenerBus.addListener(this) + if (ComputationExecutorConf.ENGINE_KERBEROS_AUTO_REFRESH_ENABLED) { + KerberosUtils.startKerberosRefreshThread() + } } private def sendToEntrance(task: EngineConnTask, msg: RequestProtocol): Unit = { @@ -167,6 +164,12 @@ class TaskExecutionServiceImpl } } + /** + * submit to async thread return submit response + * @param requestTask + * @param sender + * @return + */ @Receiver override def execute(requestTask: RequestTask, sender: Sender): ExecuteResponse = Utils.tryFinally { @@ -201,7 +204,11 @@ class TaskExecutionServiceImpl ) } - val taskId: Int = taskExecutedNum.incrementAndGet() + val taskId: String = if (StringUtils.isNotBlank(jobId)) { + jobId + } else { + String.valueOf(taskExecutedNum.incrementAndGet()) + } val retryAble: Boolean = { val retry = requestTask.getProperties.getOrDefault( @@ -216,7 +223,7 @@ class TaskExecutionServiceImpl System.getProperties.put(ComputationExecutorConf.JOB_ID_TO_ENV_KEY, jobId) logger.info(s"Received job with id ${jobId}.") } - val task = new 
CommonEngineConnTask(String.valueOf(taskId), retryAble) + val task = new CommonEngineConnTask(taskId, retryAble) task.setCode(requestTask.getCode) task.setProperties(requestTask.getProperties) task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock) @@ -277,19 +284,6 @@ class TaskExecutionServiceImpl } } - private def restExecutorLabels(labels: Array[Label[_]]): Array[Label[_]] = { - var newLabels = labels - ExecutorLabelsRestHook.getExecutorLabelsRestHooks.foreach(hooke => - newLabels = hooke.restExecutorLabels(newLabels) - ) - newLabels - } - - // override def taskStatus(taskID: String): ResponseTaskStatus = { - // val task = taskIdCache.get(taskID) - // ResponseTaskStatus(taskID, task.getStatus.id) - // } - private def submitTask( task: CommonEngineConnTask, computationExecutor: ComputationExecutor @@ -309,100 +303,58 @@ class TaskExecutionServiceImpl task: CommonEngineConnTask, computationExecutor: ComputationExecutor ): ExecuteResponse = { - var response: ExecuteResponse = SubmitResponse(task.getTaskId) - Utils.tryCatch { - computationExecutor.execute(task) - } { t => - logger.error(s"Failed to submit task${task.getTaskId} ", t) - response = ErrorExecuteResponse("Failed to submit task", t) - null - } - response + computationExecutor.execute(task) } private def submitSyncTask( task: CommonEngineConnTask, computationExecutor: ComputationExecutor ): ExecuteResponse = { - val runTask = new Runnable { - override def run(): Unit = Utils.tryAndWarn { - LogHelper.dropAllRemainLogs() - executeTask(task, computationExecutor) - } - } + LogHelper.dropAllRemainLogs() lastTask = task - lastTaskFuture = Utils.defaultScheduler.submit(runTask) - lastTaskDaemonFuture = openDaemonForTask(task, lastTaskFuture, Utils.defaultScheduler) - SubmitResponse(task.getTaskId) + syncLastTaskThread = Thread.currentThread() + lastTaskDaemonFuture = openDaemonForTask(task, Utils.defaultScheduler) + val res = executeTask(task, computationExecutor) + res } private def 
submitConcurrentTask( task: CommonEngineConnTask, executor: ConcurrentComputationExecutor ): ExecuteResponse = { - if (null == concurrentTaskQueue) CONCURRENT_TASK_LOCKER.synchronized { - if (null == concurrentTaskQueue) { - concurrentTaskQueue = new LinkedBlockingDeque[EngineConnTask]() - } - } - concurrentTaskQueue.put(task) - if (null == consumerThread) CONCURRENT_TASK_LOCKER.synchronized { - if (null == consumerThread) { - consumerThread = new Thread(createConsumerRunnable(executor)) - consumerThread.setDaemon(true) - consumerThread.setName("ConcurrentTaskQueueFifoConsumerThread") - consumerThread.start() - } - } - SubmitResponse(task.getTaskId) - } - - private def createConsumerRunnable(executor: ComputationExecutor): Thread = { - val consumerRunnable = new Runnable { + val concurrentJob = new Runnable { override def run(): Unit = { - var errCount = 0 - val ERR_COUNT_MAX = 20 - while (true) { - Utils.tryCatch { - if (!executor.isBusy && !executor.isClosed) { - val task = concurrentTaskQueue.take() - val concurrentJob = new Runnable { - override def run(): Unit = { - lastTask = task - Utils.tryCatch { - logger.info(s"Start to run task ${task.getTaskId}") - executeTask(task, executor) - } { case t: Throwable => - errCount += 1 - logger.error(s"Execute task ${task.getTaskId} failed :", t) - if (errCount > ERR_COUNT_MAX) { - logger.error( - s"Executor run failed for ${errCount} times over ERROR_COUNT_MAX : ${ERR_COUNT_MAX}, will shutdown." 
- ) - executor.transition(NodeStatus.ShuttingDown) - } - } - } - } - cachedThreadPool.submit(concurrentJob) - } - Thread.sleep(20) - } { case t: Throwable => - logger.error(s"consumerThread failed :", t) - } + Utils.tryCatch { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + logger.info(s"Start to run task ${task.getTaskId}") + executeTask(task, executor) + } { case t: Throwable => + logger.warn("Failed to execute task ", t) + sendToEntrance( + task, + ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) + ) + sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) + LoggerUtils.removeJobIdMDC() + null } } } - new Thread(consumerRunnable) + Utils.tryCatch(cachedThreadPool.submit(concurrentJob)) { case e: Exception => + logger.error(s"Failed to submit task ${task.getTaskId}", e) + throw e + } + SubmitResponse(task.getTaskId) } - private def executeTask(task: EngineConnTask, executor: ComputationExecutor): Unit = + private def executeTask(task: EngineConnTask, executor: ComputationExecutor): ExecuteResponse = Utils.tryFinally { val jobId = JobUtils.getJobIdFromMap(task.getProperties) LoggerUtils.setJobIdMDC(jobId) executor.execute(task) - clearCache(task.getTaskId) } { + clearCache(task.getTaskId) LoggerUtils.removeJobIdMDC() } @@ -415,39 +367,54 @@ class TaskExecutionServiceImpl * scheduler * @return */ - private def openDaemonForTask( - task: EngineConnTask, - taskFuture: Future[_], - scheduler: ExecutorService - ): Future[_] = { + private def openDaemonForTask(task: EngineConnTask, scheduler: ExecutorService): Future[_] = { val sleepInterval = ComputationExecutorConf.ENGINE_PROGRESS_FETCH_INTERVAL.getValue scheduler.submit(new Runnable { - override def run(): Unit = Utils.tryAndWarn { + override def run(): Unit = { + logger.info(s"start daemon thread ${task.getTaskId}, ${task.getStatus}") Utils.tryQuietly(Thread.sleep(TimeUnit.MILLISECONDS.convert(1, TimeUnit.SECONDS))) - 
while (null != taskFuture && !taskFuture.isDone) { - if ( - ExecutionNodeStatus.isCompleted(task.getStatus) || ExecutionNodeStatus - .isRunning(task.getStatus) - ) { - val progressResponse = taskProgress(task.getTaskId) - val resourceResponse = buildResourceMap(task) - val extraInfoMap = buildExtraInfoMap(task) - // todo add other info - val resourceMap = - if (null != resourceResponse) resourceResponse.getResourceMap else null - - val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo( - progressResponse.execId, - progressResponse.progress, - progressResponse.progressInfo, + while (!ExecutionNodeStatus.isCompleted(task.getStatus)) { + Utils.tryAndWarn { + val progressResponse = Utils.tryCatch(taskProgress(task.getTaskId)) { + case e: Exception => + logger.info("Failed to get progress", e) + null + } + val resourceResponse = Utils.tryCatch(buildResourceMap(task)) { case e: Exception => + logger.info("Failed to get resource", e) + null + } + val extraInfoMap = Utils.tryCatch(buildExtraInfoMap(task)) { case e: Exception => + logger.info("Failed to get extra info ", e) + null + } + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null + + /** + * It is guaranteed that there must be progress the progress must be greater than or + * equal to 0.1 + */ + val newProgressResponse = if (null == progressResponse) { + ResponseTaskProgress(task.getTaskId, 0.1f, null) + } else if (progressResponse.progress < 0.1f) { + ResponseTaskProgress(task.getTaskId, 0.1f, progressResponse.progressInfo) + } else { + progressResponse + } + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( + newProgressResponse.execId, + newProgressResponse.progress, + newProgressResponse.progressInfo, resourceMap, extraInfoMap ) - sendToEntrance(task, respRunningInfo) - Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS)) } + Utils.tryQuietly( + Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, 
TimeUnit.SECONDS)) + ) } + logger.info(s"daemon thread exit ${task.getTaskId}, ${task.getStatus}") } }) } @@ -476,7 +443,7 @@ class TaskExecutionServiceImpl taskYarnResource(task.getTaskId) match { case responseTaskYarnResource: ResponseTaskYarnResource => if ( - responseTaskYarnResource.getResourceMap != null && !responseTaskYarnResource.getResourceMap.isEmpty + responseTaskYarnResource.resourceMap != null && !responseTaskYarnResource.resourceMap.isEmpty ) { responseTaskYarnResource } else { @@ -493,13 +460,13 @@ class TaskExecutionServiceImpl executor match { case executor: ResourceFetchExecutor => val resourceWithApplicationId = executor.FetchResource - new ResponseTaskYarnResource(taskID, resourceWithApplicationId) + ResponseTaskYarnResource(taskID, resourceWithApplicationId) case _ => null } } override def taskProgress(taskID: String): ResponseTaskProgress = { - var response = ResponseTaskProgress(taskID, 0, null) + var response = ResponseTaskProgress(taskID, 0.01f, null) if (StringUtils.isBlank(taskID)) return response val executor = taskIdCache.getIfPresent(taskID) if (null != executor) { @@ -514,11 +481,9 @@ class TaskExecutionServiceImpl ResponseTaskProgress(taskID, progress, executor.getProgressInfo(taskID)) ) } - } else { - response = ResponseTaskProgress(taskID, -1, null) } } else { - logger.error(s"Executor of taskId : $taskID is not cached.") + logger.info(s"Executor of taskId : $taskID is not cached.") } response } @@ -536,16 +501,20 @@ class TaskExecutionServiceImpl override def killTask(taskID: String): Unit = { val executor = taskIdCache.getIfPresent(taskID) if (null != executor) { - executor.killTask(taskID) + Utils.tryAndWarn(executor.killTask(taskID)) logger.info(s"TaskId : ${taskID} was killed by user.") } else { logger.error(s"Kill failed, got invalid executor : null for taskId : ${taskID}") } if (null != lastTask && lastTask.getTaskId.equalsIgnoreCase(taskID)) { - if (null != lastTaskFuture && !lastTaskFuture.isDone) { - Utils.tryAndWarn { - 
lastTaskFuture.cancel(true) - } + if (null != syncLastTaskThread) { + logger.info(s"try to interrupt thread:${taskID}") + Utils.tryAndWarn(syncLastTaskThread.interrupt()) + logger.info(s"thread isInterrupted:${taskID}") + } else { + logger.info(s"skip to force stop thread:${taskID}") + } + if (null != lastTaskDaemonFuture && !lastTaskDaemonFuture.isDone) { Utils.tryAndWarn { // Close the daemon also lastTaskDaemonFuture.cancel(true) @@ -606,7 +575,7 @@ class TaskExecutionServiceImpl logger.warn("Unknown event : " + BDPJettyServerHelper.gson.toJson(event)) } - override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = { + override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = Utils.tryAndWarn { if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { if (null != logUpdateEvent && StringUtils.isNotBlank(logUpdateEvent.taskId)) { val task = getTaskByTaskId(logUpdateEvent.taskId) @@ -639,7 +608,6 @@ class TaskExecutionServiceImpl val task = getTaskByTaskId(taskStatusChangedEvent.taskId) if (null != task) { if (ExecutionNodeStatus.isCompleted(taskStatusChangedEvent.toStatus)) { - lastTask = task LogHelper.pushAllRemainLogs() } val toStatus = taskStatusChangedEvent.toStatus @@ -663,32 +631,33 @@ class TaskExecutionServiceImpl } } - override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = { - if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { - val task = getTaskByTaskId(taskProgressUpdateEvent.taskId) - if (null != task) { - val resourceResponse = buildResourceMap(task) - val extraInfoMap = buildExtraInfoMap(task) + override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = + Utils.tryAndWarn { + if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { + val task = getTaskByTaskId(taskProgressUpdateEvent.taskId) + if (null != task) { + val resourceResponse = buildResourceMap(task) + val extraInfoMap = buildExtraInfoMap(task) - val resourceMap = if (null != resourceResponse) 
resourceResponse.getResourceMap else null + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null - val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo( - taskProgressUpdateEvent.taskId, - taskProgressUpdateEvent.progress, - taskProgressUpdateEvent.progressInfo, - resourceMap, - extraInfoMap - ) + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( + taskProgressUpdateEvent.taskId, + taskProgressUpdateEvent.progress, + taskProgressUpdateEvent.progressInfo, + resourceMap, + extraInfoMap + ) - sendToEntrance(task, respRunningInfo) - } else { - logger.error( - "Task cannot null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON - .toJson(taskProgressUpdateEvent) - ) + sendToEntrance(task, respRunningInfo) + } else { + logger.error( + "Task cannot null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON + .toJson(taskProgressUpdateEvent) + ) + } } } - } override def onResultSetCreated(taskResultCreateEvent: TaskResultCreateEvent): Unit = { logger.info(s"start to deal result event ${taskResultCreateEvent.taskId}") diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala index 1f13380c01..c7d78ef9eb 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala @@ -57,7 +57,7 @@ class ECTaskEntranceMonitor def unregister(taskID: String): Unit = { if 
(!wrapperMap.containsKey(taskID)) { - logger.error("attempted to unregister non-existing EngineConnTask!! task-id: " + taskID) + logger.warn("attempted to unregister non-existing EngineConnTask!! task-id: " + taskID) } wrapperMap.remove(taskID) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala index f2b894ef91..3cc1fdfe82 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala @@ -18,6 +18,8 @@ package org.apache.linkis.engineconn.computation.executor.upstream import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration +import org.apache.linkis.engineconn.acessible.executor.service.ExecutorHeartbeatServiceHolder import org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.computation.executor.upstream.access.{ ConnectionInfoAccess, @@ -29,6 +31,8 @@ import org.apache.linkis.engineconn.computation.executor.upstream.handler.{ } import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ConnectionInfoWrapper import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import 
org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor import org.apache.commons.lang3.concurrent.BasicThreadFactory @@ -112,6 +116,23 @@ abstract class SingleThreadUpstreamConnectionMonitor( "requesting connection info: " + util.Arrays .toString(Collections.list(wrapperMap.keys).toArray()) ) + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { + val executor = ExecutorManager.getInstance.getReportExecutor + executor match { + case concurrentExecutor: ConcurrentExecutor => + if (toBeRequested.size() > (concurrentExecutor.getConcurrentLimit + 20)) { + logger.warn( + s"Executor running task has exceed the limit ${toBeRequested.size()}, executor id ${concurrentExecutor.getId}" + ) + ExecutorHeartbeatServiceHolder + .getDefaultHeartbeatService() + .setSelfUnhealthy( + s"running task has exceed the limit: ${concurrentExecutor.getConcurrentLimit}" + ) + } + case _ => + } + } val infoAccessRequest = generateInfoAccessRequest(toBeRequested) val connectionInfoList = infoAccess.getUpstreamInfo(infoAccessRequest) logger.info( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala index d06e8ac077..15e70315e3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala @@ -39,10 +39,10 @@ class ECTaskKillHandler extends MonitorHandler with Logging { while (elements.hasNext) { val element = 
elements.next Utils.tryCatch { - doKill(element) logger.error( s"ERROR: entrance : ${element.getUpstreamConnection().getUpstreamServiceInstanceName()} lose connect, will kill job : ${element.getKey()}" ) + doKill(element) } { t => logger.error("Failed to kill job: " + element.getKey, t) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala index 0acc47260e..44a80cff62 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala @@ -66,8 +66,7 @@ class ECTaskEntranceMonitorService logger.info("registering new task: " + event.taskId) eCTaskEntranceMonitor.register(event.task, event.executor) } else if ( - fromStatus == ExecutionNodeStatus.Running && - (toStatus == ExecutionNodeStatus.Succeed || toStatus == ExecutionNodeStatus.Failed || toStatus == ExecutionNodeStatus.Cancelled || toStatus == ExecutionNodeStatus.Timeout) + !ExecutionNodeStatus.isCompleted(fromStatus) && ExecutionNodeStatus.isCompleted(toStatus) ) { logger.info("unRegistering task: " + event.taskId) eCTaskEntranceMonitor.unregister(event.task.getTaskId) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala index 2a03b405cb..4e50fc0cf0 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala @@ -27,4 +27,6 @@ object ComputationEngineConstant { def CS_HOOK_ORDER: Int = -1 + val TASK_EXECUTION_THREAD = "TaskExecution-Thread-" + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala index eefc69f5bb..a20358b57c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala @@ -61,6 +61,8 @@ object EngineConnConf { val ENGINE_CONN_LOCAL_LOG_DIRS_KEY = CommonVars("wds.linkis.engine.logs.dir.key", "LOG_DIRS") + val ENGINE_CONN_LOCAL_TMP_DIR = CommonVars("wds.linkis.engine.tmp.dir", "TEMP_DIRS") + val ENGINE_CONN_CREATION_WAIT_TIME = CommonVars("wds.linkis.engine.connector.init.time", new TimeType("8m")) @@ -79,11 +81,13 @@ object EngineConnConf { val HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX = CommonVars("wds.linkis.hive.engine.yarn.app.id.parse.regex", "(application_\\d{13}_\\d+)") - val SEATUNNEL_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX = - 
CommonVars("wds.linkis.seatunnel.engine.yarn.app.id.parse.regex", "(application_\\d{13}_\\d+)") + val JOB_YARN_TASK_URL = CommonVars("linkis.job.task.yarn.url", ""); + val JOB_YARN_CLUSTER_TASK_URL = CommonVars("linkis.job.task.yarn.cluster.url", ""); def getWorkHome: String = System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue) + def getEngineTmpDir: String = System.getenv(ENGINE_CONN_LOCAL_TMP_DIR.getValue) + def getLogDir: String = { val logDir = System.getenv(ENGINE_CONN_LOCAL_LOG_DIRS_KEY.getValue) if (StringUtils.isNotEmpty(logDir)) logDir else new File(getWorkHome, "logs").getPath diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala index e74f5b7898..fec2756f9f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala @@ -20,4 +20,12 @@ package org.apache.linkis.engineconn.common.conf object EngineConnConstant { val MAX_TASK_NUM = 10000 + + val SPRING_CONF_MAP_NAME = "SpringConfMap" + + val MAX_EXECUTOR_ID_NAME = "MaxExecutorId" + + var hiveLogReg = "The url to track the job: http://.*?/proxy/(application_[0-9]+_[0-9]+)/" + + val YARN_LOG_URL = "Yarn application url:" } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala index 86ab8a1f68..524f44c33a 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.core.hook import org.apache.linkis.common.utils.Logging import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.locks.ReentrantLock class ShutdownHook extends Logging { @@ -35,6 +36,10 @@ class ShutdownHook extends Logging { // Guarded by "lock" private var stopped: Boolean = false + private val tryStopTimes = new AtomicInteger(0) + + private val maxTimes = 10; + def notifyError(e: Throwable): Unit = { lock.lock() try { @@ -49,12 +54,17 @@ class ShutdownHook extends Logging { def notifyStop(): Unit = { lock.lock() + val num = tryStopTimes.incrementAndGet() try { setExitCode(0) stopped = true condition.signalAll() } finally { lock.unlock() + if (num >= maxTimes) { + logger.error(s"try to stop with times:${num}, now do system exit!!!") + System.exit(0) + } } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala index aa69bdae8c..d2247a6d2e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala @@ -17,7 +17,7 @@ package org.apache.linkis.engineconn.core.util -import com.google.gson.{Gson, GsonBuilder, ToNumberPolicy} +import com.google.gson.{GsonBuilder, 
ToNumberPolicy} object EngineConnUtils { diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java similarity index 50% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java index f8e01a682c..a1d74fadc6 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java @@ -15,33 +15,24 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.script.compaction; +package org.apache.linkis.engineconn.acessible.executor.log; -import org.apache.linkis.storage.script.Compaction; -import org.apache.linkis.storage.script.Variable; +import org.apache.linkis.common.log.LogUtils; -public abstract class CommonScriptCompaction implements Compaction { +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; - @Override - public String compact(Variable variable) { - switch (variable.getSortParent()) { - case "variable": - return prefix() + " " + variable.getKey() + "=" + variable.getValue(); - default: - return prefixConf() - + " " - + variable.getSortParent() - + " " - + variable.getSort() - + " " - + variable.getKey() - + "=" - + variable.getValue(); +public abstract class AbstractLogCache implements LogCache { + protected String generateLog(LogEvent event) { + if (event.getLevel() == Level.INFO) { + return LogUtils.generateInfo(event.getMessage().toString()); + } else if (event.getLevel() == Level.WARN) { + return LogUtils.generateWarn(event.getMessage().toString()); + } else if (event.getLevel() == Level.ERROR) { + return LogUtils.generateERROR(event.getMessage().toString()); + } else if (event.getLevel() == Level.FATAL) { + return LogUtils.generateSystemError(event.getMessage().toString()); } - } - - @Override - public String getAnnotationSymbol() { - return prefix().split("@")[0]; + return ""; } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java index e158bdc86b..e80298cef1 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java @@ -27,7 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class MountLogCache implements LogCache { +public class MountLogCache extends AbstractLogCache { private static final Logger logger = LoggerFactory.getLogger(MountLogCache.class); diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java index cde56ca03a..05976bb2c7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java @@ -18,15 +18,20 @@ package org.apache.linkis.engineconn.acessible.executor.log; import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration; +import org.apache.linkis.engineconn.common.conf.EngineConnConf; +import org.apache.linkis.engineconn.common.conf.EngineConnConstant; +import org.apache.linkis.engineconn.common.creation.EngineCreationContext; +import org.apache.linkis.engineconn.core.EngineConnObject; import org.apache.linkis.engineconn.executor.listener.EngineConnSyncListenerBus; import 
org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext; +import org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.manager.label.entity.Label; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.Filter; import org.apache.logging.log4j.core.Layout; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; -import org.apache.logging.log4j.core.config.Property; import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.config.plugins.PluginAttribute; import org.apache.logging.log4j.core.config.plugins.PluginElement; @@ -34,6 +39,8 @@ import org.apache.logging.log4j.core.layout.PatternLayout; import java.io.Serializable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,9 +72,12 @@ public SendAppender( final Filter filter, final Layout layout, final boolean ignoreExceptions) { - super(name, filter, layout, ignoreExceptions, Property.EMPTY_ARRAY); + super(name, filter, layout, ignoreExceptions); this.logCache = LogHelper.logCache(); + // SendThread thread = new SendThread(); logger.info("SendAppender init success"); + // TIMER.schedule(thread, 2000, (Integer) + // AccessibleExecutorConfiguration.ENGINECONN_LOG_SEND_TIME_INTERVAL().getValue()); } @Override @@ -91,6 +101,7 @@ public void append(LogEvent event) { } } if (!flag) { + // logStr = matchLog(logStr); logCache.cacheLog(logStr); } } else { @@ -113,4 +124,28 @@ public static SendAppender createAppender( } return new SendAppender(name, filter, layout, ignoreExceptions); } + + /** + * * + * + *

Match the hive log, if it matches the yarn log, print the log and replace it + */ + public String matchLog(String logLine) { + Matcher hiveMatcher = Pattern.compile(EngineConnConstant.hiveLogReg()).matcher(logLine); + if (hiveMatcher.find()) { + String yarnUrl = EngineConnConf.JOB_YARN_TASK_URL().getValue(); + EngineCreationContext engineContext = EngineConnObject.getEngineCreationContext(); + if (null != engineContext) { + for (Label label : engineContext.getLabels()) { + if (label.getLabelKey().equals(LabelKeyConstant.YARN_CLUSTER_KEY)) { + yarnUrl = EngineConnConf.JOB_YARN_CLUSTER_TASK_URL().getValue(); + } + } + } + logLine = + hiveMatcher.replaceAll( + EngineConnConstant.YARN_LOG_URL() + yarnUrl + hiveMatcher.group(1)); + } + return logLine; + } } diff --git a/linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java similarity index 68% rename from linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java index 8c4d4d1447..2ba3efc63d 100644 --- a/linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java @@ -15,30 +15,27 @@ * limitations under the License. 
*/ -package org.apache.linkis.cs.listener.test; +package org.apache.linkis.engineconn.acessible.executor.log; -import org.apache.linkis.cs.common.entity.source.ContextValue; +import java.util.List; -public class TestContextValue implements ContextValue { - private Object value; - - private String keywords; +/** Description: Cache with time as storage unit(以时间作为存储单位的缓存方式) */ +public class TimeLogCache extends AbstractLogCache { + @Override + public void cacheLog(String log) {} @Override - public String getKeywords() { + public List getLog(int num) { return null; } @Override - public void setKeywords(String keywords) {} - - @Override - public Object getValue() { - return this.value; + public List getRemain() { + return null; } @Override - public void setValue(Object value) { - this.value = value; + public int size() { + return 0; } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java deleted file mode 100644 index 66e1c575f0..0000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineconn.acessible.executor.operator.impl; - -import org.apache.linkis.engineconn.common.exception.EngineConnException; -import org.apache.linkis.engineconn.core.executor.ExecutorManager$; -import org.apache.linkis.engineconn.core.executor.LabelExecutorManager; -import org.apache.linkis.engineconn.executor.entity.Executor; -import org.apache.linkis.engineconn.executor.entity.YarnExecutor; -import org.apache.linkis.governance.common.constant.ec.ECConstants; -import org.apache.linkis.manager.common.operator.Operator; - -import java.util.HashMap; -import java.util.Map; - -public class EngineConnApplicationInfoOperator implements Operator { - - public static final String OPERATOR_NAME = "engineConnYarnApplication"; - - @Override - public String[] getNames() { - return new String[] {OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - LabelExecutorManager instance = ExecutorManager$.MODULE$.getInstance(); - Executor reportExecutor = instance.getReportExecutor(); - if (reportExecutor instanceof YarnExecutor) { - YarnExecutor yarnExecutor = (YarnExecutor) reportExecutor; - Map result = new HashMap<>(); - result.put(ECConstants.YARN_APPID_NAME_KEY(), yarnExecutor.getApplicationId()); - result.put(ECConstants.YARN_APP_URL_KEY(), yarnExecutor.getApplicationURL()); - result.put(ECConstants.QUEUE(), yarnExecutor.getQueue()); - result.put(ECConstants.YARN_MODE_KEY(), yarnExecutor.getYarnMode()); - return result; - } else { - throw new EngineConnException( - 20301, "EngineConn is not a yarn application, cannot 
fetch applicaiton info."); - } - } -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala index 95a01202e8..0cebf5ed15 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala @@ -37,11 +37,14 @@ object AccessibleExecutorConfiguration { val ENGINECONN_LOG_SEND_SIZE = CommonVars[Int]("wds.linkis.engineconn.log.send.cache.size", 300) val ENGINECONN_MAX_FREE_TIME = - CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("30m")) + CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("5m")) val ENGINECONN_LOCK_CHECK_INTERVAL = CommonVars("wds.linkis.engineconn.lock.free.interval", new TimeType("3m")) + val ENGINECONN_ENABLED_LOCK_IDLE_TIME_OUT = + CommonVars("linkis.engineconn.enabled.lock.timeout.release", true) + val ENGINECONN_SUPPORT_PARALLELISM = CommonVars("wds.linkis.engineconn.support.parallelism", false) @@ -67,4 +70,10 @@ object AccessibleExecutorConfiguration { "Heartbeat status report repeated ignore, default 3ms,Negative numbers do not take effect" ).getValue + val ENGINECONN_AUTO_EXIT = + CommonVars("linkis.engineconn.support.auto.exit", false).getValue + + val ENGINECONN_AUTO_EXIT_DAYS = + CommonVars("linkis.engineconn.auto.exit.days", 7).getValue + } diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala index 0bd7ececef..e99f5f21c1 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.acessible.executor.execution import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor +import org.apache.linkis.engineconn.acessible.executor.service.ExecutorHeartbeatServiceHolder import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.common.execution.EngineConnExecution @@ -40,6 +41,7 @@ import org.apache.linkis.manager.common.protocol.resource.ResourceUsedProtocol import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender +import java.util.Random import java.util.concurrent.TimeUnit class AccessibleEngineConnExecution extends EngineConnExecution with Logging { @@ -73,6 +75,9 @@ class AccessibleEngineConnExecution extends EngineConnExecution with Logging { reportUsedResource(executor, engineCreationContext) reportLabel(executor) 
executorStatusChecker + if (AccessibleExecutorConfiguration.ENGINECONN_AUTO_EXIT) { + ecAutoExit() + } afterReportToLinkisManager(executor, engineCreationContext, engineConn) } @@ -140,6 +145,39 @@ class AccessibleEngineConnExecution extends EngineConnExecution with Logging { ) } + /** + * EC auto exit only support concurrent executor + */ + private def ecAutoExit(): Unit = { + logger.info(s"ec auto exit start ${System.currentTimeMillis()}") + Utils.defaultScheduler.schedule( + new Runnable { + override def run(): Unit = Utils.tryAndWarn { + ExecutorManager.getInstance.getReportExecutor match { + case executor: ConcurrentExecutor => + val rand = new Random + val minute = rand.nextInt(5) + 1 + Thread.sleep(minute * 60000L) + if (executor.hasTaskRunning()) { + ExecutorHeartbeatServiceHolder + .getDefaultHeartbeatService() + .setSelfUnhealthy(s"EC running time exceed max time") + } else { + logger.warn( + s"Executor has no task running ${executor.getId}, will be to shutdown ec" + ) + executor.tryShutdown() + } + case _ => + logger.warn(s"Executor is not a ConcurrentExecutor, do noting") + } + } + }, + AccessibleExecutorConfiguration.ENGINECONN_AUTO_EXIT_DAYS, + TimeUnit.DAYS + ) + } + def requestManagerReleaseExecutor(msg: String, nodeStatus: NodeStatus): Unit = { val engineReleaseRequest = new EngineConnReleaseRequest( Sender.getThisServiceInstance, @@ -204,7 +242,7 @@ class AccessibleEngineConnExecution extends EngineConnExecution with Logging { case resourceExecutor: ResourceExecutor => ManagerService.getManagerService .reportUsedResource( - new ResourceUsedProtocol( + ResourceUsedProtocol( Sender.getThisServiceInstance, resourceExecutor.getCurrentNodeResource(), engineCreationContext.getTicketId diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala index 4365d5881d..6b96b6d4ed 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala @@ -20,7 +20,7 @@ package org.apache.linkis.engineconn.acessible.executor.info import org.apache.linkis.common.utils.Logging import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor import org.apache.linkis.engineconn.core.executor.ExecutorManager -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo import org.springframework.stereotype.Component @@ -29,20 +29,45 @@ trait NodeHealthyInfoManager { def getNodeHealthyInfo(): NodeHealthyInfo + def setNodeHealthy(healthy: NodeHealthy): Unit + + def getNodeHealthy(): NodeHealthy + + def setByManager(setByManager: Boolean): Unit + } @Component class DefaultNodeHealthyInfoManager extends NodeHealthyInfoManager with Logging { + private var healthy: NodeHealthy = NodeHealthy.Healthy + + private var setByManager: Boolean = false + override def getNodeHealthyInfo(): NodeHealthyInfo = { val nodeHealthyInfo = new NodeHealthyInfo nodeHealthyInfo.setMsg("") - nodeHealthyInfo.setNodeHealthy( + + /** 如果是manager主动设置的,则以manager设置的为准 */ + val newHealthy: NodeHealthy = if (this.setByManager) { + this.healthy + } else { NodeStatus.isEngineNodeHealthy( 
ExecutorManager.getInstance.getReportExecutor.asInstanceOf[AccessibleExecutor].getStatus ) - ) + } + logger.info("current node healthy status is {}", newHealthy) + nodeHealthyInfo.setNodeHealthy(newHealthy) nodeHealthyInfo } + override def setNodeHealthy(healthy: NodeHealthy): Unit = { + this.healthy = healthy + } + + override def setByManager(setByManager: Boolean): Unit = { + this.setByManager = setByManager + } + + override def getNodeHealthy(): NodeHealthy = this.healthy } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala index af4d1eb017..bb39545091 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala @@ -27,6 +27,7 @@ import org.apache.linkis.engineconn.acessible.executor.listener.event.{ ExecutorStatusChangedEvent, ExecutorUnLockEvent } +import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.SensibleExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext @@ -44,6 +45,10 @@ class EngineConnTimedLock(private var timeout: Long) var releaseTask: ScheduledFuture[_] = null var lastLockTime: Long = 0 + val idleTimeLockOut = AccessibleExecutorConfiguration.ENGINECONN_LOCK_CHECK_INTERVAL + 
.getValue(EngineConnObject.getEngineCreationContext.getOptions) + .toLong + override def acquire(executor: AccessibleExecutor): Unit = { lock.acquire() lastLockTime = System.currentTimeMillis() @@ -105,7 +110,9 @@ class EngineConnTimedLock(private var timeout: Long) isAcquired() && NodeStatus.Idle == reportExecutor.getStatus && isExpired() ) { // unlockCallback depends on lockedBy, so lockedBy cannot be set null before unlockCallback - logger.info(s"Lock : [${lock.toString} was released due to timeout.") + logger.info( + s"Lock : [${lock.toString} was released due to timeout. idleTimeLockOut $idleTimeLockOut" + ) release() } else if (isAcquired() && NodeStatus.Busy == reportExecutor.getStatus) { lastLockTime = System.currentTimeMillis() @@ -116,7 +123,7 @@ class EngineConnTimedLock(private var timeout: Long) } }, 3000, - AccessibleExecutorConfiguration.ENGINECONN_LOCK_CHECK_INTERVAL.getValue.toLong, + idleTimeLockOut, TimeUnit.MILLISECONDS ) logger.info("Add scheduled timeout task.") @@ -131,7 +138,11 @@ class EngineConnTimedLock(private var timeout: Long) override def isExpired(): Boolean = { if (lastLockTime == 0) return false if (timeout <= 0) return false - System.currentTimeMillis() - lastLockTime > timeout + if (AccessibleExecutorConfiguration.ENGINECONN_ENABLED_LOCK_IDLE_TIME_OUT.getValue) { + System.currentTimeMillis() - lastLockTime > idleTimeLockOut + } else { + System.currentTimeMillis() - lastLockTime > timeout + } } override def numOfPending(): Int = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala index 260e675a1d..a7169697ae 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala @@ -44,10 +44,17 @@ object LogHelper extends Logging { def setLogListener(logListener: LogListener): Unit = this.logListener = logListener + def cacheLog(log: String): Unit = { + logCache.cacheLog(log) + } + def pushAllRemainLogs(): Unit = { + // logger.info(s"start to push all remain logs") Thread.sleep(30) + // logCache.synchronized{ if (logListener == null) { logger.warn("logListener is null, can not push remain logs") + // return } else { var logs: util.List[String] = null diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala new file mode 100644 index 0000000000..c7635615e0 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.acessible.executor.operator.impl + +import org.apache.linkis.engineconn.acessible.executor.service.OperateService +import org.apache.linkis.engineconn.common.exception.EngineConnException +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.YarnExecutor +import org.apache.linkis.governance.common.constant.ec.ECConstants._ +import org.apache.linkis.manager.common.operator.Operator + +class EngineConnApplicationInfoOperator extends Operator { + + override def getNames: Array[String] = Array(EngineConnApplicationInfoOperator.OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + ExecutorManager.getInstance.getReportExecutor match { + case yarnExecutor: YarnExecutor => + Map( + YARN_APPID_NAME_KEY -> yarnExecutor.getApplicationId, + YARN_APP_URL_KEY -> yarnExecutor.getApplicationURL, + QUEUE -> yarnExecutor.getQueue, + YARN_MODE_KEY -> yarnExecutor.getYarnMode + ) + case _ => + throw EngineConnException( + 20301, + "EngineConn is not a yarn application, cannot fetch application info."
+ ) + } + } + +} + +object EngineConnApplicationInfoOperator { + val OPERATOR_NAME = "engineConnYarnApplication" +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala index 8ef944fc9c..97a9cab5da 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala @@ -113,7 +113,6 @@ class DefaultAccessibleService extends AccessibleService with Logging { logger.info("Reported status shuttingDown to manager.") Utils.tryQuietly(Thread.sleep(2000)) shutDownHooked = true - ShutdownHook.getShutdownHook.notifyStop() } override def stopExecutor: Unit = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala index ea3248ba6d..ff8e6666d1 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala @@ -31,8 +31,13 @@ import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.{Executor, ResourceExecutor, SensibleExecutor} import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineconn.executor.service.ManagerService -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.protocol.node.{NodeHeartbeatMsg, NodeHeartbeatRequest} +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo +import org.apache.linkis.manager.common.protocol.node.{ + NodeHealthyRequest, + NodeHeartbeatMsg, + NodeHeartbeatRequest +} import org.apache.linkis.rpc.Sender import org.apache.linkis.rpc.message.annotation.Receiver @@ -61,6 +66,8 @@ class DefaultExecutorHeartbeatService private val asyncListenerBusContext = ExecutorListenerBusContext.getExecutorListenerBusContext.getEngineConnAsyncListenerBus + private val healthyLock = new Object() + @PostConstruct private def init(): Unit = { asyncListenerBusContext.addListener(this) @@ -95,6 +102,16 @@ class DefaultExecutorHeartbeatService nodeHeartbeatRequest: NodeHeartbeatRequest ): NodeHeartbeatMsg = generateHeartBeatMsg(null) + @Receiver + def dealNodeHealthyRequest(nodeHealthyRequest: NodeHealthyRequest): Unit = + healthyLock synchronized { + val toHealthy = nodeHealthyRequest.getNodeHealthy + val healthyInfo: NodeHealthyInfo = nodeHealthyInfoManager.getNodeHealthyInfo() + logger.info(s"engine nodeHealthy from ${healthyInfo.getNodeHealthy} to ${toHealthy}") + nodeHealthyInfoManager.setByManager(true) + nodeHealthyInfoManager.setNodeHealthy(toHealthy) + } + override def 
onNodeHealthyUpdate(nodeHealthyUpdateEvent: NodeHealthyUpdateEvent): Unit = { logger.warn(s"node healthy update, tiger heartbeatReport") // val executor = ExecutorManager.getInstance.getReportExecutor @@ -139,4 +156,15 @@ class DefaultExecutorHeartbeatService nodeHeartbeatMsg } + override def setSelfUnhealthy(reason: String): Unit = healthyLock synchronized { + logger.info(s"Set self to unhealthy to automatically exit, reason: $reason") + if (EngineConnObject.isReady) { + val nodeHealthyInfo = nodeHealthyInfoManager.getNodeHealthyInfo() + if (nodeHealthyInfo.getNodeHealthy != NodeHealthy.UnHealthy) { + nodeHealthyInfoManager.setNodeHealthy(NodeHealthy.UnHealthy) + nodeHealthyInfoManager.setByManager(true) + } + } + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala index bc410c7186..fe3d731b7d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala @@ -57,8 +57,7 @@ class DefaultManagerService extends ManagerService with Logging { logger.info("engineType labels is empty, Not reported") return } - val labelReportRequest = - new LabelReportRequest(reportLabel.asJava, Sender.getThisServiceInstance) + val labelReportRequest = LabelReportRequest(reportLabel.asJava, Sender.getThisServiceInstance) getManagerSender.send(labelReportRequest) } @@ -76,6 +75,9 @@ class 
DefaultManagerService extends ManagerService with Logging { override def heartbeatReport(nodeHeartbeatMsg: NodeHeartbeatMsg): Unit = { getManagerSender.send(nodeHeartbeatMsg) + if (nodeHeartbeatMsg != null && nodeHeartbeatMsg.getHealthyInfo != null) { + logger.info("report engine healthy status: {}", nodeHeartbeatMsg.getHealthyInfo) + } logger.info( "success to send engine heartbeat report to {},status: {},msg: {}", Array( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala index 20399711bd..c0ef50636d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala @@ -30,8 +30,6 @@ import org.apache.commons.lang3.exception.ExceptionUtils import org.springframework.stereotype.Service -import java.util - import scala.collection.JavaConverters.mapAsScalaMapConverter @Service @@ -42,36 +40,25 @@ class DefaultOperateService extends OperateService with Logging { engineOperateRequest: EngineOperateRequest ): EngineOperateResponse = { var response: EngineOperateResponse = null - val parameters = { - val map = new util.HashMap[String, Object]() - engineOperateRequest.getParameters.asScala.foreach(entry => map.put(entry._1, entry._2)) - map - } - val operator = Utils.tryCatch(OperatorFactory.apply().getOperatorRequest(parameters)) { t => - logger.error(s"Get operator failed, parameters is 
${engineOperateRequest.getParameters}.", t) - response = new EngineOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + + val parameters = engineOperateRequest.parameters.asScala.toMap + val operator = Utils.tryCatch(OperatorFactory().getOperatorRequest(parameters)) { t => + logger.error(s"Get operator failed, parameters is ${engineOperateRequest.parameters}.", t) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) doPostHook(engineOperateRequest, response) return response } logger.info( - s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${engineOperateRequest.getParameters}." + s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${engineOperateRequest.parameters}." ) val result = Utils.tryCatch(operator(parameters)) { t => logger.error(s"Execute ${operator.getClass.getSimpleName} failed.", t) - response = new EngineOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) doPostHook(engineOperateRequest, response) return response } logger.info(s"End to execute operator ${operator.getClass.getSimpleName}.") - response = new EngineOperateResponse(result) + response = EngineOperateResponse(result) doPostHook(engineOperateRequest, response) response } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala index b5bbc26f92..026234e938 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala @@ -81,12 +81,12 @@ class EngineConnTimedLockService extends LockService with Logging { @throws[EngineConnExecutorErrorException] override def tryLock(requestEngineLock: RequestEngineLock): Option[String] = synchronized { if (null != engineConnLock && engineConnLock.isAcquired()) return None - this.lockType = requestEngineLock.getLockType + this.lockType = requestEngineLock.lockType lockType match { case EngineLockType.Always => timedLock(-1) case EngineLockType.Timed => - timedLock(requestEngineLock.getTimeout) + timedLock(requestEngineLock.timeout) case o: Any => logger.error("Invalid lockType : " + BDPJettyServerHelper.gson.toJson(o)) return Some(null) @@ -172,11 +172,11 @@ class EngineConnTimedLockService extends LockService with Logging { @Receiver override def requestUnLock(requestEngineUnlock: RequestEngineUnlock): ResponseEngineUnlock = { - if (StringUtils.isBlank(requestEngineUnlock.getLock)) { + if (StringUtils.isBlank(requestEngineUnlock.lock)) { logger.error("Invalid requestEngineUnlock: ") - new ResponseEngineUnlock(false) + ResponseEngineUnlock(false) } else { - new ResponseEngineUnlock(unlock(requestEngineUnlock.getLock)) + ResponseEngineUnlock(unlock(requestEngineUnlock.lock)) } } @@ -221,7 +221,7 @@ class EngineConnConcurrentLockService extends LockService { @Receiver override def requestUnLock(requestEngineUnlock: RequestEngineUnlock): ResponseEngineUnlock = - new ResponseEngineUnlock(true) + ResponseEngineUnlock(true) override def onAddLock(addLockEvent: ExecutorLockEvent): Unit = {} diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala index 7abcbe8dcf..77344921e3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala @@ -33,6 +33,8 @@ trait ExecutorHeartbeatService { def dealNodeHeartbeatRequest(nodeHeartbeatRequest: NodeHeartbeatRequest): NodeHeartbeatMsg + def setSelfUnhealthy(reason: String): Unit + } object ExecutorHeartbeatServiceHolder { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala index 28e4720953..0860076e40 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala @@ -49,15 +49,14 @@ trait LockService extends ExecutorLockListener with Logging { // Engine can be locked if 
(!StringUtils.isBlank(lockStr)) { // lock success - response = - new ResponseEngineLock(true, lockStr, s"Lock for ${requestEngineLock.getTimeout} ms") + response = ResponseEngineLock(true, lockStr, s"Lock for ${requestEngineLock.timeout} ms") } else { // lock failed - response = new ResponseEngineLock(false, lockStr, "lock str is blank") + response = ResponseEngineLock(false, lockStr, "lock str is blank") } case None => // Engine is busy - response = new ResponseEngineLock(false, null, "Engine is busy.") + response = ResponseEngineLock(false, null, "Engine is busy.") } logger.info( "RequestLock : " + BDPJettyServerHelper.gson.toJson( diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/ErrorCodeMapperTest.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala similarity index 58% rename from linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/ErrorCodeMapperTest.java rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala index 39d9d9b9f6..9b4a3ebc28 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/ErrorCodeMapperTest.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala @@ -15,26 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.errorcode.server.dao; +package org.apache.linkis.engineconn.acessible.executor.utils -import org.apache.linkis.errorcode.common.LinkisErrorCode; +import org.apache.linkis.DataWorkCloudApplication.getApplicationContext +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager +import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy -import org.springframework.beans.factory.annotation.Autowired; +object AccessibleExecutorUtils { -import java.util.List; + val manager: DefaultNodeHealthyInfoManager = + getApplicationContext.getBean(classOf[DefaultNodeHealthyInfoManager]) -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class ErrorCodeMapperTest extends BaseDaoTest { - - @Autowired private ErrorCodeMapper errorCodeMapper; - - @Test - @DisplayName("getAllErrorCodes") - public void getAllErrorCodesTest() { - List list = errorCodeMapper.getAllErrorCodes(); - Assertions.assertTrue(list.size() == 2); + /** Returns true if the current engine is unhealthy */ + def currentEngineIsUnHealthy(): Boolean = { + manager != null && manager.getNodeHealthy() == NodeHealthy.UnHealthy } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala index 07cfa51d0a..dfe1137084 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala +++
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala @@ -29,10 +29,13 @@ import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.core.hook.ShutdownHook import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback +import org.apache.linkis.manager.label.constant.LabelValueConstant +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender import org.apache.linkis.server.conf.ServerConfiguration @@ -60,7 +63,15 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val newMap = map.++(parser.getSpringConfMap) newMap.put("spring.mvc.servlet.path", ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue) DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(newMap.toMap)) + val context = EngineConnObject.getEngineCreationContext + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + logger.info("cluster mode call back will be invoke in beforeExecutionExecute") + } else { + val engineConnPidCallBack = new EngineConnIdentifierCallback() + Utils.tryAndError(engineConnPidCallBack.callback()) + } logger.info("<--------------------SpringBoot App init succeed-------------------->") } @@ -68,8 +79,14 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { engineCreationContext: EngineCreationContext, engineConn: 
EngineConn ): Unit = { - val engineConnIdentifierCallback = new EngineConnIdentifierCallback() - Utils.tryAndError(engineConnIdentifierCallback.callback()) + val context = EngineConnObject.getEngineCreationContext + + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + logger.info("cluster mode call back be invoke") + val engineConnPidCallBack = new EngineConnIdentifierCallback() + Utils.tryAndError(engineConnPidCallBack.callback()) + } } override def afterExecutionExecute( @@ -85,7 +102,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val prefixMsg = Sender.getThisServiceInstance + s": log dir: ${EngineConnConf.getLogDir}," Utils.tryAndError( engineConnAfterStartCallback.callback( - new EngineConnStatusCallback( + EngineConnStatusCallback( Sender.getThisServiceInstance, engineCreationContext.getTicketId, NodeStatus.Failed, @@ -125,7 +142,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val engineConnAfterStartCallback = new EngineConnAfterStartCallback Utils.tryAndError( engineConnAfterStartCallback.callback( - new EngineConnStatusCallback( + EngineConnStatusCallback( Sender.getThisServiceInstance, engineCreationContext.getTicketId, getNodeStatusOfStartSuccess(engineCreationContext, engineConn), diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala index efd74e9077..d1eb83d391 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala @@ -35,7 +35,7 @@ abstract class AbstractEngineConnStartUpCallback() extends EngineConnCallback wi def callback(protocol: RequestProtocol): Unit = { protocol match { case protocol: EngineConnStatusCallback => - if (protocol.getStatus().equals(NodeStatus.Failed)) { + if (protocol.status.equals(NodeStatus.Failed)) { logger.error(s"EngineConn Start Failed protocol will send to LM: ${protocol}") } else { logger.info(s"protocol will send to lm: ${protocol}") diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala similarity index 99% rename from linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala index 71f71f1999..8b9e3ad36e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala @@ -30,6 +30,7 @@ import java.lang.management.ManagementFactory class EngineConnIdentifierCallback extends 
AbstractEngineConnStartUpCallback { override def callback(): Unit = { + var identifier = ManagementFactory.getRuntimeMXBean.getName.split("@")(0) val instance = Sender.getThisServiceInstance val context = EngineConnObject.getEngineCreationContext diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala new file mode 100644 index 0000000000..f323bf2b2f --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.callback.service + +trait EngineConnTimedCallback extends EngineConnCallback {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala index a7675287e0..7f70e21e21 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala @@ -20,11 +20,11 @@ package org.apache.linkis.engineconn.executor import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.common.utils.Utils -import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.engineconn.executor.conf.EngineConnExecutorConfiguration +import org.apache.linkis.governance.common.utils.GovernanceUtils import org.apache.linkis.manager.label.entity.Label import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.time.DateFormatUtils import java.util.concurrent.atomic.AtomicInteger @@ -60,9 +60,12 @@ trait ExecutorExecutionContext { def setLabels(labels: Array[Label[_]]): Unit = this.labels = labels protected def getDefaultStorePath: String = { - val path = GovernanceCommonConf.RESULT_SET_STORE_PATH.getValue - val pathPrefix = (if (path.endsWith("/")) path else path + "/") + Utils.getJvmUser + "/" + - DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd") + "/" + val path = if 
(EngineConnExecutorConfiguration.LINKIS_RES_DEFAULT_ENABLED) { + GovernanceUtils.getResultParentPath(GovernanceUtils.LINKIS_DEFAULT_RES_CREATOR) + } else { + "hdfs:///apps-data/" + Utils.getJvmUser + } + val pathPrefix = (if (path.endsWith("/")) path else path + "/") + Utils.getJvmUser + "/" getJobId.map(pathPrefix + _ + "/" + System.nanoTime).getOrElse(pathPrefix + System.nanoTime) } @@ -81,11 +84,11 @@ trait ExecutorExecutionContext { protected def getDefaultResultSetByType: String def createDefaultResultSetWriter(): ResultSetWriter[_ <: MetaData, _ <: Record] = { - createResultSetWriter(getResultSetByType(getDefaultResultSetByType)) // todo check + createResultSetWriter(getResultSetByType(getDefaultResultSetByType)) } def createDefaultResultSetWriter(alias: String): ResultSetWriter[_ <: MetaData, _ <: Record] = - createResultSetWriter(getResultSetByType(getDefaultResultSetByType), alias) // todo check + createResultSetWriter(getResultSetByType(getDefaultResultSetByType), alias) def createResultSetWriter(resultSetType: String): ResultSetWriter[_ <: MetaData, _ <: Record] = createResultSetWriter(getResultSetByType(resultSetType), null) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala index 813022ceb8..f847b9c34f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala @@ -66,4 +66,7 @@ object 
EngineConnExecutorConfiguration { val DEFAULT_EXECUTOR_NAME = CommonVars("wds.linkis.engineconn.executor.default.name", "ComputationExecutor") + val LINKIS_RES_DEFAULT_ENABLED = + CommonVars("wds.linkis.engineconn.res.default.enabled", true).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java index 034022f169..24c8b904cd 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java @@ -30,11 +30,9 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode { CANNOT_DEFAULT_EF(20000, "Cannot find default ExecutorFactory(找不到默认的 ExecutorFactory)"), ETL_NOT_EXISTS(20000, "EngineTypeLabel does not exist(EngineTypeLabel 不存在)"), UCL_NOT_EXISTS(20000, "UserCreatorLabel does not exist(UserCreatorLabel 不存在)"), - CANNOT_HOME_PATH_EC( - 20001, "Cannot find the home path of engineConn at: {0}(找不到 engineConn 的 home 路径,该路径为:{0})"), + CANNOT_HOME_PATH_EC(20001, "Cannot find the home path of engineConn(找不到 engineConn 的 home 路径)"), CANNOT_HOME_PATH_DIST( - 20001, - "Could not find the home path for engineconn dist at: {0}(找不到 engineconn dist 的 home 路径,该路径为:{0})"), + 20001, "Cannot find the home path:{0} of engineconn dist(找不到 engineconn dist 的 home 路径)"), DIST_IS_EMPTY( 20001, "The dist of EngineConn is empty,engineConnType is:{0}(EngineConn 的 dist 为空,engineConnType为:{0})"), diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala index f3235ffa34..f0bae00a1b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala @@ -36,10 +36,8 @@ object EnvConfiguration { CommonVars[String]("HADOOP_CONF_DIR", "/appcom/config/hadoop-config").getValue ) - val ENGINE_CONN_JARS = CommonVars("wds.linkis.engineConn.jars", "", "engineConn额外的Jars") - val ENGINE_CONN_CLASSPATH_FILES = - CommonVars("wds.linkis.engineConn.files", "", "engineConn额外的配置文件") + CommonVars("linkis.engineConn.classpath.files", "", "engineConn额外的配置文件") val MAX_METASPACE_SIZE = CommonVars("linkis.engineconn.metaspace.size.max", "256m") diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala new file mode 100644 index 0000000000..5f1537525c --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.engineplugin.common.exception + +object EngineConnPluginErrorCode { + + def INVALID_RUNTYPE: Int = 70101 + + def INVALID_LABELS: Int = 70102 + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala index d7ab70908a..41cf435459 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala @@ -17,9 +17,6 @@ package org.apache.linkis.manager.engineplugin.common.exception -import org.apache.linkis.common.exception.{ErrorException, ExceptionLevel, LinkisRuntimeException} +import org.apache.linkis.common.exception.ErrorException -class EngineConnPluginErrorException(code: Int, msg: String) - 
extends LinkisRuntimeException(code, msg) { - override def getLevel: ExceptionLevel = ExceptionLevel.ERROR -} +class EngineConnPluginErrorException(code: Int, msg: String) extends ErrorException(code, msg) {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala index 082b02a020..e46126523a 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala @@ -117,49 +117,27 @@ abstract class JavaProcessEngineConnLaunchBuilder addPathToClassPath(environment, variable(HIVE_CONF_DIR)) } // first, add engineconn conf dirs. - addPathToClassPath(environment, Seq(variable(PWD), ENGINE_CONN_CONF_DIR_NAME)) + addPathToClassPath(environment, buildPath(Seq(variable(PWD), ENGINE_CONN_CONF_DIR_NAME))) // then, add LINKIS_CONF_DIR conf dirs. - addPathToClassPath(environment, Seq(EnvConfiguration.LINKIS_CONF_DIR.getValue)) + addPathToClassPath(environment, buildPath(Seq(EnvConfiguration.LINKIS_CONF_DIR.getValue))) // then, add engineconn libs. - addPathToClassPath(environment, Seq(variable(PWD), ENGINE_CONN_LIB_DIR_NAME + "/*")) + addPathToClassPath(environment, buildPath(Seq(variable(PWD), ENGINE_CONN_LIB_DIR_NAME + "/*"))) // then, add public modules. 
if (!enablePublicModule) { - addPathToClassPath(environment, Seq(LINKIS_PUBLIC_MODULE_PATH.getValue + "/*")) + addPathToClassPath(environment, buildPath(Seq(LINKIS_PUBLIC_MODULE_PATH.getValue + "/*"))) } // finally, add the suitable properties key to classpath - engineConnBuildRequest.engineConnCreationDesc.properties.asScala.foreach { case (key, value) => - if ( - key - .startsWith("engineconn.classpath") || key.startsWith("wds.linkis.engineconn.classpath") - ) { - addPathToClassPath(environment, Seq(variable(PWD), new File(value).getName)) - } - } - getExtraClassPathFile.foreach { file: String => - addPathToClassPath(environment, Seq(variable(PWD), new File(file).getName)) + val taskClassPathFiles = EnvConfiguration.ENGINE_CONN_CLASSPATH_FILES.getValue( + engineConnBuildRequest.engineConnCreationDesc.properties + ) + if (StringUtils.isNotBlank(taskClassPathFiles)) { + taskClassPathFiles + .split(",") + .filter(StringUtils.isNotBlank(_)) + .foreach(file => addPathToClassPath(environment, buildPath(Seq(file)))) } - engineConnBuildRequest match { - case richer: RicherEngineConnBuildRequest => - def addFiles(files: String): Unit = if (StringUtils.isNotBlank(files)) { - files - .split(",") - .foreach(file => - addPathToClassPath(environment, Seq(variable(PWD), new File(file).getName)) - ) - } - - val configs: util.Map[String, String] = - richer.getStartupConfigs.asScala - .filter(_._2.isInstanceOf[String]) - .map { case (k, v: String) => - k -> v - } - .asJava - val jars: String = EnvConfiguration.ENGINE_CONN_JARS.getValue(configs) - addFiles(jars) - val files: String = EnvConfiguration.ENGINE_CONN_CLASSPATH_FILES.getValue(configs) - addFiles(files) - case _ => + getExtraClassPathFile.filter(StringUtils.isNotBlank(_)).foreach { file: String => + addPathToClassPath(environment, buildPath(Seq(new File(file).getName))) } environment } @@ -198,7 +176,7 @@ abstract class JavaProcessEngineConnLaunchBuilder ) ++: engineConnResource.getOtherBmlResources.toList }.asJava - 
private implicit def buildPath(paths: Seq[String]): String = + private def buildPath(paths: Seq[String]): String = Paths.get(paths.head, paths.tail: _*).toFile.getPath } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala index 3b3005fee6..8bcc79b410 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala @@ -22,7 +22,7 @@ import org.apache.linkis.protocol.message.RequestProtocol import java.util -trait EngineResourceRequest extends RequestProtocol { +trait EngineResourceRequest { val user: String val labels: util.List[Label[_]] val properties: util.Map[String, String] diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala index aada8caedc..02565a394b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala @@ -34,6 +34,9 @@ class UserNodeResource 
extends NodeResource { private var leftResource: Resource = _ private var createTime: Date = _ private var updateTime: Date = _ + private var maxApps: Int = _ + private var numPendingApps: Int = _ + private var numActiveApps: Int = _ def getUser: String = user @@ -87,4 +90,23 @@ class UserNodeResource extends NodeResource { override def getId: Integer = id override def setId(id: Integer): Unit = this.id = id + + override def getMaxApps: Integer = maxApps + + override def setMaxApps(maxApps: Integer): Unit = { + this.maxApps = maxApps + } + + override def getNumPendingApps: Integer = numPendingApps + + override def setNumPendingApps(numPendingApps: Integer): Unit = { + this.numPendingApps = numPendingApps + } + + override def getNumActiveApps: Integer = numActiveApps + + override def setNumActiveApps(numActiveApps: Integer): Unit = { + this.numActiveApps = numActiveApps + } + } diff --git a/linkis-computation-governance/linkis-entrance/pom.xml b/linkis-computation-governance/linkis-entrance/pom.xml index dea4d1d4d7..bda458c356 100644 --- a/linkis-computation-governance/linkis-entrance/pom.xml +++ b/linkis-computation-governance/linkis-entrance/pom.xml @@ -90,6 +90,12 @@ ${project.version} + + org.apache.linkis + linkis-ps-common-lock + ${project.version} + + diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java index 0bf27a68b3..1cf9a6b4b1 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java @@ -42,16 +42,13 @@ import org.apache.linkis.entrance.persistence.QueryPersistenceManager; import org.apache.linkis.entrance.persistence.ResultSetEngine; 
import org.apache.linkis.entrance.scheduler.EntranceGroupFactory; +import org.apache.linkis.entrance.scheduler.EntranceParallelConsumerManager; import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext; -import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder; -import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder$; -import org.apache.linkis.orchestrator.ecm.entity.Policy; import org.apache.linkis.scheduler.Scheduler; import org.apache.linkis.scheduler.SchedulerContext; import org.apache.linkis.scheduler.executer.ExecutorManager; import org.apache.linkis.scheduler.queue.ConsumerManager; import org.apache.linkis.scheduler.queue.GroupFactory; -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager; import org.apache.linkis.scheduler.queue.parallelqueue.ParallelScheduler; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; @@ -150,7 +147,7 @@ public EntranceInterceptor[] entranceInterceptors() { new ScalaCodeInterceptor(), new SQLLimitEntranceInterceptor(), new CommentInterceptor(), - new SetTenantLabelInterceptor(), + // new SetTenantLabelInterceptor(), new UserCreatorIPCheckInterceptor() }; } @@ -190,7 +187,7 @@ public GroupFactory groupFactory() { @Bean @ConditionalOnMissingBean public ConsumerManager consumerManager() { - return new ParallelConsumerManager( + return new EntranceParallelConsumerManager( ENTRANCE_SCHEDULER_MAX_PARALLELISM_USERS().getValue(), "EntranceJobScheduler"); } @@ -204,9 +201,7 @@ public SchedulerContext schedulerContext( @Bean @ConditionalOnMissingBean public ExecutorManager executorManager(GroupFactory groupFactory) { - EngineConnManagerBuilder engineConnManagerBuilder = EngineConnManagerBuilder$.MODULE$.builder(); - engineConnManagerBuilder.setPolicy(Policy.Process); - return new EntranceExecutorManagerImpl(groupFactory, engineConnManagerBuilder.build()); + return new EntranceExecutorManagerImpl(groupFactory); } @Bean diff --git 
a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java index cb37279c11..bee17b8ed4 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java @@ -26,4 +26,6 @@ private ServiceNameConsts() {} public static final String ENTRANCE_SERVER = "entranceServer"; public static final String ENTRANCE_INTERCEPTOR = "entranceInterceptors"; + + public static final String ENTRANCE_FAILOVER_SERVER = "entranceFailoverServer"; } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java index 2f045a1760..51a522d3d2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java @@ -62,6 +62,11 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode { INVALID_RESULTSETS(20053, "Invalid resultsets, cannot use cache(结果集无效,无法使用 cache)"), SUBMITTING_QUERY_FAILED(30009, "Submitting the query failed(提交查询失败)!"), + + SUBMIT_CODE_ISEMPTY( + 30010, + "Submitting the execution code, after code preprocessing, the real execution code is empty, please check the executed code(提交的执行代码,经过预处理后为空,请检查执行的代码是否为空或则只有注解)!"), + QUERY_STATUS_FAILED(50081, "Query from jobHistory status failed(从 jobHistory 状态查询失败)"), GET_QUERY_RESPONSE(50081, "Get query response incorrectly(获取查询响应结果不正确)"), 
QUERY_TASKID_ERROR(50081, "Query task of taskId:{0} error(查询任务id:{}的任务出错)"), @@ -71,7 +76,11 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode { SHELL_BLACKLISTED_CODE(50081, "Shell code contains blacklisted code(shell中包含黑名单代码)"), JOB_HISTORY_FAILED_ID(50081, ""), - LOGPATH_NOT_NULL(20301, "The logPath cannot be empty(日志路径不能为空)"); + LOGPATH_NOT_NULL(20301, "The logPath cannot be empty(日志路径不能为空)"), + + FAILOVER_RUNNING_TO_CANCELLED( + 30001, + "Job {0} failover, status changed from Running to Cancelled (任务故障转移,状态从Running变更为Cancelled)"); /** (errorCode)错误码 */ private final int errorCode; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java index d9b33820fb..ca19f4d730 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java @@ -27,8 +27,10 @@ import org.apache.linkis.entrance.log.WebSocketCacheLogReader; import org.apache.linkis.entrance.log.WebSocketLogWriter; import org.apache.linkis.entrance.persistence.PersistenceManager; +import org.apache.linkis.entrance.utils.CommonLogPathUtils; import org.apache.linkis.governance.common.conf.GovernanceCommonConf; import org.apache.linkis.governance.common.constant.job.JobRequestConstants; +import org.apache.linkis.governance.common.entity.job.JobRequest; import org.apache.linkis.governance.common.protocol.task.RequestTask$; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.orchestrator.plans.ast.QueryParams$; @@ -125,11 +127,12 @@ public ExecuteRequest jobToExecuteRequest() throws EntranceErrorException { // add resultSet path root Map starupMapTmp = new HashMap<>(); Map starupMapOri = 
TaskUtils.getStartupMap(getParams()); + JobRequest jobRequest = getJobRequest(); if (starupMapOri.isEmpty()) { TaskUtils.addStartupMap(getParams(), starupMapOri); } if (!starupMapOri.containsKey(JobRequestConstants.JOB_REQUEST_LIST())) { - starupMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(getJobRequest().getId())); + starupMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(jobRequest.getId())); } for (Map.Entry entry : starupMapOri.entrySet()) { if (null != entry.getKey() && null != entry.getValue()) { @@ -142,7 +145,7 @@ public ExecuteRequest jobToExecuteRequest() throws EntranceErrorException { runtimeMapOri = TaskUtils.getRuntimeMap(getParams()); } if (!runtimeMapOri.containsKey(JobRequestConstants.JOB_ID())) { - runtimeMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(getJobRequest().getId())); + runtimeMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(jobRequest.getId())); } Map runtimeMapTmp = new HashMap<>(); for (Map.Entry entry : runtimeMapOri.entrySet()) { @@ -150,13 +153,21 @@ public ExecuteRequest jobToExecuteRequest() throws EntranceErrorException { runtimeMapTmp.put(entry.getKey(), entry.getValue().toString()); } } + String resultSetPathRoot = GovernanceCommonConf.RESULT_SET_STORE_PATH().getValue(runtimeMapTmp); + + if (!runtimeMapTmp.containsKey(GovernanceCommonConf.RESULT_SET_STORE_PATH().key())) { + String resultParentPath = CommonLogPathUtils.getResultParentPath(jobRequest); + CommonLogPathUtils.buildCommonPath(resultParentPath); + resultSetPathRoot = CommonLogPathUtils.getResultPath(jobRequest); + } + Map jobMap = new HashMap(); jobMap.put(RequestTask$.MODULE$.RESULT_SET_STORE_PATH(), resultSetPathRoot); runtimeMapOri.put(QueryParams$.MODULE$.JOB_KEY(), jobMap); - + jobRequest.setResultLocation(resultSetPathRoot); EntranceExecuteRequest executeRequest = new EntranceExecuteRequest(this); - List> labels = new ArrayList>(getJobRequest().getLabels()); + List> labels = new ArrayList>(jobRequest.getLabels()); 
executeRequest.setLabels(labels); return executeRequest; } @@ -224,26 +235,32 @@ public JobInfo getJobInfo() { : "not submit to ec"; StringBuffer sb = new StringBuffer(); - sb.append("Task creation time(任务创建时间): ") + sb.append("Task time point information(任务时间节点信息):\n") + .append("[Task creation time(任务创建时间)] :") .append(createTime) - .append(", Task scheduling time(任务调度时间): ") + .append("\n") + .append("[Task scheduling time(任务调度时间)]:") .append(scheduleTime) - .append(", Task start time(任务开始时间): ") + .append("\n") + .append("[Task start time(任务开始时间)] :") .append(startTime) - .append(", Mission end time(任务结束时间): ") + .append("\n") + .append("[Task end time(任务结束时间)] :") .append(endTime) .append("\n") .append(LogUtils.generateInfo("")) - .append("Task submit to Orchestrator time:") + .append("[Task submit to Orchestrator time]:") .append(jobToOrchestrator) - .append(", Task request EngineConn time:") + .append("\n") + .append("[Task request EngineConn time] :") .append(jobRequestEC) - .append(", Task submit to EngineConn time:") + .append("\n") + .append("[Task submit to EngineConn time] :") .append(jobSubmitToEC) .append("\n") .append( LogUtils.generateInfo( - "Your mission(您的任务) " + "Your task jobId(您的任务) " + this.getJobRequest().getId() + " The total time spent is(总耗时时间为): " + runTime)); @@ -269,4 +286,13 @@ public void close() throws IOException { logger.warn("Close logWriter and logReader failed. 
{}", e.getMessage(), e); } } + + @Override + public void clear() { + super.clear(); + this.setParams(null); + JobRequest jobRequest = this.getJobRequest(); + jobRequest.setExecutionCode(null); + jobRequest.setMetrics(null); + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java index dd80bc8e84..605d736b9f 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java @@ -138,7 +138,6 @@ public Job parseToJob(JobRequest jobReq) throws EntranceIllegalParamException { job.setEntranceContext(entranceContext); job.setListenerEventBus(null); job.setProgress(0f); - job.setJobRequest(jobReq); job.setCodeParser(new EmptyCodeParser()); return job; } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java index 69f15ea864..86af74d5c8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java @@ -66,7 +66,7 @@ public static void generateLogPath(JobRequest jobRequest, Map pa String creator = LabelUtil.getUserCreator(jobRequest.getLabels())._2; String umUser = jobRequest.getExecuteUser(); FsPath lopPrefixPath = new FsPath(logPathPrefix); - if (StorageUtils.HDFS.equals(lopPrefixPath.getFsType())) { + if (StorageUtils.HDFS().equals(lopPrefixPath.getFsType())) { String commonLogPath = logPathPrefix + "/" + "log" + "/" + 
dateString + "/" + creator; logPath = commonLogPath + "/" + umUser + "/" + jobRequest.getId() + ".log"; CommonLogPathUtils.buildCommonPath(commonLogPath); diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java index 44966fc1f1..b912b58ebb 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java @@ -164,6 +164,7 @@ public void onJobCompleted(Job job) { } cliHeartbeatMonitor.unRegisterIfCliJob(job); updateJobStatus(job); + job.clear(); } private void updateJobStatus(Job job) { @@ -190,7 +191,7 @@ private void updateJobStatus(Job job) { createPersistenceEngine().updateIfNeeded(jobRequest); } catch (ErrorException e) { entranceContext.getOrCreateLogManager().onLogUpdate(job, e.getMessage()); - logger.error("update job status failed, reason: ", e); + throw e; } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java new file mode 100644 index 0000000000..424e7ca170 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.restful; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "entrance lable manager") +@RestController +@RequestMapping(path = "/entrance/operation/consumer") +public class EntranceConsumerRestfulApi { + + private EntranceServer entranceServer; + + private static final Logger logger = LoggerFactory.getLogger(EntranceConsumerRestfulApi.class); + + @Autowired + public void setEntranceServer(EntranceServer entranceServer) { + this.entranceServer = entranceServer; + } + + @ApiOperation(value = "kill-consumer", notes = "kill consumer", response = Message.class) + @RequestMapping(path = "/kill", method = RequestMethod.GET) + 
public Message killConsumer( + HttpServletRequest req, @RequestParam(value = "groupName") String groupName) { + String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer"); + if (Configuration.isNotAdmin(operationUser)) { + return Message.error("only admin can do this"); + } + logger.info("user {} to kill consumer {}", operationUser, groupName); + ConsumerManager consumerManager = + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext() + .getOrCreateConsumerManager(); + consumerManager.destroyConsumer(groupName); + logger.info("user {} finished to kill consumer {}", operationUser, groupName); + return Message.ok(); + } + + @ApiOperation(value = "consumer-info", notes = "list consumers info", response = Message.class) + @RequestMapping(path = "/info", method = RequestMethod.GET) + public Message countConsumer(HttpServletRequest req) { + String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer"); + if (Configuration.isNotAdmin(operationUser)) { + return Message.error("only admin can do this"); + } + ConsumerManager consumerManager = + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext() + .getOrCreateConsumerManager(); + return Message.ok().data("consumerNum", consumerManager.listConsumers().length); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java index 2ab457747c..f75586071e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java @@ -17,20 +17,31 @@ package org.apache.linkis.entrance.restful; +import 
org.apache.linkis.DataWorkCloudApplication; +import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext; import org.apache.linkis.instance.label.client.InstanceLabelClient; import org.apache.linkis.manager.label.constant.LabelKeyConstant; import org.apache.linkis.manager.label.constant.LabelValueConstant; +import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.protocol.label.InsLabelRefreshRequest; +import org.apache.linkis.protocol.label.InsLabelRemoveRequest; import org.apache.linkis.rpc.Sender; +import org.apache.linkis.scheduler.SchedulerContext; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.commons.collections.CollectionUtils; + +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletRequest; import java.util.HashMap; +import java.util.List; import java.util.Map; import com.fasterxml.jackson.databind.JsonNode; @@ -46,6 +57,14 @@ public class EntranceLabelRestfulApi { private static final Logger logger = LoggerFactory.getLogger(EntranceLabelRestfulApi.class); + private EntranceServer entranceServer; + + @Autowired + public void setEntranceServer(EntranceServer entranceServer) { + this.entranceServer = entranceServer; + } + + private static Boolean offlineFlag = false; @ApiOperation(value = "update", notes = "update route label", response = Message.class) @ApiOperationSupport(ignoreParameters = {"jsonNode"}) @@ -72,13 +91,67 @@ public Message updateRouteLabel(HttpServletRequest req, @RequestBody JsonNode js public Message updateRouteLabel(HttpServletRequest req) { ModuleUserUtils.getOperationUser(req, "markoffline"); Map labels = new HashMap(); - logger.info("Prepare to modify the routelabel of entry to offline"); + 
logger.info("Prepare to modify the routelabel of entrance to offline"); labels.put(LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE); InsLabelRefreshRequest insLabelRefreshRequest = new InsLabelRefreshRequest(); insLabelRefreshRequest.setLabels(labels); insLabelRefreshRequest.setServiceInstance(Sender.getThisServiceInstance()); InstanceLabelClient.getInstance().refreshLabelsToInstance(insLabelRefreshRequest); + synchronized (offlineFlag) { + offlineFlag = true; + } logger.info("Finished to modify the routelabel of entry to offline"); + + logger.info("Prepare to update all not execution task instances to empty string"); + SchedulerContext schedulerContext = + entranceServer.getEntranceContext().getOrCreateScheduler().getSchedulerContext(); + if (schedulerContext instanceof EntranceSchedulerContext) { + ((EntranceSchedulerContext) schedulerContext).setOfflineFlag(true); + } + entranceServer.updateAllNotExecutionTaskInstances(true); + logger.info("Finished to update all not execution task instances to empty string"); + return Message.ok(); } + + @ApiOperation( + value = "backonline", + notes = "from offline status to recover", + response = Message.class) + @RequestMapping(path = "/backonline", method = RequestMethod.GET) + public Message backOnline(HttpServletRequest req) { + ModuleUserUtils.getOperationUser(req, "backonline"); + logger.info("Prepare to modify the routelabel of entrance to remove offline"); + InsLabelRemoveRequest insLabelRemoveRequest = new InsLabelRemoveRequest(); + insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance()); + InstanceLabelClient.getInstance().removeLabelsFromInstance(insLabelRemoveRequest); + synchronized (offlineFlag) { + offlineFlag = false; + } + logger.info("Finished to backonline"); + return Message.ok(); + } + + @ApiOperation(value = "isOnline", notes = "entrance isOnline", response = Message.class) + @RequestMapping(path = "/isOnline", method = RequestMethod.GET) + public Message 
isOnline(HttpServletRequest req) { + String thisInstance = Sender.getThisInstance(); + ServiceInstance mainInstance = DataWorkCloudApplication.getServiceInstance(); + ServiceInstance serviceInstance = new ServiceInstance(); + serviceInstance.setApplicationName(mainInstance.getApplicationName()); + serviceInstance.setInstance(thisInstance); + List> labelFromInstance = + InstanceLabelClient.getInstance().getLabelFromInstance(serviceInstance); + boolean res = true; + String offline = "offline"; + if (!CollectionUtils.isEmpty(labelFromInstance)) { + for (Label label : labelFromInstance) { + if (offline.equals(label.getValue())) { + res = false; + } + } + } + logger.info("Whether Entrance is online: {}", res); + return Message.ok().data("isOnline", res); + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java index 7d36df8fec..7b487352d5 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java @@ -20,8 +20,7 @@ import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.entrance.EntranceServer; import org.apache.linkis.entrance.execute.EntranceJob; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.manager.label.utils.LabelUtil; +import org.apache.linkis.entrance.scheduler.CreatorECTypeDefaultConf; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -67,7 +66,7 @@ public Message taskinfo( HttpServletRequest req, @RequestParam(value = "user", required = false) String user, @RequestParam(value = "creator", required = false) String creator, - 
@RequestParam(value = "engineTypeLabel", required = false) String engineTypeLabelValue) { + @RequestParam(value = "ecType", required = false) String ecType) { String userName = ModuleUserUtils.getOperationUser(req, "taskinfo"); String queryUser = user; if (Configuration.isNotAdmin(userName)) { @@ -83,23 +82,12 @@ public Message taskinfo( } else if (StringUtils.isBlank(creator)) { filterWords = queryUser; } - EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords); - int taskNumber = 0; + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords, ecType); int runningNumber = 0; int queuedNumber = 0; if (null != undoneTasks) { for (EntranceJob task : undoneTasks) { - if (StringUtils.isNotBlank(engineTypeLabelValue)) { - EngineTypeLabel engineTypeLabel = - LabelUtil.getEngineTypeLabel(task.getJobRequest().getLabels()); - // Task types do not match, do not count - if (null == engineTypeLabel - || !engineTypeLabelValue.equalsIgnoreCase(engineTypeLabel.getStringValue())) { - continue; - } - } - taskNumber++; if (task.isRunning()) { runningNumber++; } else { @@ -107,17 +95,25 @@ public Message taskinfo( } } } - return Message.ok("success") - .data("taskNumber", taskNumber) - .data("runningNumber", runningNumber) - .data("queuedNumber", queuedNumber); + Message resp = + Message.ok("success") + .data("taskNumber", undoneTasks.length) + .data("runningNumber", runningNumber) + .data("queuedNumber", queuedNumber); + if (StringUtils.isNoneBlank(creator, ecType)) { + int creatorECTypeMaxRunningJobs = + CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creator, ecType); + resp.data("creatorECTypeMaxRunningJobs", creatorECTypeMaxRunningJobs); + resp.data("limitExceeded", runningNumber > creatorECTypeMaxRunningJobs); + } + return resp; } - @ApiOperation(value = "Status", notes = "get running task number ", response = Message.class) + @ApiOperation(value = "runningtask", notes = "get running task number ", response = Message.class) 
@RequestMapping(path = "/runningtask", method = RequestMethod.GET) - public Message status(HttpServletRequest req) { + public Message runningtask(HttpServletRequest req) { ModuleUserUtils.getOperationUser(req, "runningtask"); - EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(""); + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask("", null); Boolean isCompleted = false; if (null == undoneTasks || undoneTasks.length < 1) { isCompleted = true; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java index c1479efd8a..57d2ed04d0 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java @@ -29,15 +29,19 @@ import org.apache.linkis.governance.common.entity.job.JobRequest; import org.apache.linkis.manager.common.protocol.resource.ResourceWithStatus; import org.apache.linkis.protocol.constants.TaskConstant; +import org.apache.linkis.protocol.engine.JobInstance; import org.apache.linkis.protocol.engine.JobProgressInfo; import org.apache.linkis.protocol.utils.ZuulEntranceUtils; import org.apache.linkis.rpc.Sender; import org.apache.linkis.scheduler.listener.LogListener; import org.apache.linkis.scheduler.queue.Job; import org.apache.linkis.scheduler.queue.SchedulerEventState; +import org.apache.linkis.server.BDPJettyServerHelper; import org.apache.linkis.server.Message; +import org.apache.linkis.server.conf.ServerConfiguration; import org.apache.linkis.server.security.SecurityFilter; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.linkis.utils.LinkisSpringUtils; import org.apache.commons.io.IOUtils; import 
org.apache.commons.lang3.StringUtils; @@ -61,6 +65,7 @@ import scala.Option; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; import io.swagger.annotations.Api; @@ -96,7 +101,11 @@ public void setEntranceServer(EntranceServer entranceServer) { @RequestMapping(path = "/execute", method = RequestMethod.POST) public Message execute(HttpServletRequest req, @RequestBody Map json) { Message message = null; - logger.info("Begin to get an execID"); + String operationUser = ModuleUserUtils.getOperationUser(req); + logger.info( + "Begin to get execute task for user {}, Client IP {}", + operationUser, + LinkisSpringUtils.getClientIP(req)); json.put(TaskConstant.EXECUTE_USER, ModuleUserUtils.getOperationUser(req)); json.put(TaskConstant.SUBMIT_USER, SecurityFilter.getLoginUsername(req)); HashMap map = (HashMap) json.get(TaskConstant.SOURCE); @@ -110,15 +119,6 @@ public Message execute(HttpServletRequest req, @RequestBody Map JobRequest jobReq = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobReq.getId(); ModuleUserUtils.getOperationUser(req, "execute task,id: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobReq.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); String execID = ZuulEntranceUtils.generateExecID( job.getId(), @@ -132,7 +132,7 @@ public Message execute(HttpServletRequest req, @RequestBody Map + jobReqId + " in " + Sender.getThisServiceInstance().toString() - + ". Please wait it to be scheduled"), + + ". 
\n Please wait it to be scheduled(您的任务已经提交,进入排队中,如果一直没有更新日志,是任务并发达到了限制,可以进行参数修改)"), job); message = Message.ok(); message.setMethod("/api/entrance/execute"); @@ -148,9 +148,25 @@ public Message execute(HttpServletRequest req, @RequestBody Map @RequestMapping(path = "/submit", method = RequestMethod.POST) public Message submit(HttpServletRequest req, @RequestBody Map json) { Message message = null; - logger.info("Begin to get an execID"); - json.put(TaskConstant.EXECUTE_USER, ModuleUserUtils.getOperationUser(req)); + String executeUser = ModuleUserUtils.getOperationUser(req); + logger.info( + "Begin to get execute task for user {}, Client IP {}", + executeUser, + LinkisSpringUtils.getClientIP(req)); json.put(TaskConstant.SUBMIT_USER, SecurityFilter.getLoginUsername(req)); + String token = ModuleUserUtils.getToken(req); + Object tempExecuteUser = json.get(TaskConstant.EXECUTE_USER); + // check special admin token + if (StringUtils.isNotBlank(token) && tempExecuteUser != null) { + if (Configuration.isAdminToken(token)) { + logger.warn( + "ExecuteUser variable will be replaced by system value: {} -> {}", + tempExecuteUser, + executeUser); + executeUser = String.valueOf(tempExecuteUser); + } + } + json.put(TaskConstant.EXECUTE_USER, executeUser); HashMap map = (HashMap) json.get(TaskConstant.SOURCE); if (map == null) { map = new HashMap<>(); @@ -162,15 +178,6 @@ public Message submit(HttpServletRequest req, @RequestBody Map j JobRequest jobRequest = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobRequest.getId(); ModuleUserUtils.getOperationUser(req, "submit jobReqId: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobRequest.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); 
pushLog( LogUtils.generateInfo( "Your job is accepted, jobID is " @@ -179,7 +186,7 @@ public Message submit(HttpServletRequest req, @RequestBody Map j + jobReqId + " in " + Sender.getThisServiceInstance().toString() - + ". Please wait it to be scheduled"), + + ". \n Please wait it to be scheduled(您的任务已经提交,进入排队中,如果一直没有更新日志,是任务并发达到了限制,可以进行参数修改)"), job); String execID = ZuulEntranceUtils.generateExecID( @@ -198,6 +205,13 @@ private void pushLog(String log, Job job) { entranceServer.getEntranceContext().getOrCreateLogManager().onLogUpdate(job, log); } + private JobInstance parseHeaderToJobInstance(HttpServletRequest req) + throws JsonProcessingException { + String jobStr = + req.getHeader(ServerConfiguration.LINKIS_SERVER_ENTRANCE_HEADER_KEY().getValue()); + return BDPJettyServerHelper.gson().fromJson(jobStr, JobInstance.class); + } + @ApiOperation(value = "status", notes = "get task stats", response = Message.class) @ApiImplicitParams({ @ApiImplicitParam(name = "taskID", required = false, dataType = "String", value = " task id"), @@ -209,28 +223,74 @@ public Message status( HttpServletRequest req, @PathVariable("id") String id, @RequestParam(value = "taskID", required = false) String taskID) { + ModuleUserUtils.getOperationUser(req, "job status"); Message message = null; - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "status realId: " + realId); - Option job = Option.apply(null); + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/status"); + return message; + } + + // return ok when job complete + if 
(SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/status"); + message.data("status", jobInstance.status()).data("execID", "").data("taskID", id); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return status is Inited", id); + String status = SchedulerEventState.Inited().toString(); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/status"); + message.data("status", status).data("execID", "").data("taskID", id); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } + + Option job = null; try { job = entranceServer.getJob(realId); } catch (Exception e) { - logger.warn("获取任务 {} 状态时出现错误", realId, e.getMessage()); + logger.warn("get {} status error", realId, e); + if (StringUtils.isEmpty(taskID)) { + message = + Message.error( + "Get job by ID error and cannot obtain the corresponding task status.(获取job时发生异常,不能获取相应的任务状态)"); + return message; + } long realTaskID = Long.parseLong(taskID); String status = JobHistoryHelper.getStatusByTaskID(realTaskID); message = Message.ok(); message.setMethod("/api/entrance/" + id + "/status"); - message.data("status", status).data("execID", id); + message.data("status", status).data("execID", execID); return message; } - if (job.isDefined()) { + if (job != null && job.isDefined()) { if (job.get() instanceof EntranceJob) { ((EntranceJob) job.get()).updateNewestAccessByClientTimestamp(); } message = Message.ok(); message.setMethod("/api/entrance/" + id + "/status"); - message.data("status", job.get().getState().toString()).data("execID", id); + message.data("status", job.get().getState().toString()).data("execID", execID); } else { message = Message.error( @@ 
-246,9 +306,56 @@ public Message status( @Override @RequestMapping(path = "/{id}/progress", method = RequestMethod.GET) public Message progress(HttpServletRequest req, @PathVariable("id") String id) { + ModuleUserUtils.getOperationUser(req, "job progress"); Message message = null; - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "progress realId: " + realId); + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/progress"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progress"); + message + .data("progress", "1.0") + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return progress is 0", id); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progress"); + message + .data("progress", 0) + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } + Option job = null; try { job = entranceServer.getJob(realId); @@ -275,7 +382,7 @@ public Message progress(HttpServletRequest req, @PathVariable("id") String id) 
{ message .data("progress", Math.abs(job.get().getProgress())) - .data("execID", id) + .data("execID", execID) .data("progressInfo", list); } } else { @@ -296,9 +403,60 @@ public Message progress(HttpServletRequest req, @PathVariable("id") String id) { @Override @RequestMapping(path = "/{id}/progressWithResource", method = RequestMethod.GET) public Message progressWithResource(HttpServletRequest req, @PathVariable("id") String id) { + ModuleUserUtils.getOperationUser(req, "job progressWithResource"); Message message = null; - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "progressWithResource realId: " + realId); + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/progressWithResource"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + long realTaskID = Long.parseLong(id); + JobRequest jobRequest = JobHistoryHelper.getTaskByTaskID(realTaskID); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progressWithResource"); + Map metricsVo = new HashMap<>(); + buildYarnResource(jobRequest, metricsVo, message); + message + .data("progress", "1.0") + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return progress is 0 and resource is null", id); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + 
"/progressWithResource"); + message + .data(TaskConstant.JOB_YARNRESOURCE, null) + .data("progress", 0) + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } Option job = null; try { job = entranceServer.getJob(realId); @@ -324,57 +482,12 @@ public Message progressWithResource(HttpServletRequest req, @PathVariable("id") message.setMethod("/api/entrance/" + id + "/progressWithResource"); JobRequest jobRequest = ((EntranceJob) job.get()).getJobRequest(); - Map metrics = jobRequest.getMetrics(); Map metricsVo = new HashMap<>(); - if (metrics.containsKey(TaskConstant.JOB_YARNRESOURCE)) { - HashMap resourceMap = - (HashMap) metrics.get(TaskConstant.JOB_YARNRESOURCE); - ArrayList resoureList = new ArrayList<>(12); - if (null != resourceMap && !resourceMap.isEmpty()) { - resourceMap.forEach( - (applicationId, resource) -> { - resoureList.add(new YarnResourceWithStatusVo(applicationId, resource)); - }); - metricsVo.put(TaskConstant.JOB_YARNRESOURCE, resoureList); - Optional cores = - resourceMap.values().stream() - .map(resource -> resource.getQueueCores()) - .reduce((x, y) -> x + y); - Optional memory = - resourceMap.values().stream() - .map(resource -> resource.queueMemory()) - .reduce((x, y) -> x + y); - float corePercent = 0.0f; - float memoryPercent = 0.0f; - if (cores.isPresent() && memory.isPresent()) { - corePercent = - cores.get().floatValue() - / EntranceConfiguration.YARN_QUEUE_CORES_MAX().getHotValue(); - memoryPercent = - memory.get().floatValue() - / (EntranceConfiguration.YARN_QUEUE_MEMORY_MAX().getHotValue().longValue() - * 1024 - * 1024 - * 1024); - } - String coreRGB = RGBUtils.getRGB(corePercent); - String memoryRGB = RGBUtils.getRGB(memoryPercent); - metricsVo.put(TaskConstant.JOB_CORE_PERCENT, 
corePercent); - metricsVo.put(TaskConstant.JOB_MEMORY_PERCENT, memoryPercent); - metricsVo.put(TaskConstant.JOB_CORE_RGB, coreRGB); - metricsVo.put(TaskConstant.JOB_MEMORY_RGB, memoryRGB); - - message.data(TaskConstant.JOB_YARN_METRICS, metricsVo); - } else { - message.data(TaskConstant.JOB_YARNRESOURCE, null); - } - } else { - message.data(TaskConstant.JOB_YARNRESOURCE, null); - } + buildYarnResource(jobRequest, metricsVo, message); message .data("progress", Math.abs(job.get().getProgress())) - .data("execID", id) + .data("execID", execID) .data("progressInfo", list); } } else { @@ -385,6 +498,60 @@ public Message progressWithResource(HttpServletRequest req, @PathVariable("id") return message; } + private void buildYarnResource( + JobRequest jobRequest, Map metricsVo, Message message) { + try { + Map metrics = jobRequest.getMetrics(); + if (metrics.containsKey(TaskConstant.JOB_YARNRESOURCE)) { + + HashMap resourceMap = + (HashMap) metrics.get(TaskConstant.JOB_YARNRESOURCE); + ArrayList resoureList = new ArrayList<>(12); + if (null != resourceMap && !resourceMap.isEmpty()) { + resourceMap.forEach( + (applicationId, resource) -> { + resoureList.add(new YarnResourceWithStatusVo(applicationId, resource)); + }); + metricsVo.put(TaskConstant.JOB_YARNRESOURCE, resoureList); + Optional cores = + resourceMap.values().stream() + .map(resource -> resource.queueCores()) + .reduce((x, y) -> x + y); + Optional memory = + resourceMap.values().stream() + .map(resource -> resource.queueMemory()) + .reduce((x, y) -> x + y); + float corePercent = 0.0f; + float memoryPercent = 0.0f; + if (cores.isPresent() && memory.isPresent()) { + corePercent = + cores.get().floatValue() / EntranceConfiguration.YARN_QUEUE_CORES_MAX().getValue(); + memoryPercent = + memory.get().floatValue() + / (EntranceConfiguration.YARN_QUEUE_MEMORY_MAX().getValue().longValue() + * 1024 + * 1024 + * 1024); + } + String coreRGB = RGBUtils.getRGB(corePercent); + String memoryRGB = RGBUtils.getRGB(memoryPercent); + 
metricsVo.put(TaskConstant.JOB_CORE_PERCENT, corePercent); + metricsVo.put(TaskConstant.JOB_MEMORY_PERCENT, memoryPercent); + metricsVo.put(TaskConstant.JOB_CORE_RGB, coreRGB); + metricsVo.put(TaskConstant.JOB_MEMORY_RGB, memoryRGB); + + message.data(TaskConstant.JOB_YARN_METRICS, metricsVo); + } else { + message.data(TaskConstant.JOB_YARNRESOURCE, null); + } + } else { + message.data(TaskConstant.JOB_YARNRESOURCE, null); + } + } catch (Exception e) { + logger.error("build yarnResource error", e); + } + } + private void setJobProgressInfos( List> list, JobProgressInfo jobProgressInfo) { Map map = new HashMap<>(); @@ -403,10 +570,78 @@ private void setJobProgressInfos( @Override @RequestMapping(path = "/{id}/log", method = RequestMethod.GET) public Message log(HttpServletRequest req, @PathVariable("id") String id) { - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "log realId: " + realId); - Option job = Option.apply(null); + ModuleUserUtils.getOperationUser(req, "get job log"); Message message = null; + int fromLine = 0; + int size = 100; + boolean distinctLevel = true; + String fromLineStr = req.getParameter("fromLine"); + String sizeStr = req.getParameter("size"); + if (StringUtils.isNotBlank(fromLineStr)) { + fromLine = Math.max(Integer.parseInt(fromLineStr), 0); + } + if (StringUtils.isNotBlank(sizeStr)) { + size = Integer.parseInt(sizeStr) >= 0 ? 
Integer.parseInt(sizeStr) : 10000; + } + String distinctLevelStr = req.getParameter("distinctLevel"); + if ("false".equals(distinctLevelStr)) { + distinctLevel = false; + } + + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/log"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = + Message.error( + "The job you just executed has ended. This interface no longer provides a query. It is recommended that you download the log file for viewing.(您刚刚执行的job已经结束,本接口不再提供查询,建议您下载日志文件进行查看)"); + message.setMethod("/api/entrance/" + id + "/log"); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return customer log", id); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/log"); + String log = + LogUtils.generateInfo( + "The job will failover soon, please try again later.(job很快就会failover,请稍后再试)"); + Object retLog; + if (distinctLevel) { + String[] array = new String[4]; + array[2] = log; + array[3] = log; + retLog = new ArrayList(Arrays.asList(array)); + } else { + retLog = log; + } + message.data("log", retLog).data("execID", "").data("taskID", id).data("fromLine", 0); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } + + Option job = null; try { job = entranceServer.getJob(realId); } 
catch (final Throwable t) { @@ -416,27 +651,10 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) { message.setMethod("/api/entrance/" + id + "/log"); return message; } - if (job.isDefined()) { + if (job != null && job.isDefined()) { logger.debug("begin to get log for {}(开始获取 {} 的日志)", job.get().getId(), job.get().getId()); LogReader logReader = entranceServer.getEntranceContext().getOrCreateLogManager().getLogReader(realId); - int fromLine = 0; - int size = 100; - boolean distinctLevel = true; - if (req != null) { - String fromLineStr = req.getParameter("fromLine"); - String sizeStr = req.getParameter("size"); - if (StringUtils.isNotBlank(fromLineStr)) { - fromLine = Math.max(Integer.parseInt(fromLineStr), 0); - } - if (StringUtils.isNotBlank(sizeStr)) { - size = Integer.parseInt(sizeStr) >= 0 ? Integer.parseInt(sizeStr) : 10000; - } - String distinctLevelStr = req.getParameter("distinctLevel"); - if ("false".equals(distinctLevelStr)) { - distinctLevel = false; - } - } Object retLog = null; int retFromLine = 0; @@ -458,7 +676,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) { e); message = Message.ok(); message.setMethod("/api/entrance/" + id + "/log"); - message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine); + message.data("log", "").data("execID", execID).data("fromLine", retFromLine + fromLine); } catch (final IllegalArgumentException e) { logger.debug( "Failed to get log information for :{}(为 {} 获取日志失败)", @@ -467,7 +685,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) { e); message = Message.ok(); message.setMethod("/api/entrance/" + id + "/log"); - message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine); + message.data("log", "").data("execID", execID).data("fromLine", retFromLine + fromLine); return message; } catch (final Exception e1) { logger.debug( @@ -477,7 +695,7 @@ public Message log(HttpServletRequest req, 
@PathVariable("id") String id) { e1); message = Message.error("Failed to get log information(获取日志信息失败)"); message.setMethod("/api/entrance/" + id + "/log"); - message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine); + message.data("log", "").data("execID", execID).data("fromLine", retFromLine + fromLine); return message; } finally { if (null != logReader && job.get().isCompleted()) { @@ -486,7 +704,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) { } message = Message.ok(); message.setMethod("/api/entrance/" + id + "/log"); - message.data("log", retLog).data("execID", id).data("fromLine", retFromLine + fromLine); + message.data("log", retLog).data("execID", execID).data("fromLine", retFromLine + fromLine); logger.debug("success to get log for {} (获取 {} 日志成功)", job.get().getId(), job.get().getId()); } else { message = @@ -514,7 +732,6 @@ public Message killJobs( JsonNode taskIDNode = jsonNode.get("taskIDList"); ArrayList waitToForceKill = new ArrayList<>(); String userName = ModuleUserUtils.getOperationUser(req, "killJobs"); - if (idNode.size() != taskIDNode.size()) { return Message.error( "The length of the ID list does not match the length of the TASKID list(id列表的长度与taskId列表的长度不一致)"); @@ -527,7 +744,7 @@ public Message killJobs( String id = idNode.get(i).asText(); Long taskID = taskIDNode.get(i).asLong(); String realId = ZuulEntranceUtils.parseExecID(id)[3]; - Option job = Option.apply(null); + Option job = null; try { job = entranceServer.getJob(realId); } catch (Exception e) { @@ -541,7 +758,7 @@ public Message killJobs( continue; } Message message = null; - if (job.isEmpty()) { + if (job == null || job.isEmpty()) { logger.warn("can not find a job in entranceServer, will force to kill it"); waitToForceKill.add(taskID); message = Message.ok("Forced Kill task (强制杀死任务)"); @@ -577,11 +794,12 @@ public Message killJobs( if (null != logListener) { logListener.onLogUpdate( entranceJob, - "Job " - + 
jobReq.getId() - + " was kill by user successfully(任务" - + jobReq.getId() - + "已成功取消)"); + LogUtils.generateInfo( + "Job " + + jobReq.getId() + + " was kill by user successfully(任务" + + jobReq.getId() + + "已成功取消)")); } this.entranceServer .getEntranceContext() @@ -594,9 +812,9 @@ public Message killJobs( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)"); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)", + t); message.setMethod("/api/entrance/" + id + "/kill"); - message.setStatus(1); } } messages.add(message); @@ -609,7 +827,7 @@ public Message killJobs( @ApiOperation(value = "kill", notes = "kill", response = Message.class) @ApiImplicitParams({ - @ApiImplicitParam(name = "id", required = true, dataType = "String", value = "excute id"), + @ApiImplicitParam(name = "id", required = true, dataType = "String", value = "exec id"), @ApiImplicitParam(name = "taskID", required = false, dataType = "String", value = "task id") }) @Override @@ -618,23 +836,68 @@ public Message kill( HttpServletRequest req, @PathVariable("id") String id, @RequestParam(value = "taskID", required = false) Long taskID) { - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - String userName = ModuleUserUtils.getOperationUser(req, "kill task realId:" + realId); + String userName = ModuleUserUtils.getOperationUser(req, "kill job"); + Message message = null; + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/kill"); + return 
message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = Message.error("The job already completed. Do not support kill.(任务已经结束,不支持kill)"); + message.setMethod("/api/entrance/" + id + "/kill"); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, but now force kill", id); + // TODO If failover during force kill, the job status may change from Cancelled to Running + long taskId = Long.parseLong(id); + JobHistoryHelper.forceKill(taskId); + message = Message.ok("Forced Kill task (强制杀死任务)"); + message.setMethod("/api/entrance/" + id + "/kill"); + message.data("execID", "").data("taskID", id); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } - Option job = Option.apply(null); + Option job = null; try { job = entranceServer.getJob(realId); } catch (Exception e) { logger.warn("can not find a job in entranceServer, will force to kill it", e); // 如果在内存中找不到该任务,那么该任务可能已经完成了,或者就是重启导致的 + if (taskID == null || taskID <= 0) { + message = Message.error("Get job by ID error, kill failed.(获取job时发生异常,kill失败)"); + return message; + } JobHistoryHelper.forceKill(taskID); - Message message = Message.ok("Forced Kill task (强制杀死任务)"); + message = Message.ok("Forced Kill task (强制杀死任务)"); message.setMethod("/api/entrance/" + id + "/kill"); message.setStatus(0); return message; } - Message message = null; - if (job.isEmpty()) { + + if (job == null || job.isEmpty()) { logger.warn("can not find a job in entranceServer, will force to kill it"); // 如果在内存中找不到该任务,那么该任务可能已经完成了,或者就是重启导致的 JobHistoryHelper.forceKill(taskID); @@ -660,8 +923,7 @@ public Message kill( job.get().kill(); message = Message.ok("Successfully killed the job(成功kill了job)"); 
message.setMethod("/api/entrance/" + id + "/kill"); - message.setStatus(0); - message.data("execID", id); + message.data("execID", execID); // ensure the job's state is cancelled in database if (job.get() instanceof EntranceJob) { EntranceJob entranceJob = (EntranceJob) job.get(); @@ -678,10 +940,11 @@ public Message kill( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)" - + "message: " - + t.getMessage()); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败) with error:" + + t.getMessage(), + t); message.setMethod("/api/entrance/" + id + "/kill"); + message.setStatus(1); } } return message; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java new file mode 100644 index 0000000000..5a91c71a11 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.conf.EntranceConfiguration; +import org.apache.linkis.entrance.utils.EntranceUtils; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfig; +import org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.apache.linkis.rpc.Sender; + +import org.apache.commons.lang3.StringUtils; + +import java.util.concurrent.TimeUnit; + +import scala.Tuple2; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CreatorECTypeDefaultConf { + + private static final Logger logger = LoggerFactory.getLogger(CreatorECTypeDefaultConf.class); + + public static Sender confSender = + Sender.getSender( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()); + + private static LoadingCache confCache = + CacheBuilder.newBuilder() + .maximumSize(1000) + .expireAfterWrite( + (long) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE().getValue(), + TimeUnit.MINUTES) + .build( + new CacheLoader() { + @Override + public Integer load(String key) throws Exception { + Tuple2 tuple2 = + EntranceUtils.fromKeyGetLabels(key); + RequestQueryEngineConfig requestQueryEngineConfig = + new RequestQueryEngineConfig(tuple2._1, tuple2._2(), null); + int jobLimit = + (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + try { + Object response = confSender.ask(requestQueryEngineConfig); + if (response instanceof ResponseQueryConfig) { + jobLimit = + (int) + 
EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT() + .getValue(((ResponseQueryConfig) response).getKeyAndValue()); + } + } catch (Exception e) { + logger.warn("Failed to get key {} from conf", key, e); + } + return jobLimit; + } + }); + + public static int getCreatorECTypeMaxRunningJobs(String creator, String ecType) { + int jobLimit = (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + if (StringUtils.isNoneBlank(creator, ecType)) { + try { + String key = EntranceUtils.getDefaultCreatorECTypeKey(creator, ecType); + jobLimit = confCache.get(key); + } catch (Exception e) { + logger.warn("Failed to get key creator {} ecType {} from cache", creator, ecType, e); + } + } + int entranceNumber = EntranceUtils.getRunningEntranceNumber(); + return jobLimit / entranceNumber; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java index 7558ab6dc2..7c38d27947 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java @@ -20,13 +20,17 @@ import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.entrance.EntranceContext; import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.entrance.conf.EntranceConfiguration; import org.apache.linkis.entrance.conf.EntranceConfiguration$; import org.apache.linkis.entrance.constant.ServiceNameConsts; import org.apache.linkis.entrance.execute.EntranceJob; +import org.apache.linkis.entrance.job.EntranceExecutionJob; import org.apache.linkis.entrance.log.LogReader; import org.apache.linkis.governance.common.protocol.conf.EntranceInstanceConfRequest; import org.apache.linkis.rpc.Sender; 
+import org.apache.commons.io.IOUtils; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.event.ContextClosedEvent; import org.springframework.context.event.EventListener; @@ -94,13 +98,19 @@ private void shutdownEntrance(ContextClosedEvent event) { if (shutdownFlag) { logger.warn("event has been handled"); } else { + if (EntranceConfiguration.ENTRANCE_SHUTDOWN_FAILOVER_CONSUME_QUEUE_ENABLED()) { + logger.warn("Entrance exit to update and clean all ConsumeQueue task instances"); + updateAllNotExecutionTaskInstances(false); + } + logger.warn("Entrance exit to stop all job"); - EntranceJob[] allUndoneJobs = getAllUndoneTask(null); - if (null != allUndoneJobs) { - for (EntranceJob job : allUndoneJobs) { + EntranceJob[] allUndoneTask = getAllUndoneTask(null, null); + if (null != allUndoneTask) { + for (EntranceJob job : allUndoneTask) { job.onFailure( "Your job will be marked as canceled because the Entrance service restarted(因为Entrance服务重启,您的任务将被标记为取消)", null); + IOUtils.closeQuietly(((EntranceExecutionJob) job).getLogWriter().get()); } } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java new file mode 100644 index 0000000000..4e66da5cc3 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java @@ -0,0 +1,180 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.server; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.entrance.conf.EntranceConfiguration; +import org.apache.linkis.entrance.constant.ServiceNameConsts; +import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext; +import org.apache.linkis.entrance.utils.JobHistoryHelper; +import org.apache.linkis.governance.common.entity.job.JobRequest; +import org.apache.linkis.publicservice.common.lock.entity.CommonLock; +import org.apache.linkis.publicservice.common.lock.service.CommonLockService; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.scheduler.queue.SchedulerEventState; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.event.ContextClosedEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.*; +import java.util.stream.Collectors; + +import scala.Enumeration; +import scala.collection.Iterator; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component(ServiceNameConsts.ENTRANCE_FAILOVER_SERVER) +public class EntranceFailoverJobServer { + + private static final Logger logger = LoggerFactory.getLogger(EntranceFailoverJobServer.class); + + @Autowired private 
EntranceServer entranceServer; + + @Autowired private CommonLockService commonLockService; + + private static String ENTRANCE_FAILOVER_LOCK = "ENTRANCE_FAILOVER_LOCK"; + + private ScheduledExecutorService scheduledExecutor; + + private Future future; + + @PostConstruct + public void init() { + if (EntranceConfiguration.ENTRANCE_FAILOVER_ENABLED()) { + this.scheduledExecutor = + Executors.newSingleThreadScheduledExecutor( + Utils.threadFactory("Linkis-Failover-Scheduler-Thread-", true)); + failoverTask(); + } + } + + @EventListener + private void shutdownFailover(ContextClosedEvent event) { + if (future != null && !future.isDone()) { + future.cancel(true); + } + if (scheduledExecutor != null) { + scheduledExecutor.shutdown(); + logger.info("Entrance Failover Server exit!"); + } + } + + public void failoverTask() { + future = + scheduledExecutor.scheduleWithFixedDelay( + () -> { + EntranceSchedulerContext schedulerContext = + (EntranceSchedulerContext) + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext(); + + // entrance do not failover job when it is offline + if (schedulerContext.getOfflineFlag()) return; + + CommonLock commonLock = new CommonLock(); + commonLock.setLockObject(ENTRANCE_FAILOVER_LOCK); + Boolean locked = false; + try { + locked = commonLockService.lock(commonLock, 30 * 1000L); + if (!locked) return; + logger.info("success locked {}", ENTRANCE_FAILOVER_LOCK); + + // get all entrance server from eureka + ServiceInstance[] serviceInstances = + Sender.getInstances(Sender.getThisServiceInstance().getApplicationName()); + if (serviceInstances == null || serviceInstances.length <= 0) return; + + // serverInstance to map + Map serverInstanceMap = + Arrays.stream(serviceInstances) + .collect( + Collectors.toMap( + ServiceInstance::getInstance, + ServiceInstance::getRegistryTimestamp, + (k1, k2) -> k2)); + + // It is very important to avoid repeated execute job + // when failover self job, if self instance is empty, the 
job can be repeated + // execute + if (!serverInstanceMap.containsKey(Sender.getThisInstance())) { + logger.warn( + "server has just started and has not get self info, it does not failover"); + return; + } + + // get failover job expired time (获取任务故障转移过期时间,配置为0表示不过期, 过期则不处理) + long expiredTimestamp = 0L; + if (EntranceConfiguration.ENTRANCE_FAILOVER_DATA_INTERVAL_TIME() > 0) { + expiredTimestamp = + System.currentTimeMillis() + - EntranceConfiguration.ENTRANCE_FAILOVER_DATA_INTERVAL_TIME(); + } + + List jobRequests = + JobHistoryHelper.queryWaitForFailoverTask( + serverInstanceMap, + getUnCompleteStatus(), + expiredTimestamp, + EntranceConfiguration.ENTRANCE_FAILOVER_DATA_NUM_LIMIT()); + if (jobRequests.isEmpty()) return; + List ids = + jobRequests.stream().map(JobRequest::getId).collect(Collectors.toList()); + logger.info("success query failover jobs , job size: {}, ids: {}", ids.size(), ids); + + // failover to local server + for (JobRequest jobRequest : jobRequests) { + entranceServer.failoverExecute(jobRequest); + } + logger.info("finished execute failover jobs, job ids: {}", ids); + + } catch (Exception e) { + logger.error("failover failed", e); + } finally { + if (locked) commonLockService.unlock(commonLock); + } + }, + EntranceConfiguration.ENTRANCE_FAILOVER_SCAN_INIT_TIME(), + EntranceConfiguration.ENTRANCE_FAILOVER_SCAN_INTERVAL(), + TimeUnit.MILLISECONDS); + } + + private List getUnCompleteStatus() { + List status = new ArrayList<>(); + Enumeration.ValueSet values = SchedulerEventState.values(); + Iterator iterator = values.iterator(); + while (iterator.hasNext()) { + Enumeration.Value next = iterator.next(); + if (!SchedulerEventState.isCompleted(next)) status.add(next.toString()); + } + return status; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala index 
1035de1e2b..a610d524b2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala @@ -17,28 +17,42 @@ package org.apache.linkis.entrance +import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.exception.{ErrorException, LinkisException, LinkisRuntimeException} import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.cs.CSEntranceHelper +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorException, SubmitFailedException} import org.apache.linkis.entrance.execute.EntranceJob +import org.apache.linkis.entrance.job.EntranceExecutionJob import org.apache.linkis.entrance.log.LogReader +import org.apache.linkis.entrance.parser.ParserUtils import org.apache.linkis.entrance.timeout.JobTimeoutManager import org.apache.linkis.entrance.utils.JobHistoryHelper +import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.task.RequestTaskKill import org.apache.linkis.governance.common.utils.LoggerUtils +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.rpc.Sender +import org.apache.linkis.rpc.conf.RPCConfiguration import org.apache.linkis.scheduler.queue.{Job, SchedulerEventState} import org.apache.linkis.server.conf.ServerConfiguration import org.apache.commons.lang3.StringUtils import 
org.apache.commons.lang3.exception.ExceptionUtils -import java.text.MessageFormat -import java.util +import java.{lang, util} +import java.text.{MessageFormat, SimpleDateFormat} +import java.util.Date +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ abstract class EntranceServer extends Logging { @@ -46,6 +60,8 @@ abstract class EntranceServer extends Logging { private val jobTimeoutManager: JobTimeoutManager = new JobTimeoutManager() + private val timeoutCheck = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + def init(): Unit def getName: String @@ -82,6 +98,294 @@ abstract class EntranceServer extends Logging { LoggerUtils.setJobIdMDC(jobRequest.getId.toString) val logAppender = new java.lang.StringBuilder() + jobRequest = dealInitedJobRequest(jobRequest, logAppender) + + val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest) + Utils.tryThrow { + job.init() + job.setLogListener(getEntranceContext.getOrCreateLogManager()) + job.setProgressListener(getEntranceContext.getOrCreatePersistenceManager()) + job.setJobListener(getEntranceContext.getOrCreatePersistenceManager()) + job match { + case entranceJob: EntranceJob => + entranceJob.setEntranceListenerBus(getEntranceContext.getOrCreateEventListenerBus) + case _ => + } + Utils.tryCatch { + if (logAppender.length() > 0) { + job.getLogListener.foreach(_.onLogUpdate(job, logAppender.toString.trim)) + } + } { t => + logger.error("Failed to write init log, reason: ", t) + } + + /** + * job.afterStateChanged() method is only called in job.run(), and job.run() is called only + * after job is scheduled so it suggest that we lack a hook for job init, currently we call + * this to trigger JobListener.onJobinit() + */ + Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) + if (logger.isDebugEnabled()) { + logger.debug( + s"After code preprocessing, the real execution code is:${jobRequest.getExecutionCode}" + ) + } + if 
(StringUtils.isBlank(jobRequest.getExecutionCode)) { + throw new SubmitFailedException( + SUBMIT_CODE_ISEMPTY.getErrorCode, + SUBMIT_CODE_ISEMPTY.getErrorDesc + ) + } + getEntranceContext.getOrCreateScheduler().submit(job) + val msg = LogUtils.generateInfo( + s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted " + ) + logger.info(msg) + + job match { + case entranceJob: EntranceJob => + entranceJob.getJobRequest.setReqId(job.getId()) + if (timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { + jobTimeoutManager.add(job.getId(), entranceJob) + } + entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg)) + case _ => + } + LoggerUtils.removeJobIdMDC() + job + } { t => + LoggerUtils.removeJobIdMDC() + job.onFailure("Submitting the query failed!(提交查询失败!)", t) + val _jobRequest: JobRequest = + getEntranceContext.getOrCreateEntranceParser().parseToJobRequest(job) + getEntranceContext + .getOrCreatePersistenceManager() + .createPersistenceEngine() + .updateIfNeeded(_jobRequest) + t match { + case e: LinkisException => e + case e: LinkisRuntimeException => e + case t: Throwable => + new SubmitFailedException( + SUBMITTING_QUERY_FAILED.getErrorCode, + SUBMITTING_QUERY_FAILED.getErrorDesc + ExceptionUtils.getRootCauseMessage(t), + t + ) + } + } + } + + def logReader(execId: String): LogReader + + def getJob(execId: String): Option[Job] = + getEntranceContext.getOrCreateScheduler().get(execId).map(_.asInstanceOf[Job]) + + private[entrance] def getEntranceWebSocketService: Option[EntranceWebSocketService] = + if (ServerConfiguration.BDP_SERVER_SOCKET_MODE.getValue) { + if (entranceWebSocketService.isEmpty) synchronized { + if (entranceWebSocketService.isEmpty) { + entranceWebSocketService = Some(new EntranceWebSocketService) + entranceWebSocketService.foreach(_.setEntranceServer(this)) + entranceWebSocketService.foreach( + getEntranceContext.getOrCreateEventListenerBus.addListener + ) + } + } + entranceWebSocketService + } else 
None + + def getAllUndoneTask(filterWords: String): Array[EntranceJob] = { + val consumers = getEntranceContext + .getOrCreateScheduler() + .getSchedulerContext + .getOrCreateConsumerManager + .listConsumers() + .toSet + val filterConsumer = if (StringUtils.isNotBlank(filterWords)) { + consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + } else { + consumers + } + filterConsumer + .flatMap { consumer => + consumer.getRunningEvents ++ consumer.getConsumeQueue.getWaitingEvents + } + .filter(job => job != null && job.isInstanceOf[EntranceJob]) + .map(_.asInstanceOf[EntranceJob]) + .toArray + } + + def getAllConsumeQueueTask(): Array[EntranceJob] = { + val consumers = getEntranceContext + .getOrCreateScheduler() + .getSchedulerContext + .getOrCreateConsumerManager + .listConsumers() + .toSet + + consumers + .flatMap { consumer => + consumer.getConsumeQueue.getWaitingEvents + } + .filter(job => job != null && job.isInstanceOf[EntranceJob]) + .map(_.asInstanceOf[EntranceJob]) + .toArray + } + + def clearAllConsumeQueue(): Unit = { + getEntranceContext + .getOrCreateScheduler() + .getSchedulerContext + .getOrCreateConsumerManager + .listConsumers() + .foreach(_.getConsumeQueue.clearAll()) + } + + def updateAllNotExecutionTaskInstances(retryWhenUpdateFail: Boolean): Unit = { + val consumeQueueTasks = getAllConsumeQueueTask() + + clearAllConsumeQueue() + logger.info("Finished to clean all ConsumeQueue") + + if (consumeQueueTasks != null && consumeQueueTasks.length > 0) { + val taskIds = new util.ArrayList[Long]() + consumeQueueTasks.foreach(job => { + taskIds.add(job.getJobRequest.getId.asInstanceOf[Long]) + job match { + case entranceExecutionJob: EntranceExecutionJob => + val msg = LogUtils.generateWarn( + s"job ${job.getJobRequest.getId} clean from ConsumeQueue, wait for failover" + ) + entranceExecutionJob.getLogListener.foreach(_.onLogUpdate(entranceExecutionJob, msg)) + entranceExecutionJob.getLogWriter.foreach(_.close()) + case _ => + } + }) + + 
JobHistoryHelper.updateAllConsumeQueueTask(taskIds, retryWhenUpdateFail) + logger.info("Finished to update all not execution task instances") + } + } + + /** + * execute failover job (提交故障转移任务,返回新的execId) + * + * @param jobRequest + */ + def failoverExecute(jobRequest: JobRequest): Unit = { + + if (null == jobRequest || null == jobRequest.getId || jobRequest.getId <= 0) { + throw new EntranceErrorException( + PERSIST_JOBREQUEST_ERROR.getErrorCode, + PERSIST_JOBREQUEST_ERROR.getErrorDesc + ) + } + + val logAppender = new java.lang.StringBuilder() + logAppender.append( + "*************************************FAILOVER************************************** \n" + ) + + // try to kill ec + killOldEC(jobRequest, logAppender); + + // deal Inited jobRequest, if status is Inited, need to deal by all Interceptors, such as set log_path + if (SchedulerEventState.isInitedByStr(jobRequest.getStatus)) { + dealInitedJobRequest(jobRequest, logAppender) + } + + if ( + EntranceConfiguration.ENTRANCE_FAILOVER_RUNNING_KILL_ENABLED.getValue && + SchedulerEventState.isRunningByStr(jobRequest.getStatus) + ) { + // deal Running jobRequest, if enabled, status changed from Running to Cancelled + dealRunningJobRequest(jobRequest, logAppender) + } else { + // init and submit + initAndSubmitJobRequest(jobRequest, logAppender) + } + } + + def killOldEC(jobRequest: JobRequest, logAppender: lang.StringBuilder): Unit = { + Utils.tryCatch { + logAppender.append( + LogUtils + .generateInfo(s"job ${jobRequest.getId} start to kill old ec \n") + ) + if ( + !SchedulerEventState.isRunning(SchedulerEventState.withName(jobRequest.getStatus)) + || !SchedulerEventState.isScheduled(SchedulerEventState.withName(jobRequest.getStatus)) + ) { + val msg = s"job ${jobRequest.getId} status is not running or scheduled, ignore it" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + return + } + + if ( + jobRequest.getMetrics == null + || 
!jobRequest.getMetrics.containsKey(TaskConstant.JOB_ENGINECONN_MAP) + ) { + val msg = s"job ${jobRequest.getId} not have EC info, ignore it" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + return + } + + val engineMap = jobRequest.getMetrics + .get(TaskConstant.JOB_ENGINECONN_MAP) + .asInstanceOf[util.Map[String, Object]] + + val engineInstance = + engineMap.asScala + .map(_._2.asInstanceOf[util.Map[String, Object]]) + .filter(_.containsKey(TaskConstant.ENGINE_INSTANCE)) + .maxBy(_.getOrDefault(TaskConstant.ENGINE_CONN_SUBMIT_TIME, "0").toString) + + if (engineInstance == null || engineInstance.containsKey(TaskConstant.FAILOVER_FLAG)) { + val msg = + s"job ${jobRequest.getId} do not submit to EC or already failover, not need kill ec" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + return + } + engineInstance.put(TaskConstant.FAILOVER_FLAG, "") + + val ecInstance = ServiceInstance( + GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue, + engineInstance.get(TaskConstant.ENGINE_INSTANCE).toString + ) + if (jobRequest.getLabels.asScala.exists(_.isInstanceOf[ExecuteOnceLabel])) { + // kill ec by linkismanager + val engineStopRequest = new EngineStopRequest + engineStopRequest.setServiceInstance(ecInstance) + // send to linkismanager kill ec + Sender + .getSender(RPCConfiguration.LINKIS_MANAGER_SERVICE_NAME.getValue) + .send(engineStopRequest) + val msg = + s"job ${jobRequest.getId} send EngineStopRequest to linkismanager, kill EC instance $ecInstance" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + } else if (engineInstance.containsKey(TaskConstant.ENGINE_CONN_TASK_ID)) { + // get ec taskId + val engineTaskId = engineInstance.get(TaskConstant.ENGINE_CONN_TASK_ID).toString + // send to ec kill task + Sender + .getSender(ecInstance) + .send(RequestTaskKill(engineTaskId)) + val msg = + s"job ${jobRequest.getId} send RequestTaskKill to kill engineConn $ecInstance, execID $engineTaskId" 
+ logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + } + } { t => + logger.error(s"job ${jobRequest.getId} kill ec error", t) + } + } + + def dealInitedJobRequest(jobReq: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + var jobRequest = jobReq Utils.tryThrow( getEntranceContext .getOrCreateEntranceInterceptors() @@ -128,6 +432,68 @@ abstract class EntranceServer extends Logging { .updateIfNeeded(jobRequest) error } + jobRequest + } + + def dealRunningJobRequest(jobRequest: JobRequest, logAppender: lang.StringBuilder): Unit = { + Utils.tryCatch { + // error_msg + val msg = + MessageFormat.format( + EntranceErrorCodeSummary.FAILOVER_RUNNING_TO_CANCELLED.getErrorDesc, + jobRequest.getId.toString + ) + // init jobRequest properties + jobRequest.setStatus(SchedulerEventState.Cancelled.toString) + jobRequest.setProgress("1.0") + jobRequest.setInstances(Sender.getThisInstance) + jobRequest.setErrorCode(EntranceErrorCodeSummary.FAILOVER_RUNNING_TO_CANCELLED.getErrorCode) + jobRequest.setErrorDesc(msg) + + // update jobRequest + getEntranceContext + .getOrCreatePersistenceManager() + .createPersistenceEngine() + .updateIfNeeded(jobRequest) + + // getOrGenerate log_path + var logPath = jobRequest.getLogPath + if (StringUtils.isBlank(logPath)) { + ParserUtils.generateLogPath(jobRequest, null) + logPath = jobRequest.getLogPath + logAppender.append( + LogUtils.generateInfo(s"job ${jobRequest.getId} generate new logPath $logPath \n") + ) + } + val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest) + val logWriter = getEntranceContext.getOrCreateLogManager().createLogWriter(job) + if (logAppender.length() > 0) { + logWriter.write(logAppender.toString.trim) + } + + logWriter.write(LogUtils.generateInfo(msg) + "\n") + logWriter.flush() + logWriter.close() + + } { case e: Exception => + logger.error(s"Job ${jobRequest.getId} failover, change status error", e) + } + } + + def initAndSubmitJobRequest(jobRequest: 
JobRequest, logAppender: lang.StringBuilder): Unit = { + // init properties + initJobRequestProperties(jobRequest, logAppender) + + // update jobRequest + getEntranceContext + .getOrCreatePersistenceManager() + .createPersistenceEngine() + .updateIfNeeded(jobRequest) + + // reset `UpdateOrderFlag` + jobRequest.setUpdateOrderFlag(true) + + logger.info(s"job ${jobRequest.getId} update JobRequest success") val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest) Utils.tryThrow { @@ -145,7 +511,7 @@ abstract class EntranceServer extends Logging { job.getLogListener.foreach(_.onLogUpdate(job, logAppender.toString.trim)) } } { t => - logger.error("Failed to write init log, reason: ", t) + logger.error("Failed to write init JobRequest log, reason: ", t) } /** @@ -154,27 +520,35 @@ abstract class EntranceServer extends Logging { * this to trigger JobListener.onJobinit() */ Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) + if (logger.isDebugEnabled()) { + logger.debug( + s"After code preprocessing, the real execution code is:${jobRequest.getExecutionCode}" + ) + } + if (StringUtils.isBlank(jobRequest.getExecutionCode)) { + throw new SubmitFailedException( + SUBMIT_CODE_ISEMPTY.getErrorCode, + SUBMIT_CODE_ISEMPTY.getErrorDesc + ) + } getEntranceContext.getOrCreateScheduler().submit(job) val msg = LogUtils.generateInfo( - s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted " + s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted, success to failover" ) logger.info(msg) job match { case entranceJob: EntranceJob => entranceJob.getJobRequest.setReqId(job.getId()) - if (jobTimeoutManager.timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { + if (timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { jobTimeoutManager.add(job.getId(), entranceJob) } entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg)) case _ => } - LoggerUtils.removeJobIdMDC() - job } { t 
=> - LoggerUtils.removeJobIdMDC() job.onFailure("Submitting the query failed!(提交查询失败!)", t) - val _jobRequest: JobRequest = + val _jobRequest = getEntranceContext.getOrCreateEntranceParser().parseToJobRequest(job) getEntranceContext .getOrCreatePersistenceManager() @@ -193,26 +567,83 @@ abstract class EntranceServer extends Logging { } } - def logReader(execId: String): LogReader + private def initJobRequestProperties( + jobRequest: JobRequest, + logAppender: lang.StringBuilder + ): Unit = { + logger.info(s"job ${jobRequest.getId} start to initialize the properties") + val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") + val initInstance = Sender.getThisInstance + val initDate = new Date(System.currentTimeMillis) + val initStatus = SchedulerEventState.Inited.toString + val initProgress = "0.0" + val initReqId = "" - def getJob(execId: String): Option[Job] = - getEntranceContext.getOrCreateScheduler().get(execId).map(_.asInstanceOf[Job]) + logAppender.append( + LogUtils + .generateInfo(s"job ${jobRequest.getId} start to Initialize the properties \n") + ) + logAppender.append( + LogUtils.generateInfo(s"the instances ${jobRequest.getInstances} -> ${initInstance} \n") + ) + logAppender.append( + LogUtils.generateInfo( + s"the created_time ${sdf.format(jobRequest.getCreatedTime)} -> ${sdf.format(initDate)} \n" + ) + ) + logAppender.append( + LogUtils.generateInfo(s"the status ${jobRequest.getStatus} -> $initStatus \n") + ) + logAppender.append( + LogUtils.generateInfo(s"the progress ${jobRequest.getProgress} -> $initProgress \n") + ) - private[entrance] def getEntranceWebSocketService: Option[EntranceWebSocketService] = - if (ServerConfiguration.BDP_SERVER_SOCKET_MODE.getValue) { - if (entranceWebSocketService.isEmpty) synchronized { - if (entranceWebSocketService.isEmpty) { - entranceWebSocketService = Some(new EntranceWebSocketService) - entranceWebSocketService.foreach(_.setEntranceServer(this)) - entranceWebSocketService.foreach( - 
getEntranceContext.getOrCreateEventListenerBus.addListener + val metricMap = new util.HashMap[String, Object]() + if (EntranceConfiguration.ENTRANCE_FAILOVER_RETAIN_METRIC_ENGINE_CONN_ENABLED.getValue) { + if ( + jobRequest.getMetrics != null && jobRequest.getMetrics.containsKey( + TaskConstant.JOB_ENGINECONN_MAP ) - } + ) { + val oldEngineconnMap = jobRequest.getMetrics + .get(TaskConstant.JOB_ENGINECONN_MAP) + .asInstanceOf[util.Map[String, Object]] + metricMap.put(TaskConstant.JOB_ENGINECONN_MAP, oldEngineconnMap) } - entranceWebSocketService - } else None + } - def getAllUndoneTask(filterWords: String): Array[EntranceJob] = { + if (EntranceConfiguration.ENTRANCE_FAILOVER_RETAIN_METRIC_YARN_RESOURCE_ENABLED.getValue) { + if ( + jobRequest.getMetrics != null && jobRequest.getMetrics.containsKey( + TaskConstant.JOB_YARNRESOURCE + ) + ) { + val oldResourceMap = jobRequest.getMetrics + .get(TaskConstant.JOB_YARNRESOURCE) + .asInstanceOf[util.Map[String, Object]] + metricMap.put(TaskConstant.JOB_YARNRESOURCE, oldResourceMap) + } + } + + jobRequest.setInstances(initInstance) + jobRequest.setCreatedTime(initDate) + jobRequest.setStatus(initStatus) + jobRequest.setProgress(initProgress) + jobRequest.setReqId(initReqId) + jobRequest.setErrorCode(0) + jobRequest.setErrorDesc("") + jobRequest.setMetrics(metricMap) + jobRequest.getMetrics.put(TaskConstant.JOB_SUBMIT_TIME, initDate) + // Allow task status updates to be unordered + jobRequest.setUpdateOrderFlag(false) + + logAppender.append( + LogUtils.generateInfo(s"job ${jobRequest.getId} success to initialize the properties \n") + ) + logger.info(s"job ${jobRequest.getId} success to initialize the properties") + } + + def getAllUndoneTask(filterWords: String, ecType: String = null): Array[EntranceJob] = { val consumers = getEntranceContext .getOrCreateScheduler() .getSchedulerContext @@ -220,7 +651,14 @@ abstract class EntranceServer extends Logging { .listConsumers() .toSet val filterConsumer = if 
(StringUtils.isNotBlank(filterWords)) { - consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + if (StringUtils.isNotBlank(ecType)) { + consumers.filter(consumer => + consumer.getGroup.getGroupName.contains(filterWords) && consumer.getGroup.getGroupName + .contains(ecType) + ) + } else { + consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + } } else { consumers } @@ -233,6 +671,40 @@ abstract class EntranceServer extends Logging { .toArray } + /** + * to check timeout task,and kill timeout task timeout: default > 48h + */ + def startTimeOutCheck(): Unit = { + Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + override def run(): Unit = { + Utils.tryCatch { + + val timeoutType = EntranceConfiguration.ENTRANCE_TASK_TIMEOUT.getHotValue() + logger.info(s"Start to check timeout Job, timout is ${timeoutType}") + val timeoutTime = System.currentTimeMillis() - timeoutType.toLong + getAllUndoneTask(null, null).filter(job => job.createTime < timeoutTime).foreach { + job => + job.onFailure(s"Job has run for longer than the maximum time $timeoutType", null) + } + logger.info(s"Finished to check timeout Job, timout is ${timeoutType}") + } { case t: Throwable => + logger.warn(s"TimeoutDetective Job failed. 
${t.getMessage}", t) + } + } + + }, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + TimeUnit.MILLISECONDS + ) + } + + if (timeoutCheck) { + logger.info("Job time check is enabled") + startTimeOutCheck() + } + } object EntranceServer { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala index 714b9f0cc2..b5339c9e2e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala @@ -215,18 +215,6 @@ class EntranceWebSocketService s"Your job's execution code is (after variable substitution and code check) " ) ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) entranceServer.getEntranceContext .getOrCreateLogManager() .onLogUpdate( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala index 7c3935e69b..9e09374e4d 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala @@ -215,12 +215,83 @@ 
object EntranceConfiguration { val GROUP_CACHE_EXPIRE_TIME = CommonVars("wds.linkis.consumer.group.expire.time", 50) val CLIENT_MONITOR_CREATOR = - CommonVars("wds.linkis.entrance.client.monitor.creator", "LINKISCLI") + CommonVars("wds.linkis.entrance.client.monitor.creator", "LINKISCLI,BdpClient") val CREATOR_IP_SWITCH = CommonVars("wds.linkis.entrance.user.creator.ip.interceptor.switch", false) - val ENABLE_ENTRANCE_DIRTY_DATA_CLEAR = - CommonVars("linkis.entrance.auto.clean.dirty.data.enable", false) + val TEMPLATE_CONF_SWITCH = + CommonVars("wds.linkis.entrance.template.conf.interceptor.switch", false) + + val TEMPLATE_CONF_ADD_ONCE_LABEL_ENABLE = + CommonVars("wds.linkis.entrance.template.add.once.label.enable", false) + + val ENABLE_ENTRANCE_DIRTY_DATA_CLEAR: CommonVars[Boolean] = + CommonVars[Boolean]("linkis.entrance.auto.clean.dirty.data.enable", true) + + val ENTRANCE_CREATOR_JOB_LIMIT: CommonVars[Int] = + CommonVars[Int]( + "linkis.entrance.creator.job.concurrency.limit", + 10000, + "Creator task concurrency limit parameters" + ) + + val ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE = + CommonVars("linkis.entrance.creator.job.concurrency.limit.conf.cache.time", 30L) + + val ENTRANCE_TASK_TIMEOUT = + CommonVars("linkis.entrance.task.timeout", new TimeType("48h")) + + val ENTRANCE_TASK_TIMEOUT_SCAN = + CommonVars("linkis.entrance.task.timeout.scan", new TimeType("12h")) + + val ENABLE_HDFS_JVM_USER = + CommonVars[Boolean]("linkis.entrance.enable.hdfs.jvm.user", true).getValue + + val ENTRANCE_FAILOVER_ENABLED = CommonVars("linkis.entrance.failover.enable", false).getValue + + val ENTRANCE_FAILOVER_SCAN_INIT_TIME = + CommonVars("linkis.entrance.failover.scan.init.time", 3 * 1000).getValue + + val ENTRANCE_FAILOVER_SCAN_INTERVAL = + CommonVars("linkis.entrance.failover.scan.interval", 30 * 1000).getValue + + val ENTRANCE_FAILOVER_DATA_NUM_LIMIT = + CommonVars("linkis.entrance.failover.data.num.limit", 10).getValue + + val ENTRANCE_FAILOVER_DATA_INTERVAL_TIME = + 
CommonVars("linkis.entrance.failover.data.interval.time", new TimeType("1d").toLong).getValue + + // if true, the waitForRetry job in runningJobs can be failover + val ENTRANCE_FAILOVER_RETRY_JOB_ENABLED = + CommonVars("linkis.entrance.failover.retry.job.enable", false) + + val ENTRANCE_UPDATE_BATCH_SIZE = CommonVars("linkis.entrance.update.batch.size", 100) + + // if true, the job in ConsumeQueue can be failover + val ENTRANCE_SHUTDOWN_FAILOVER_CONSUME_QUEUE_ENABLED = + CommonVars("linkis.entrance.shutdown.failover.consume.queue.enable", true).getValue + + val ENTRANCE_GROUP_SCAN_ENABLED = CommonVars("linkis.entrance.group.scan.enable", false) + + val ENTRANCE_GROUP_SCAN_INIT_TIME = CommonVars("linkis.entrance.group.scan.init.time", 3 * 1000) + + val ENTRANCE_GROUP_SCAN_INTERVAL = CommonVars("linkis.entrance.group.scan.interval", 60 * 1000) + + val ENTRANCE_FAILOVER_RETAIN_METRIC_ENGINE_CONN_ENABLED = + CommonVars("linkis.entrance.failover.retain.metric.engine.conn.enable", false) + + val ENTRANCE_FAILOVER_RETAIN_METRIC_YARN_RESOURCE_ENABLED = + CommonVars("linkis.entrance.failover.retain.metric.yarn.resource.enable", false) + + // if true, job whose status is running will be set to Cancelled + val ENTRANCE_FAILOVER_RUNNING_KILL_ENABLED = + CommonVars("linkis.entrance.failover.running.kill.enable", false) + + val LINKIS_ENTRANCE_SKIP_ORCHESTRATOR = + CommonVars("linkis.entrance.skip.orchestrator", false).getValue + + val ENABLE_HDFS_RES_DIR_PRIVATE = + CommonVars[Boolean]("linkis.entrance.enable.hdfs.res.dir.private", false).getValue } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala index 266de6eb5b..0638ef59d2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala 
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala @@ -261,6 +261,15 @@ class DefaultEntranceExecutor(id: Long) true } + def getRunningOrchestrationFuture: Option[OrchestrationFuture] = { + val asyncReturn = getEngineExecuteAsyncReturn + if (asyncReturn.isDefined) { + asyncReturn.get.getOrchestrationFuture() + } else { + None + } + } + override protected def callExecute(request: ExecuteRequest): ExecuteResponse = { val entranceExecuteRequest: EntranceExecuteRequest = request match { @@ -282,7 +291,10 @@ class DefaultEntranceExecutor(id: Long) val msg = s"JobRequest (${entranceExecuteRequest.jobId()}) was submitted to Orchestrator." logger.info(msg) entranceExecuteRequest.getJob.getLogListener.foreach( - _.onLogUpdate(entranceExecuteRequest.getJob, LogUtils.generateInfo(msg)) + _.onLogUpdate( + entranceExecuteRequest.getJob, + LogUtils.generateInfo(msg + "(您的任务已经提交给Orchestrator进行编排执行)") + ) ) if (entranceExecuteRequest.getJob.getJobRequest.getMetrics == null) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala index be7fb13871..d20b5ea8fb 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala @@ -118,14 +118,6 @@ abstract class EntranceExecutor(val id: Long) extends Executor with Logging { super.hashCode() } - def getRunningOrchestrationFuture: Option[OrchestrationFuture] = { - if (null != engineReturn) { - engineReturn.getOrchestrationFuture() - } else { - None - } - } - } class EngineExecuteAsyncReturn( diff --git 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala index 05bc5311b0..4e7ca79367 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala @@ -20,8 +20,10 @@ package org.apache.linkis.entrance.execute import org.apache.linkis.common.exception.WarnException import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.EntranceErrorException +import org.apache.linkis.entrance.execute.simple.{SimpleEntranceExecutor, SimpleExecuteBusContext} import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.scheduler.executer.{Executor, ExecutorManager} import org.apache.linkis.scheduler.queue.{GroupFactory, Job, SchedulerEvent} @@ -91,6 +93,14 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory) case jobReq: JobRequest => val entranceEntranceExecutor = new DefaultEntranceExecutor(jobReq.getId) + if (EntranceConfiguration.LINKIS_ENTRANCE_SKIP_ORCHESTRATOR) { + new SimpleEntranceExecutor( + jobReq.getId, + SimpleExecuteBusContext.getOrchestratorListenerBusContext() + ) + } else { + new DefaultEntranceExecutor(jobReq.getId) + } // getEngineConn Executor job.getLogListener.foreach( _.onLogUpdate( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala index b762f54605..50efcafc85 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala @@ -160,7 +160,7 @@ abstract class EntranceJob extends Job { getLogListener.foreach( _.onLogUpdate( this, - LogUtils.generateInfo("Your job is Scheduled. Please wait it to run.") + LogUtils.generateInfo("Your job is Scheduled. Please wait it to run.(您的任务已经调度运行中)") ) ) case WaitForRetry => @@ -174,7 +174,8 @@ abstract class EntranceJob extends Job { getLogListener.foreach( _.onLogUpdate( this, - LogUtils.generateInfo("Your job is Running now. Please wait it to complete.") + LogUtils + .generateInfo("Your job is Running now. Please wait it to complete.(您的任务已经在运行中)") ) ) getJobRequest.getMetrics.put( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala index 3efcf41c89..a251c56de7 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala @@ -22,7 +22,7 @@ import org.apache.linkis.orchestrator.ecm.EngineConnManager import org.apache.linkis.scheduler.listener.ExecutorListener import org.apache.linkis.scheduler.queue.GroupFactory -class EntranceExecutorManagerImpl(groupFactory: GroupFactory, engineConnManager: EngineConnManager) +class EntranceExecutorManagerImpl(groupFactory: GroupFactory) extends 
EntranceExecutorManager(groupFactory) { override def getOrCreateInterceptors(): Array[ExecuteRequestInterceptor] = Array( diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IORecord.java b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/EngineLockListener.scala similarity index 68% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IORecord.java rename to linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/EngineLockListener.scala index 90bcbe9da8..6f2798a52c 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IORecord.java +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/EngineLockListener.scala @@ -15,21 +15,14 @@ * limitations under the License. */ -package org.apache.linkis.storage.resultset.io; +package org.apache.linkis.entrance.execute.simple +import org.apache.linkis.orchestrator.listener.OrchestratorListenerBusContext -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.resultset.ResultRecord; +object SimpleExecuteBusContext { -public class IORecord implements ResultRecord { + private lazy val orchestratorListenerBusContext = OrchestratorListenerBusContext.createBusContext - public byte[] value; + def getOrchestratorListenerBusContext(): OrchestratorListenerBusContext = + orchestratorListenerBusContext - public IORecord(byte[] value) { - this.value = value; - } - - @Override - public Record cloneRecord() { - return new IORecord(value); - } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala new file mode 100644 index 
0000000000..bc52fbd800 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.execute.simple + +import org.apache.linkis.common.listener.Event +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.entrance.EntranceServer +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.orchestrator.listener.OrchestratorAsyncEvent +import org.apache.linkis.orchestrator.listener.task.{ + TaskLogEvent, + TaskLogListener, + TaskProgressListener, + TaskRunningInfoEvent +} + +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import javax.annotation.PostConstruct + +@Component +class SimpleASyncListener extends TaskLogListener with TaskProgressListener with Logging { + + @Autowired private var entranceServer: EntranceServer = _ + + @PostConstruct + def init(): Unit = { + if (EntranceConfiguration.LINKIS_ENTRANCE_SKIP_ORCHESTRATOR) { + SimpleExecuteBusContext + .getOrchestratorListenerBusContext() + 
.getOrchestratorAsyncListenerBus + .addListener(this) + } + } + + override def onLogUpdate(taskLogEvent: TaskLogEvent): Unit = {} + + override def onProgressOn(taskProgressEvent: TaskRunningInfoEvent): Unit = {} + + override def onEvent(event: OrchestratorAsyncEvent): Unit = {} + + override def onEventError(event: Event, t: Throwable): Unit = {} +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala new file mode 100644 index 0000000000..d9e18081d2 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.execute.simple + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} +import org.apache.linkis.entrance.execute.{EngineExecuteAsyncReturn, EntranceExecutor} +import org.apache.linkis.entrance.job.EntranceExecuteRequest +import org.apache.linkis.governance.common.utils.LoggerUtils +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.orchestrator.code.plans.ast.CodeJob +import org.apache.linkis.orchestrator.code.plans.logical.CodeLogicalUnitTaskDesc +import org.apache.linkis.orchestrator.computation.entity.ComputationJobReq +import org.apache.linkis.orchestrator.computation.physical.CodeLogicalUnitExecTask +import org.apache.linkis.orchestrator.converter.ASTContextImpl +import org.apache.linkis.orchestrator.execution.{ + AsyncTaskResponse, + FailedTaskResponse, + SucceedTaskResponse +} +import org.apache.linkis.orchestrator.listener.OrchestratorListenerBusContext +import org.apache.linkis.orchestrator.plans.physical.{ExecTask, PhysicalContextImpl} +import org.apache.linkis.orchestrator.plans.unit.CodeLogicalUnit +import org.apache.linkis.scheduler.executer._ + +import java.util + +class SimpleEntranceExecutor( + id: Long, + orchestratorListenerBusContext: OrchestratorListenerBusContext +) extends EntranceExecutor(id) + with SingleTaskOperateSupport + with Logging { + + private var codeUnitExecTask: CodeLogicalUnitExecTask = null + + override protected def callExecute(request: ExecuteRequest): ExecuteResponse = { + val entranceExecuteRequest: EntranceExecuteRequest = request match { + case request: EntranceExecuteRequest => + request + case _ => + throw new EntranceErrorException( + EntranceErrorCode.EXECUTE_REQUEST_INVALID.getErrCode, + s"Invalid entranceExecuteRequest : 
${request.code}" + ) + } + // 1. create JobReq + val computationJobReq = requestToComputationJobReq(entranceExecuteRequest) + // 2. create code job + val codeJob = new CodeJob(null, null) + val astContext = ASTContextImpl.newBuilder().setJobReq(computationJobReq).build() + codeJob.setAstContext(astContext) + codeJob.setCodeLogicalUnit(computationJobReq.getCodeLogicalUnit) + codeJob.setParams(computationJobReq.getParams) + codeJob.setName(computationJobReq.getName + "_Job") + codeJob.setSubmitUser(computationJobReq.getSubmitUser) + codeJob.setExecuteUser(computationJobReq.getExecuteUser) + codeJob.setLabels(computationJobReq.getLabels) + codeJob.setPriority(computationJobReq.getPriority) + codeUnitExecTask = new CodeLogicalUnitExecTask(Array[ExecTask](), Array[ExecTask]()) + // set job id, can find by getEntranceContext.getOrCreateScheduler().get(execId).map(_.asInstanceOf[Job]) + codeUnitExecTask.setId(entranceExecuteRequest.getJob.getId) + // 3.set code unit + codeUnitExecTask.setCodeLogicalUnit(computationJobReq.getCodeLogicalUnit) + codeUnitExecTask.setTaskDesc(CodeLogicalUnitTaskDesc(codeJob)) + // 4. set context + val context = new PhysicalContextImpl(codeUnitExecTask, Array.empty) + context.setSyncBus(orchestratorListenerBusContext.getOrchestratorSyncListenerBus) + context.setAsyncBus(orchestratorListenerBusContext.getOrchestratorAsyncListenerBus) + // 5. 
execute + val response = codeUnitExecTask.execute() + response match { + case async: AsyncTaskResponse => + new EngineExecuteAsyncReturn(request, null) + case succeed: SucceedTaskResponse => + logger.info(s"Succeed to execute ExecTask(${getId})") + SuccessExecuteResponse() + case failedTaskResponse: FailedTaskResponse => + logger.info(s"Failed to execute ExecTask(${getId})") + ErrorExecuteResponse(failedTaskResponse.getErrorMsg, failedTaskResponse.getCause) + case _ => + logger.warn(s"ExecTask(${getId}) need to retry") + ErrorExecuteResponse("unknown response: " + response, null) + } + } + + def requestToComputationJobReq( + entranceExecuteRequest: EntranceExecuteRequest + ): ComputationJobReq = { + val jobReqBuilder = ComputationJobReq.newBuilder() + jobReqBuilder.setId(entranceExecuteRequest.jobId()) + jobReqBuilder.setSubmitUser(entranceExecuteRequest.submitUser()) + jobReqBuilder.setExecuteUser(entranceExecuteRequest.executeUser()) + val codeTypeLabel: Label[_] = LabelUtil.getCodeTypeLabel(entranceExecuteRequest.getLabels) + if (null == codeTypeLabel) { + throw new EntranceErrorException( + EntranceErrorCode.EXECUTE_REQUEST_INVALID.getErrCode, + s"code Type Label is needed" + ) + } + val codes = new util.ArrayList[String]() + codes.add(entranceExecuteRequest.code()) + val codeLogicalUnit = + new CodeLogicalUnit(codes, codeTypeLabel.asInstanceOf[CodeLanguageLabel]) + jobReqBuilder.setCodeLogicalUnit(codeLogicalUnit) + jobReqBuilder.setLabels(entranceExecuteRequest.getLabels) + jobReqBuilder.setExecuteUser(entranceExecuteRequest.executeUser()) + jobReqBuilder.setParams(entranceExecuteRequest.properties()) + jobReqBuilder.build().asInstanceOf[ComputationJobReq] + } + + override def kill(): Boolean = { + LoggerUtils.setJobIdMDC(getId.toString) + logger.info("Entrance start to kill job {} invoke Orchestrator ", this.getId) + Utils.tryAndWarn { + if (null != codeUnitExecTask) { + codeUnitExecTask.kill() + } + } + LoggerUtils.removeJobIdMDC() + true + } + + override 
def pause(): Boolean = { + true + } + + override def resume(): Boolean = { + true + } + + override def close(): Unit = { + getEngineExecuteAsyncReturn.foreach { e => + e.notifyError(s"$toString has already been completed with state $state.") + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala new file mode 100644 index 0000000000..46107ff701 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.execute.simple + +import org.apache.linkis.common.listener.Event +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.entrance.EntranceServer +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.orchestrator.listener.OrchestratorSyncEvent +import org.apache.linkis.orchestrator.listener.task.{ + TaskErrorResponseEvent, + TaskResultSetEvent, + TaskResultSetListener, + TaskResultSetSizeEvent, + TaskStatusEvent, + TaskStatusListener +} + +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import javax.annotation.PostConstruct + +/** + * 1.TaskLogListener: job.getLogListener.foreach(_.onLogUpdate(job, logEvent.log)) + * + * 2.TaskProgressListener: entranceJob.getProgressListener.foreach( _.onProgressUpdate(entranceJob, + * progressInfoEvent.progress, entranceJob.getProgressInfo) + * + * 3.TaskResultSetListener entranceContext.getOrCreatePersistenceManager().onResultSizeCreated(j, + * taskResultSize.resultSize) .getOrCreatePersistenceManager() .onResultSetCreated( + * entranceExecuteRequest.getJob, AliasOutputExecuteResponse(firstResultSet.alias, + * firstResultSet.result) ) + * + * 4. 
TaskStatusListener getEngineExecuteAsyncReturn.foreach { jobReturn => jobReturn.notifyStatus( + * ResponseTaskStatus(entranceExecuteRequest.getJob.getId, ExecutionNodeStatus.Succeed) ) } val msg + * = failedResponse.getErrorCode + ", " + failedResponse.getErrorMsg + * getEngineExecuteAsyncReturn.foreach { jobReturn => jobReturn.notifyError(msg, + * failedResponse.getCause) jobReturn.notifyStatus( + * ResponseTaskStatus(entranceExecuteRequest.getJob.getId, ExecutionNodeStatus.Failed) ) } + */ +@Component +class SimpleSyncListener extends TaskStatusListener with TaskResultSetListener with Logging { + + @Autowired private var entranceServer: EntranceServer = _ + + @PostConstruct + def init(): Unit = { + if (EntranceConfiguration.LINKIS_ENTRANCE_SKIP_ORCHESTRATOR) { + SimpleExecuteBusContext + .getOrchestratorListenerBusContext() + .getOrchestratorSyncListenerBus + .addListener(this) + } + } + + override def onStatusUpdate(taskStatusEvent: TaskStatusEvent): Unit = {} + + override def onTaskErrorResponseEvent(taskErrorResponseEvent: TaskErrorResponseEvent): Unit = {} + + override def onResultSetCreate(taskResultSetEvent: TaskResultSetEvent): Unit = {} + + override def onResultSizeCreated(taskResultSetSizeEvent: TaskResultSetSizeEvent): Unit = {} + + override def onSyncEvent(event: OrchestratorSyncEvent): Unit = {} + + override def onEventError(event: Event, t: Throwable): Unit = {} +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala index 627ab82b8e..bceb0f4f57 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala @@ -18,7 
+18,6 @@ package org.apache.linkis.entrance.interceptor.impl import org.apache.linkis.common.utils.CodeAndRunTypeUtils -import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.interceptor.EntranceInterceptor import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.manager.label.utils.LabelUtil @@ -70,8 +69,38 @@ trait CommentHelper { object SQLCommentHelper extends CommentHelper { override val commentPattern: Regex = """\s*--.+\s*""".r.unanchored private val comment = "(?ms)('(?:''|[^'])*')|--.*?$|/\\*.*?\\*/|#.*?$|" + private val comment_sem = "(?i)(comment)\\s+'([^']*)'" private val logger: Logger = LoggerFactory.getLogger(getClass) + def replaceComment(code: String): String = { + try { + val pattern = Pattern.compile(comment_sem) + val matcher = pattern.matcher(code) + val sb = new StringBuffer + while (matcher.find()) { + val commentKeyword = matcher.group(1) + val comment = matcher.group(2) + + /** + * Since we are in a Scala string, and each backslash needs to be escaped in the string + * itself, we need two additional backslashes. Therefore, we end up with a total of four + * backslashes to represent a single literal backslash in the replacement string. 
+ */ + val escapedComment = comment.replaceAll(";", "\\\\\\\\;") + matcher.appendReplacement(sb, commentKeyword + " '" + escapedComment + "'") + } + matcher.appendTail(sb) + sb.toString + } catch { + case e: Exception => + logger.warn("sql comment semicolon replace failed") + code + case t: Throwable => + logger.warn("sql comment semicolon replace failed") + code + } + } + override def dealComment(code: String): String = { try { val p = Pattern.compile(comment) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala index 7a7cb7463a..d9386477e0 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala @@ -18,6 +18,7 @@ package org.apache.linkis.entrance.interceptor.impl import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils, VariableUtils} import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.manager.label.utils.LabelUtil @@ -42,7 +43,11 @@ object CustomVariableUtils extends Logging { * : requestPersistTask * @return */ - def replaceCustomVar(jobRequest: JobRequest, runType: String): String = { + def replaceCustomVar( + jobRequest: JobRequest, + runType: String, + logAppender: java.lang.StringBuilder + ): String = { val variables: util.Map[String, String] = new util.HashMap[String, String]() val sender = Sender.getSender(Configuration.CLOUD_CONSOLE_VARIABLE_SPRING_APPLICATION_NAME.getValue) @@ -65,9 +70,26 @@ object CustomVariableUtils extends Logging { .getVariableMap(jobRequest.getParams) 
.asInstanceOf[util.HashMap[String, String]] variables.putAll(variableMap) - if (!variables.containsKey("user")) { - variables.put("user", jobRequest.getExecuteUser) + variables.put("user", jobRequest.getExecuteUser) + // User customization is not supported. If the user has customized it, add a warning log and replace it + if (variables.containsKey("submit_user")) { + logAppender.append( + LogUtils.generateInfo( + "submitUser variable will be replaced by system value:" + jobRequest.getSubmitUser + " -> " + variables + .get("submit_user") + "\n" + ) + ) } + if (variables.containsKey("execute_user")) { + logAppender.append( + LogUtils.generateInfo( + "executeUser variable will be replaced by system value:" + jobRequest.getExecuteUser + " -> " + variables + .get("execute_user") + "\n" + ) + ) + } + variables.put("execute_user", jobRequest.getExecuteUser) + variables.put("submit_user", jobRequest.getSubmitUser) VariableUtils.replace(jobRequest.getExecutionCode, runType, variables) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala index 8436ccc711..045cb51f88 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala @@ -57,6 +57,8 @@ object SparkExplain extends Explain { private val sy = Pattern.compile("sys\\.") private val scCancelAllJobs = Pattern.compile("sc\\.cancelAllJobs(\\s*)") private val runtime = Pattern.compile("Runtime\\.getRuntime") + private val LINE_BREAK = "\n" + private val LOG: Logger = LoggerFactory.getLogger(getClass) override def authPass(code: String, error: StringBuilder): Boolean = { if (EntranceConfiguration.SKIP_AUTH.getHotValue()) { @@ -99,6 
+101,7 @@ object SQLExplain extends Explain { private val LIMIT: String = "limit" private val LIMIT_UPPERCASE: String = "LIMIT" private val IDE_ALLOW_NO_LIMIT = "--set wds.linkis.engine.no.limit.allow=true" + private val LOG: Logger = LoggerFactory.getLogger(getClass) override def authPass(code: String, error: StringBuilder): Boolean = { true @@ -118,7 +121,8 @@ object SQLExplain extends Explain { logAppender: java.lang.StringBuilder ): Unit = { val fixedCode: ArrayBuffer[String] = new ArrayBuffer[String]() - val tempCode = SQLCommentHelper.dealComment(executionCode) + val tempCode1 = SQLCommentHelper.dealComment(executionCode) + val tempCode = SQLCommentHelper.replaceComment(tempCode1) val isNoLimitAllowed = Utils.tryCatch { IDE_ALLOW_NO_LIMIT_REGEX.findFirstIn(executionCode).isDefined } { case e: Exception => @@ -131,6 +135,8 @@ object SQLExplain extends Explain { .generateWarn("please pay attention ,SQL full export mode opened(请注意,SQL全量导出模式打开)\n") ) } + var isFirstTimePrintingLimit = true + var isFirstTimePrintingOverLimit = true if (tempCode.contains("""\;""")) { val semicolonIndexes = findRealSemicolonIndex(tempCode) var oldIndex = 0 @@ -140,20 +146,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } + // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not 
allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim @@ -167,20 +180,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } + // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. 
DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim @@ -210,6 +230,8 @@ object SQLExplain extends Explain { array.toArray } + private def addNoLimit(code: String) = code + NO_LIMIT_STRING + protected def needNoLimit(code: String): Boolean = code.endsWith(NO_LIMIT_STRING) def isSelectCmd(code: String): Boolean = { @@ -217,16 +239,17 @@ object SQLExplain extends Explain { return false } val realCode = cleanComment(code) - realCode.trim.split("\\s+")(0).toLowerCase().contains("select") + realCode.trim.split("\\s+")(0).toLowerCase(Locale.getDefault).contains("select") } - def continueWhenError: Boolean = false + // def continueWhenError = false def isSelectCmdNoLimit(cmd: String): Boolean = { if (StringUtils.isEmpty(cmd)) { return false } val realCode = cmd.trim + // limit is often the last in a sql statement, so you need to make a final judgment val arr = realCode.split("\\s+") val words = new ArrayBuffer[String]() arr foreach { w => @@ -235,8 +258,10 @@ object SQLExplain extends Explain { val a = words.toArray val length = a.length if (a.length > 1) { - val second_last = a(length - 2) - !"limit".equals(second_last.toLowerCase()) + val second_last = a(length - 2).toLowerCase(Locale.getDefault) + // for some case eg:"SELECT * from dual WHERE (1=1)LIMIT 1;" + val result = !("limit".equals(second_last) || second_last.contains(")limit")) + result } else { false } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala index d05dce4bc4..50b23df263 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala @@ -18,6 +18,7 @@ package org.apache.linkis.entrance.interceptor.impl import org.apache.linkis.common.utils.Logging +import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.interceptor.EntranceInterceptor import org.apache.linkis.governance.common.conf.GovernanceCommonConf @@ -44,6 +45,34 @@ class StorePathEntranceInterceptor extends EntranceInterceptor with Logging { * @return */ override def apply(jobReq: JobRequest, logAppender: java.lang.StringBuilder): JobRequest = { + val paramsMap = if (null != jobReq.getParams) { + jobReq.getParams + } else { + new util.HashMap[String, AnyRef]() + } + var runtimeMap = TaskUtils.getRuntimeMap(paramsMap) + if (null == runtimeMap || runtimeMap.isEmpty) { + runtimeMap = new util.HashMap[String, AnyRef]() + } + if (runtimeMap.containsKey(GovernanceCommonConf.RESULT_SET_STORE_PATH.key)) { + return jobReq + } + if (EntranceConfiguration.ENABLE_HDFS_RES_DIR_PRIVATE) { + val parentPath = generateUserPrivateResDir(jobReq) + runtimeMap.put(GovernanceCommonConf.RESULT_SET_STORE_PATH.key, parentPath) + TaskUtils.addRuntimeMap(paramsMap, runtimeMap) + val params = new util.HashMap[String, AnyRef]() + paramsMap.asScala.foreach(kv => params.put(kv._1, kv._2)) + jobReq.setResultLocation(parentPath) + jobReq.setParams(params) + jobReq + } else { + jobReq + } + + } + + private def generateUserPrivateResDir(jobReq: JobRequest): String = { var parentPath: String = GovernanceCommonConf.RESULT_SET_STORE_PATH.getValue if (!parentPath.endsWith("/")) parentPath += "/" parentPath += jobReq.getExecuteUser @@ -61,23 +90,7 @@ class StorePathEntranceInterceptor extends EntranceInterceptor with Logging { // multi linkis cluster should not use same root folder , in which case result file may 
be overwrite parentPath += DateFormatUtils.format(System.currentTimeMillis, "yyyy-MM-dd/HHmmss") + "/" + userCreator._2 + "/" + jobReq.getId - val paramsMap = if (null != jobReq.getParams) { - jobReq.getParams - } else { - new util.HashMap[String, AnyRef]() - } - - var runtimeMap = TaskUtils.getRuntimeMap(paramsMap) - if (null == runtimeMap || runtimeMap.isEmpty) { - runtimeMap = new util.HashMap[String, AnyRef]() - } - runtimeMap.put(GovernanceCommonConf.RESULT_SET_STORE_PATH.key, parentPath) - TaskUtils.addRuntimeMap(paramsMap, runtimeMap) - val params = new util.HashMap[String, AnyRef]() - paramsMap.asScala.foreach(kv => params.put(kv._1, kv._2)) - jobReq.setResultLocation(parentPath) - jobReq.setParams(params) - jobReq + parentPath } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/OperateRequest.java b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala similarity index 59% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/OperateRequest.java rename to linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala index 3e8e52b62d..6accd30bd5 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/OperateRequest.java +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala @@ -15,25 +15,22 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.common.protocol; +package org.apache.linkis.entrance.interceptor.impl -import org.apache.linkis.governance.common.exception.GovernanceErrorException; +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.interceptor.EntranceInterceptor +import org.apache.linkis.governance.common.entity.job.JobRequest -import java.util.Map; +import java.lang -public interface OperateRequest { - String getUser(); +class TemplateConfInterceptor extends EntranceInterceptor { - Map getParameters(); - - String OPERATOR_NAME_KEY = "__operator_name__"; - - static String getOperationName(Map parameters) { - Object obj = parameters.get(OPERATOR_NAME_KEY); - if (obj instanceof String) { - return (String) obj; + override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + if (EntranceConfiguration.TEMPLATE_CONF_SWITCH.getValue) { + TemplateConfUtils.dealWithTemplateConf(jobRequest, logAppender) } else { - throw new GovernanceErrorException(20031, OPERATOR_NAME_KEY + " does not exist."); + jobRequest } } + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala new file mode 100644 index 0000000000..99ae8b07df --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala @@ -0,0 +1,278 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.exception.LinkisCommonErrorException +import org.apache.linkis.common.log.LogUtils +import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.governance.common.entity.TemplateConfKey +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.conf.{TemplateConfRequest, TemplateConfResponse} +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.{lang, util} +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ + +import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} + +object TemplateConfUtils extends Logging { + + val confTemplateNameKey = "ec.resource.name" + + private val templateCache: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateUuid: 
String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateUuid:$templateUuid") + val res = sender.ask(new TemplateConfRequest(templateUuid)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + logger + .warn(s"load template configuration data templateUuid:$templateUuid loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, plaese check warn log") + } + templateList + } + + }) + + private val templateCacheName: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateName: String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateName:$templateName") + val res = sender.ask(new TemplateConfRequest(null, templateName)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + logger + .warn(s"load template configuration data templateName:$templateName loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, plaese check warn log") + } + templateList + } + + }) + + /** + * Get user-defined template conf name value + * + * @param code + * :code + * @param codeType + * :sql,hql,scala + * @return + * String the last one of template conf name + */ 
+ def getCustomTemplateConfName(code: String, codeType: String): String = { + var templateConfName = ""; + + var varString: String = null + var errString: String = null + var rightVarString: String = null + + val languageType = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType) + + languageType match { + case CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL => + varString = s"""\\s*---@set ${confTemplateNameKey}=\\s*.+\\s*""" + errString = """\s*---@.*""" + case CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON | CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL => + varString = s"""\\s*##@set ${confTemplateNameKey}=\\s*.+\\s*""" + errString = """\s*##@""" + case CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA => + varString = s"""\\s*///@set ${confTemplateNameKey}=\\s*.+\\s*""" + errString = """\s*///@.+""" + case _ => + return templateConfName + } + + val customRegex = varString.r.unanchored + val errRegex = errString.r.unanchored + var codeRes = code.replaceAll("\r\n", "\n") + // only allow set at fisrt line + val res = codeRes.split("\n") + if (res.size > 0) { + val str = res(0) + str match { + case customRegex() => + val clearStr = if (str.endsWith(";")) str.substring(0, str.length - 1) else str + val res: Array[String] = clearStr.split("=") + if (res != null && res.length == 2) { + templateConfName = res(1).trim + logger.info(s"get template conf name $templateConfName") + } else { + if (res.length > 2) { + throw new LinkisCommonErrorException( + 20044, + s"$str template conf name var defined uncorrectly" + ) + } else { + throw new LinkisCommonErrorException( + 20045, + s"template conf name var was defined uncorrectly:$str" + ) + } + } + case errRegex() => + logger.warn( + s"The template conf name var definition is incorrect:$str,if it is not used, it will not run the error, but it is recommended to use the correct specification to define" + ) + case _ => + } + } + templateConfName + } + + def dealWithTemplateConf(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + 
jobRequest match { + case requestPersistTask: JobRequest => + val params = requestPersistTask.getParams + val startMap = TaskUtils.getStartupMap(params) + + var templateConflist: util.List[TemplateConfKey] = new util.ArrayList[TemplateConfKey]() + var templateName: String = "" + // only for Creator:IDE, try to get template conf name from code string. eg:---@set ec.resource.name=xxxx + val (user, creator) = LabelUtil.getUserCreator(jobRequest.getLabels) + if (EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue.equals(creator)) { + val codeType = LabelUtil.getCodeType(jobRequest.getLabels) + templateName = + TemplateConfUtils.getCustomTemplateConfName(jobRequest.getExecutionCode, codeType) + } + + // code template name > start params template uuid + if (StringUtils.isBlank(templateName)) { + logger.debug("jobRequest startMap param template name is empty") + + logger.info("jobRequest startMap params :{} ", startMap) + val templateUuid = startMap.getOrDefault(LabelKeyConstant.TEMPLATE_CONF_KEY, "").toString + + if (StringUtils.isBlank(templateUuid)) { + logger.debug("jobRequest startMap param template id is empty") + } else { + logger.info("try to get template conf list with template uid:{} ", templateUuid) + logAppender.append( + LogUtils + .generateInfo(s"Try to get template conf data with template uid:$templateUuid\nn") + ) + templateConflist = templateCache.get(templateUuid) + if (templateConflist == null || templateConflist.size() == 0) { + logAppender.append( + LogUtils.generateWarn( + s"Can not get any template conf data with template uid:$templateUuid\n" + ) + ) + } + } + } else { + logger.info("Try to get template conf list with template name:[{}]", templateName) + logAppender.append( + LogUtils + .generateInfo(s"Try to get template conf data with template name:[$templateName]\n") + ) + templateConflist = templateCacheName.get(templateName) + if (templateConflist == null || templateConflist.size() == 0) { + logAppender.append( + 
LogUtils.generateWarn( + s"Can not get any template conf data with template name:$templateName\n" + ) + ) + } else { + // to remove metedata start param + TaskUtils.clearStartupMap(params) + + if (EntranceConfiguration.TEMPLATE_CONF_ADD_ONCE_LABEL_ENABLE.getValue) { + val onceLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel( + classOf[ExecuteOnceLabel] + ) + logger.info("Add once label for task id:{}", requestPersistTask.getId.toString) + requestPersistTask.getLabels.add(onceLabel) + } + } + } + + if (templateConflist != null && templateConflist.size() > 0) { + val keyList = new util.HashMap[String, AnyRef]() + templateConflist.asScala.foreach(ele => { + val key = ele.getKey + val oldValue = startMap.get(key) + if (oldValue != null && StringUtils.isNotBlank(oldValue.toString)) { + logger.info(s"key:$key value:$oldValue not empty, skip to deal") + } else { + val newValue = ele.getConfigValue + logger.info(s"key:$key value:$newValue will add to startMap params") + if (TaskUtils.isWithDebugInfo(params)) { + logAppender.append(LogUtils.generateInfo(s"add $key=$newValue\n")) + } + keyList.put(key, newValue) + } + + }) + if (keyList.size() > 0) { + TaskUtils.addStartupMap(params, keyList) + } + } + + case _ => + } + jobRequest + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala index 573c134493..653e9ad78b 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala @@ -67,7 +67,7 @@ object UserCreatorIPCheckUtils extends Logging { def checkUserIp(jobRequest: JobRequest, logAppender: 
lang.StringBuilder): JobRequest = { // Get IP address - val jobIp = jobRequest.getSource.get(TaskConstant.REQUEST_IP) + val jobIp = jobRequest.getSource.getOrDefault(TaskConstant.REQUEST_IP, "") logger.debug(s"start to checkTenantLabel $jobIp") if (StringUtils.isNotBlank(jobIp)) { jobRequest match { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala index 0487a238cf..b761e20ae9 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala @@ -41,10 +41,26 @@ class VarSubstitutionInterceptor extends EntranceInterceptor { LogUtils.generateInfo("Program is substituting variables for you") + "\n" ) val codeType = LabelUtil.getCodeType(jobRequest.getLabels) - jobRequest.setExecutionCode(CustomVariableUtils.replaceCustomVar(jobRequest, codeType)) + val realCode = CustomVariableUtils.replaceCustomVar(jobRequest, codeType, logAppender) + jobRequest.setExecutionCode(realCode) logAppender.append( LogUtils.generateInfo("Variables substitution ended successfully") + "\n" ) + // print code after variables substitution + logAppender.append( + LogUtils.generateInfo( + "You have submitted a new job, script code (after variable substitution) is" + ) + "\n" + ); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ) + logAppender.append(realCode); + logAppender.append("\n"); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ); + jobRequest } { case e: VarSubstitutionException => diff 
--git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala index 44474ee0ca..3c5173a807 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala @@ -18,7 +18,12 @@ package org.apache.linkis.entrance.log class Cache(maxCapacity: Int) { - val cachedLogs: LoopArray[String] = LoopArray[String](maxCapacity) + var cachedLogs: LoopArray[String] = LoopArray[String](maxCapacity) + + def clearCachedLogs(): Unit = { + this.cachedLogs = null + } + } object Cache { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala index 483cf9ab43..406d43e5bc 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala @@ -19,7 +19,11 @@ package org.apache.linkis.entrance.log import org.apache.linkis.common.io.{Fs, FsPath} import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.exception.LogReadFailedException import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils import java.io.{InputStream, IOException} import java.util @@ -36,13 +40,26 @@ class CacheLogReader(logPath: String, charset: String, sharedCache: Cache, user: var closed = false private def createInputStream: InputStream = { + if (!logPath.contains(user)) { + throw new 
LogReadFailedException( + s"${user} does not have permission to read the path $logPath" + ) + } + val fsPath = new FsPath(logPath) if (fileSystem == null) lock synchronized { if (fileSystem == null) { - fileSystem = FSFactory.getFsByProxyUser(new FsPath(logPath), user) + + fileSystem = + if (StorageUtils.isHDFSPath(fsPath) && EntranceConfiguration.ENABLE_HDFS_JVM_USER) { + FSFactory.getFs(new FsPath(logPath)).asInstanceOf[FileSystem] + } else { + FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] + } + fileSystem.init(new util.HashMap[String, String]()) } } - val inputStream: InputStream = fileSystem.read(new FsPath(logPath)) + val inputStream: InputStream = fileSystem.read(fsPath) inputStream } @@ -51,21 +68,39 @@ class CacheLogReader(logPath: String, charset: String, sharedCache: Cache, user: } override protected def readLog(deal: String => Unit, fromLine: Int, size: Int): Int = { - if (!sharedCache.cachedLogs.nonEmpty) return super.readLog(deal, fromLine, size) + if (sharedCache.cachedLogs == null || sharedCache.cachedLogs.isEmpty) { + return super.readLog(deal, fromLine, size) + } val min = sharedCache.cachedLogs.min val max = sharedCache.cachedLogs.max + + val fakeClearEleNums = sharedCache.cachedLogs.fakeClearEleNums + if (fromLine > max) return 0 - val from = fromLine - val to = if (fromLine >= min) { - if (size >= 0 && max >= fromLine + size) fromLine + size else max + 1 - } else { + + var from = fromLine + val end = + if (size >= 0 && max >= fromLine + size) { + fromLine + size + } else { + max + 1 + } + + var readNums = 0 + // The log may have been refreshed to the log file regularly and cannot be determined based on min. + if (fromLine < fakeClearEleNums) { // If you are getting it from a file, you don't need to read the cached data again. In this case, you can guarantee that the log will not be missing. 
- val read = super.readLog(deal, fromLine, size) - return read - } + readNums = super.readLog(deal, fromLine, size) + if ((fromLine + size) < min) { + return readNums + } else { + from = from + readNums + } + } else {} + + (from until end) map sharedCache.cachedLogs.get foreach deal + end - from + readNums - (from until to) map sharedCache.cachedLogs.get foreach deal - to - fromLine } @throws[IOException] diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala index 9028c469ab..8f1cea1b18 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala @@ -33,20 +33,26 @@ class CacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: def getCache: Option[Cache] = Some(sharedCache) private def cache(msg: String): Unit = { + if (sharedCache.cachedLogs == null) { + return + } this synchronized { - val removed = sharedCache.cachedLogs.add(msg) + val isNextOneEmpty = sharedCache.cachedLogs.isNextOneEmpty val currentTime = new Date(System.currentTimeMillis()) - if (removed != null || currentTime.after(pushTime)) { + + if (isNextOneEmpty == false || currentTime.after(pushTime)) { val logs = sharedCache.cachedLogs.toList val sb = new StringBuilder - if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) + // need append latest msg before clear + sb.append(msg) sharedCache.cachedLogs.fakeClear() super.write(sb.toString()) pushTime.setTime( currentTime.getTime + EntranceConfiguration.LOG_PUSH_INTERVAL_TIME.getValue ) } + sharedCache.cachedLogs.add(msg) } } @@ -63,10 +69,12 @@ class CacheLogWriter(logPath: String, charset: String, 
sharedCache: Cache, user: override def flush(): Unit = { val sb = new StringBuilder - sharedCache.cachedLogs.toList - .filter(StringUtils.isNotEmpty) - .foreach(sb.append(_).append("\n")) - sharedCache.cachedLogs.clear() + if (sharedCache.cachedLogs != null) { + sharedCache.cachedLogs.toList + .filter(StringUtils.isNotEmpty) + .foreach(sb.append(_).append("\n")) + sharedCache.cachedLogs.clear() + } super.write(sb.toString()) super.flush() } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala index 54914b6002..4b082342ce 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala @@ -18,6 +18,12 @@ package org.apache.linkis.entrance.log import org.apache.linkis.errorcode.client.handler.LinkisErrorCodeHandler +import org.apache.linkis.errorcode.client.manager.LinkisErrorCodeManager +import org.apache.linkis.errorcode.common.LinkisErrorCode + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter abstract class ErrorCodeManager { @@ -35,6 +41,22 @@ abstract class ErrorCodeManager { None } + def errorMatchAndGetContent(log: String): Option[(String, String, String)] = { + getErrorCodes.foreach(e => + if (e.regex.findFirstIn(log).isDefined) { + val matched = e.regex.unapplySeq(log) + if (matched.nonEmpty) { + return Some( + e.code, + e.message.format(matched.get: _*), + e.regex.findFirstIn(log).getOrElse("") + ) + } else Some(e.code, e.message, "") + } + ) + None + } + } /** @@ -44,7 +66,24 @@ object FlexibleErrorCodeManager extends ErrorCodeManager { private val errorCodeHandler = LinkisErrorCodeHandler.getInstance() - override def getErrorCodes: Array[ErrorCode] = 
Array.empty + private val linkisErrorCodeManager = LinkisErrorCodeManager.getInstance + + override def getErrorCodes: Array[ErrorCode] = { + val errorCodes: util.List[LinkisErrorCode] = linkisErrorCodeManager.getLinkisErrorCodes + if (errorCodes == null) { + Array.empty + } else { + errorCodes.asScala + .map(linkisErrorCode => + ErrorCode( + linkisErrorCode.getErrorRegex, + linkisErrorCode.getErrorCode, + linkisErrorCode.getErrorDesc + ) + ) + .toArray + } + } override def errorMatch(log: String): Option[(String, String)] = { val errorCodes = errorCodeHandler.handle(log) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala index 24633dfbb2..ff04640afa 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala @@ -37,11 +37,15 @@ import java.util class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: String) extends LogWriter(charset) { - if (StringUtils.isBlank(logPath)) + if (StringUtils.isBlank(logPath)) { throw new EntranceErrorException(LOGPATH_NOT_NULL.getErrorCode, LOGPATH_NOT_NULL.getErrorDesc) + } - protected var fileSystem = + protected var fileSystem = if (EntranceConfiguration.ENABLE_HDFS_JVM_USER) { + FSFactory.getFs(new FsPath(logPath)).asInstanceOf[FileSystem] + } else { FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] + } override protected var outputStream: OutputStream = null @@ -55,7 +59,12 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u private def init(): Unit = { fileSystem.init(new util.HashMap[String, String]()) - 
FileSystemUtils.createNewFileWithFileSystem(fileSystem, new FsPath(logPath), user, true) + FileSystemUtils.createNewFileAndSetOwnerWithFileSystem( + fileSystem, + new FsPath(logPath), + user, + true + ) } @throws[IOException] @@ -91,13 +100,15 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u def getCache: Option[Cache] = Some(sharedCache) private def cache(msg: String): Unit = { + if (sharedCache.cachedLogs == null) { + return + } WRITE_LOCKER synchronized { - val removed = sharedCache.cachedLogs.add(msg) + val isNextOneEmpty = sharedCache.cachedLogs.isNextOneEmpty val currentTime = new Date(System.currentTimeMillis()) - if (removed != null || currentTime.after(pushTime)) { + if (isNextOneEmpty == false || currentTime.after(pushTime)) { val logs = sharedCache.cachedLogs.toList val sb = new StringBuilder - if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) sharedCache.cachedLogs.fakeClear() writeToFile(sb.toString()) @@ -105,17 +116,17 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u currentTime.getTime + EntranceConfiguration.LOG_PUSH_INTERVAL_TIME.getValue ) } + sharedCache.cachedLogs.add(msg) } } private def writeToFile(msg: String): Unit = WRITE_LOCKER synchronized { - val log = - if (!firstWrite) "\n" + msg - else { - logger.info(s"$toString write first one line log") - firstWrite = false - msg - } + val log = msg + if (firstWrite) { + logger.info(s"$toString write first one line log") + firstWrite = false + msg + } Utils.tryAndWarnMsg { getOutputStream.write(log.getBytes(charset)) }(s"$toString error when write query log to outputStream.") @@ -133,10 +144,12 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u override def flush(): Unit = { val sb = new StringBuilder - sharedCache.cachedLogs.toList - .filter(_ != null) - .foreach(sb.append(_).append("\n")) - sharedCache.cachedLogs.clear() + if 
(sharedCache.cachedLogs != null) { + sharedCache.cachedLogs.toList + .filter(_ != null) + .foreach(sb.append(_).append("\n")) + sharedCache.cachedLogs.clear() + } writeToFile(sb.toString()) } @@ -146,6 +159,7 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u fileSystem.close() fileSystem = null }(s"$toString Error encounters when closing fileSystem") + sharedCache.clearCachedLogs() } override def toString: String = logPath diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala index 626a643a0b..19f4c5c6ad 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala @@ -17,6 +17,7 @@ package org.apache.linkis.entrance.log +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.EntranceContext import org.apache.linkis.entrance.job.EntranceExecutionJob @@ -59,10 +60,19 @@ abstract class LogManager extends LogListener with Logging { } } } - entranceExecutionJob.getLogWriter.foreach(logWriter => logWriter.write(log)) - errorCodeManager.foreach(_.errorMatch(log).foreach { case (code, errorMsg) => - errorCodeListener.foreach(_.onErrorCodeCreated(job, code, errorMsg)) + var writeLog = log + errorCodeManager.foreach(_.errorMatchAndGetContent(log).foreach { + case (code, errorMsg, targetMsg) => + if (!targetMsg.contains(LogUtils.ERROR_STR) && log.contains(LogUtils.ERROR_STR)) { + writeLog = LogUtils.generateERROR( + s"error code: $code, errorMsg: $errorMsg, errorLine: $targetMsg \n" + log + ) + } + errorCodeListener.foreach(_.onErrorCodeCreated(job, code, errorMsg)) + case _ => }) + 
entranceExecutionJob.getLogWriter.foreach(logWriter => logWriter.write(writeLog)) + case _ => } } { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala index 1d5f0cbda9..da7f058fd8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala @@ -115,8 +115,11 @@ abstract class LogReader(charset: String) extends Closeable with Logging { } protected def readLog(deal: String => Unit, fromLine: Int, size: Int = 100): Int = { - val from = if (fromLine < 0) 0 else fromLine - var line, read = 0 + + // fromline param with begin 1 ,if set 0 missing first line + val from = if (fromLine < 1) 1 else fromLine + var line = 1 + var read = 0 val inputStream = getInputStream val lineIterator = IOUtils.lineIterator(inputStream, charset) Utils.tryFinally(while (lineIterator.hasNext && (read < size || size < 0)) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala index 155d8c7bd5..ff0dfbba84 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala @@ -23,21 +23,33 @@ class LoopArray[T](maxCapacity: Int) { def this() = this(32) + // realSize 游标之前的数据 已经被重写覆盖了 + // The data before realSize cursor has been overwritten by rewriting protected[this] var realSize = 0 - private var flag = 0 + + // the loop begin indx + private var front = 0 + + // the loop last index + // 尾部 下一个存储的游标 private var 
tail = 0 + private var clearEleNums = 0 + def add(event: T): T = { var t = null.asInstanceOf[T] eventQueue synchronized { - val index = (tail + 1) % maxCapacity - if (index == flag) { - flag = (flag + 1) % maxCapacity + val nextIndex = (tail + 1) % maxCapacity + // 首尾相遇 第一次循环队列满了,后续所有add动作 nextIndex和front都是相等的 front指针不断往前循环移动 + // When the first and last ends meet, the first circular queue is full, and all subsequent add actions nextIndex and front are equal. + // The front pointer continues to move forward in a circular motion. + if (nextIndex == front) { + front = (front + 1) % maxCapacity realSize += 1 } t = eventQueue(tail).asInstanceOf[T] eventQueue(tail) = event - tail = index + tail = nextIndex } t } @@ -51,18 +63,19 @@ class LoopArray[T](maxCapacity: Int) { } else if (index > _max) { throw new IllegalArgumentException("The index " + index + " must be less than " + _max) } - val _index = (flag + (index - realSize)) % maxCapacity + val _index = (front + (index - realSize + maxCapacity - 1)) % maxCapacity eventQueue(_index).asInstanceOf[T] } def clear(): Unit = eventQueue synchronized { - flag = 0 + front = 0 tail = 0 realSize = 0 (0 until maxCapacity).foreach(eventQueue(_) = null) } def fakeClear(): Unit = eventQueue synchronized { + clearEleNums = clearEleNums + size (0 until maxCapacity).foreach(eventQueue(_) = null) } @@ -73,16 +86,34 @@ class LoopArray[T](maxCapacity: Int) { if (_size == 0) { _size = 1 } - realSize + _size - 1 + realSize + _size } - private def filledSize = if (tail >= flag) tail - flag else tail + maxCapacity - flag + def fakeClearEleNums: Int = clearEleNums + + private def filledSize = { + if (tail == front && tail == 0) { + 0 + } else if (tail > front) { + tail - front + } else { + tail + maxCapacity - front + } + } def size: Int = filledSize def isFull: Boolean = filledSize == maxCapacity - 1 - def nonEmpty: Boolean = size > 0 + // If it is not empty, it means that the loop queue is full this round. 
+ // 不为空 说明本轮 循环队列满了 + def isNextOneEmpty(): Boolean = { + + eventQueue(tail).asInstanceOf[T] == null + + } + + def isEmpty: Boolean = size == 0 def toList: List[T] = toIndexedSeq.toList diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala index 4b9b4570f1..e5c657023e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala @@ -20,19 +20,15 @@ package org.apache.linkis.entrance.orchestrator.plugin import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.orchestrator.plugin.UserParallelOrchestratorPlugin import org.apache.linkis.rpc.Sender -import org.apache.linkis.server.BDPJettyServerHelper - -import org.apache.commons.lang3.StringUtils import java.util import java.util.concurrent.TimeUnit @@ -43,10 +39,6 @@ import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} class EntranceUserParallelOrchestratorPlugin extends 
UserParallelOrchestratorPlugin with Logging { - private val SPLIT = "," - - private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - private def getDefaultMaxRuningNum: Int = { EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue() } @@ -62,7 +54,7 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu .build(new CacheLoader[String, Integer]() { override def load(key: String): Integer = { - val (userCreatorLabel, engineTypeLabel) = fromKeyGetLabels(key) + val (userCreatorLabel, engineTypeLabel) = EntranceUtils.fromKeyGetLabels(key) val keyAndValue = Utils.tryAndWarnMsg { sender .ask(RequestQueryEngineConfigWithGlobalConfig(userCreatorLabel, engineTypeLabel)) @@ -75,10 +67,8 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu null == keyAndValue || !keyAndValue .containsKey(EntranceConfiguration.WDS_LINKIS_INSTANCE.key) ) { - logger.error( - s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + - s"will use default value ${EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue()}。All config map: ${BDPJettyServerHelper.gson - .toJson(keyAndValue)}" + logger.warn( + s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + s"will use default value " ) } val maxRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue, true) @@ -102,27 +92,7 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu if (null == userCreatorLabel || null == engineTypeLabel) { return getDefaultMaxRuningNum } - configCache.get(getKey(userCreatorLabel, engineTypeLabel)) - } - - private def getKey( - userCreatorLabel: UserCreatorLabel, - engineTypeLabel: EngineTypeLabel - ): String = { - userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue - } - - private def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { - if (StringUtils.isBlank(key)) (null, null) - 
else { - val labelStringValues = key.split(SPLIT) - if (labelStringValues.length < 2) return (null, null) - val userCreatorLabel = labelFactory - .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) - val engineTypeLabel = labelFactory - .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) - (userCreatorLabel, engineTypeLabel) - } + configCache.get(EntranceUtils.getUserCreatorEcTypeKey(userCreatorLabel, engineTypeLabel)) } override def isReady: Boolean = true diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala index afc18bdc19..58fc1f45c3 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala @@ -195,7 +195,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) } private def generateAndVerifyClusterLabel(labels: util.Map[String, Label[_]]): Unit = { - if (!Configuration.IS_MULTIPLE_YARN_CLUSTER.getValue.asInstanceOf[Boolean]) { + if (!Configuration.IS_MULTIPLE_YARN_CLUSTER) { return } var clusterLabel = labels diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala index 02d1a6a08e..2ba98438e8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala @@ -17,20 +17,12 @@ package org.apache.linkis.entrance.persistence -import org.apache.linkis.common.io.{FsPath, MetaData, Record} -import org.apache.linkis.common.io.resultset.ResultSet -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} -import org.apache.linkis.entrance.execute.StorePathExecuteRequest -import org.apache.linkis.entrance.job.{EntranceExecuteRequest, EntranceExecutionJob} -import org.apache.linkis.entrance.scheduler.cache.CacheOutputExecuteResponse -import org.apache.linkis.governance.common.entity.job.SubJobDetail import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse} import org.apache.linkis.scheduler.queue.Job -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriterFactory} -import org.apache.linkis.storage.utils.FileSystemUtils +import org.apache.linkis.storage.resultset.ResultSetFactory -import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils class EntranceResultSetEngine extends ResultSetEngine with Logging { @@ -46,15 +38,11 @@ class EntranceResultSetEngine extends ResultSetEngine with Logging { EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc ) } - case CacheOutputExecuteResponse(alias, output) => - if (ResultSetFactory.getInstance.isResultSetPath(output)) { - getDir(output) - } else { - throw new EntranceErrorException( - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc - ) - } + case _ => + throw new EntranceErrorException( + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc + ) } } @@ -64,7 +52,7 @@ class EntranceResultSetEngine extends ResultSetEngine with 
Logging { } else { val arr = str.split("/").filter(StringUtils.isNotBlank) if (arr.length <= 2) { - return str + str } else { str.substring(0, str.lastIndexOf("/")) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala new file mode 100644 index 0000000000..26d8a60c4c --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.job.EntranceExecutionJob +import org.apache.linkis.entrance.utils.JobHistoryHelper +import org.apache.linkis.scheduler.SchedulerContext +import org.apache.linkis.scheduler.queue.Group +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer + +import java.util +import java.util.concurrent.ExecutorService + +import scala.collection.JavaConverters.collectionAsScalaIterableConverter + +class EntranceFIFOUserConsumer( + schedulerContext: SchedulerContext, + executeService: ExecutorService, + private var group: Group +) extends FIFOUserConsumer(schedulerContext, executeService, group) + with Logging { + + override def loop(): Unit = { + // When offlineFlag=true, the unsubmitted tasks will be failover, and the running tasks will wait for completion. + // In this case,super.loop only submits the retry task, but the retry task can failover and speed up the entrance offline + // (当offlineFlag=true时,未提交任务会被故障转移,运行中任务会等待完成.此时super.loop只会提交重试任务,但是重试任务完全可以故障转移,加快entrance下线) + schedulerContext match { + case entranceSchedulerContext: EntranceSchedulerContext => + if ( + entranceSchedulerContext.getOfflineFlag && EntranceConfiguration.ENTRANCE_FAILOVER_RETRY_JOB_ENABLED.getValue + ) { + val jobs = scanAllRetryJobsAndRemove() + if (!jobs.isEmpty) { + val ids = new util.ArrayList[Long]() + jobs.asScala.foreach { + case entranceJob: EntranceExecutionJob => + entranceJob.getLogWriter.foreach(_.close()) + ids.add(entranceJob.getJobRequest.getId) + case _ => + } + JobHistoryHelper.updateBatchInstancesEmpty(ids) + } + Utils.tryQuietly(Thread.sleep(5000)) + return + } + case _ => + } + + // general logic + super.loop() + + } + + override def runScheduleIntercept: Boolean = { + val consumers = getSchedulerContext.getOrCreateConsumerManager.listConsumers + var 
creatorRunningJobNum = 0 + // APP_TEST_hadoop_hive or IDE_hadoop_hive + val groupNameStr = getGroup.getGroupName + val groupNames = groupNameStr.split("_") + val length = groupNames.length + if (length < 3) return true + // APP_TEST + val lastIndex = groupNameStr.lastIndexOf("_") + val secondLastIndex = groupNameStr.lastIndexOf("_", lastIndex - 1) + val creatorName = groupNameStr.substring(0, secondLastIndex) + // hive + val ecType = groupNames(length - 1) + for (consumer <- consumers) { + val groupName = consumer.getGroup.getGroupName + if (groupName.startsWith(creatorName) && groupName.endsWith(ecType)) { + creatorRunningJobNum += consumer.getRunningEvents.length + } + } + val creatorECTypeMaxRunningJobs = + CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creatorName, ecType) + if (logger.isDebugEnabled) { + logger.debug( + s"Creator: $creatorName EC:$ecType there are currently:$creatorRunningJobNum jobs running and maximum limit: $creatorECTypeMaxRunningJobs" + ) + } + if (creatorRunningJobNum > creatorECTypeMaxRunningJobs) { + logger.error( + s"Creator: $creatorName EC:$ecType there are currently:$creatorRunningJobNum jobs running that exceed the maximum limit: $creatorECTypeMaxRunningJobs" + ) + false + } else true + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala index 7f16dd2463..de4c025e30 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala @@ -17,30 +17,20 @@ package org.apache.linkis.entrance.scheduler -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.conf.{CommonVars, Configuration} 
import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.execute.EntranceJob +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.instance.label.client.InstanceLabelClient -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.manager.label.entity.engine.{ - ConcurrentEngineConnLabel, - EngineTypeLabel, - UserCreatorLabel -} -import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.manager.label.utils.LabelUtil -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.rpc.Sender import org.apache.linkis.scheduler.queue.{Group, GroupFactory, SchedulerEvent} import org.apache.linkis.scheduler.queue.parallelqueue.ParallelGroup @@ -51,8 +41,6 @@ import java.util import java.util.concurrent.TimeUnit import java.util.regex.Pattern -import scala.collection.JavaConverters._ - import com.google.common.cache.{Cache, CacheBuilder} class EntranceGroupFactory extends GroupFactory with Logging { @@ -63,7 +51,7 @@ class EntranceGroupFactory extends GroupFactory with Logging { .maximumSize(EntranceConfiguration.GROUP_CACHE_MAX.getValue) .build() - private val GROUP_MAX_CAPACITY = CommonVars("wds.linkis.entrance.max.capacity", 2000) + private val GROUP_MAX_CAPACITY = 
CommonVars("wds.linkis.entrance.max.capacity", 1000) private val SPECIFIED_USERNAME_REGEX = CommonVars("wds.linkis.entrance.specified.username.regex", "hduser.*") @@ -81,29 +69,16 @@ class EntranceGroupFactory extends GroupFactory with Logging { } override def getOrCreateGroup(event: SchedulerEvent): Group = { - val (labels, params) = event match { + val labels = event match { case job: EntranceJob => - (job.getJobRequest.getLabels, job.getJobRequest.getParams) + job.getJobRequest.getLabels + case _ => + throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - val groupName = EntranceGroupFactory.getGroupNameByLabels(labels, params) + val groupName = EntranceGroupFactory.getGroupNameByLabels(labels) val cacheGroup = groupNameToGroups.getIfPresent(groupName) if (null == cacheGroup) synchronized { val maxAskExecutorTimes = EntranceConfiguration.MAX_ASK_EXECUTOR_TIME.getValue.toLong - if (groupName.startsWith(EntranceGroupFactory.CONCURRENT)) { - if (null == groupNameToGroups.getIfPresent(groupName)) synchronized { - if (null == groupNameToGroups.getIfPresent(groupName)) { - val group = new ParallelGroup( - groupName, - 100, - EntranceConfiguration.CONCURRENT_FACTORY_MAX_CAPACITY.getValue - ) - group.setMaxRunningJobs(EntranceConfiguration.CONCURRENT_MAX_RUNNING_JOBS.getValue) - group.setMaxAskExecutorTimes(EntranceConfiguration.CONCURRENT_EXECUTOR_TIME.getValue) - groupNameToGroups.put(groupName, group) - return group - } - } - } val sender: Sender = Sender.getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) val userCreatorLabel: UserCreatorLabel = LabelUtil.getUserCreatorLabel(labels) @@ -141,8 +116,11 @@ class EntranceGroupFactory extends GroupFactory with Logging { group.setMaxRunningJobs(maxRunningJobs) group.setMaxAskExecutorTimes(maxAskExecutorTimes) groupNameToGroups.put(groupName, group) + group + } + else { + cacheGroup } - groupNameToGroups.getIfPresent(groupName) } override def 
getGroup(groupName: String): Group = { @@ -156,105 +134,40 @@ class EntranceGroupFactory extends GroupFactory with Logging { group } + /** + * User task concurrency control is controlled for multiple Entrances, which will be evenly + * distributed based on the number of existing Entrances + * @param keyAndValue + * @return + */ private def getUserMaxRunningJobs(keyAndValue: util.Map[String, String]): Int = { - var userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) - var entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length - val labelList = new util.ArrayList[Label[_]]() - val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory - .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) - labelList.add(offlineRouteLabel) - var offlineIns: Array[ServiceInstance] = null - Utils.tryAndWarn { - offlineIns = InstanceLabelClient.getInstance - .getInstanceFromLabel(labelList) - .asScala - .filter(l => - null != l && l.getApplicationName - .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) - ) - .toArray - } - if (null != offlineIns) { - logger.info(s"There are ${offlineIns.length} offlining instance.") - entranceNum = entranceNum - offlineIns.length - } - /* - Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. - */ - if (0 >= entranceNum) { - logger.error( - s"Got ${entranceNum} ${Sender.getThisServiceInstance.getApplicationName} instances." 
- ) - entranceNum = 1 - } + val userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) + val entranceNum = EntranceUtils.getRunningEntranceNumber() Math.max( EntranceConfiguration.ENTRANCE_INSTANCE_MIN.getValue, userDefinedRunningJobs / entranceNum - ); + ) } } object EntranceGroupFactory { - val CACHE = "_Cache" - - val CONCURRENT = "Concurrent_" - - def getGroupName( - creator: String, - user: String, - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - if (StringUtils.isNotEmpty(creator)) creator + "_" + user + cache - else EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue + "_" + user + cache - } - - def getGroupNameByLabels( - labels: java.util.List[Label[_]], - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - - val userCreator = labels.asScala.find(_.isInstanceOf[UserCreatorLabel]) - val engineType = labels.asScala.find(_.isInstanceOf[EngineTypeLabel]) - val concurrent = labels.asScala.find(_.isInstanceOf[ConcurrentEngineConnLabel]) - if (userCreator.isEmpty || engineType.isEmpty) { + /** + * Entrance group rule creator_username_engineType eg:IDE_PEACEWONG_SPARK + * @param labels + * @param params + * @return + */ + def getGroupNameByLabels(labels: java.util.List[Label[_]]): String = { + val userCreatorLabel = LabelUtil.getUserCreatorLabel(labels) + val engineTypeLabel = LabelUtil.getEngineTypeLabel(labels) + if (null == userCreatorLabel || null == engineTypeLabel) { throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - - if (concurrent.isDefined) { - - val engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - val groupName = CONCURRENT + engineTypeLabel.getEngineType - 
groupName - - } else { - val userCreatorLabel = userCreator.get.asInstanceOf[UserCreatorLabel] - - val engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - val groupName = - userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + cache - groupName - } + val groupName = + userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + groupName } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala new file mode 100644 index 0000000000..789e2ca2b1 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.utils.EntranceUtils +import org.apache.linkis.instance.label.client.InstanceLabelClient +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.rpc.Sender +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer +import org.apache.linkis.scheduler.queue.parallelqueue.{ParallelConsumerManager, ParallelGroup} + +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ + +class EntranceParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) + extends ParallelConsumerManager(maxParallelismUsers, schedulerName) { + + override protected def createConsumer(groupName: String): FIFOUserConsumer = { + val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) + new EntranceFIFOUserConsumer(getSchedulerContext, getOrCreateExecutorService, group) + } + + if (EntranceConfiguration.ENTRANCE_GROUP_SCAN_ENABLED.getValue) { + Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable { + override def run(): Unit = Utils.tryAndWarn { + // refresh all group maxAllowRunningJobs + refreshAllGroupMaxAllowRunningJobs(EntranceUtils.getRunningEntranceNumber()) + logger.info("Finished to refresh consumer group maxAllowRunningJobs") + } + }, + EntranceConfiguration.ENTRANCE_GROUP_SCAN_INIT_TIME.getValue, + EntranceConfiguration.ENTRANCE_GROUP_SCAN_INTERVAL.getValue, + TimeUnit.MILLISECONDS + ) + } + + def refreshAllGroupMaxAllowRunningJobs(validInsCount: Int): Unit = { + listConsumers() + .foreach(item => { + 
item.getGroup match { + case group: ParallelGroup => + val maxAllowRunningJobs = Math.round(group.getMaxRunningJobs / validInsCount) + group.setMaxAllowRunningJobs(maxAllowRunningJobs) + logger + .info( + "group {} refresh maxAllowRunningJobs => {}/{}={}", + Array( + group.getGroupName, + group.getMaxRunningJobs.toString, + validInsCount.toString, + maxAllowRunningJobs.toString + ): _* + ) + case _ => + } + }) + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala index d5de2cc2da..1638b0fb1c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala @@ -28,6 +28,11 @@ class EntranceSchedulerContext extends SchedulerContext { private var consumerManager: ConsumerManager = _ private var executorManager: ExecutorManager = _ + private var offlineFlag: Boolean = false + + def setOfflineFlag(offlineFlag: Boolean): Unit = this.offlineFlag = offlineFlag + def getOfflineFlag: Boolean = this.offlineFlag + def this( groupFactory: GroupFactory, consumerManager: ConsumerManager, diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala deleted file mode 100644 index 47a6ce9e9e..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation 
(ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.scheduler.executer.OutputExecuteResponse - -case class CacheOutputExecuteResponse(alias: String, output: String) extends OutputExecuteResponse { - override def getOutput: String = output -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala deleted file mode 100644 index 65bbbd39b4..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.common.io.FsPath -import org.apache.linkis.common.utils.Utils -import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ -import org.apache.linkis.entrance.exception.CacheNotReadyException -import org.apache.linkis.entrance.execute.EntranceJob -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.utils.JobHistoryHelper -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.manager.label.constant.LabelKeyConstant -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils -import org.apache.linkis.scheduler.SchedulerContext -import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ -import org.apache.linkis.scheduler.exception.SchedulerErrorException -import org.apache.linkis.scheduler.executer.SuccessExecuteResponse -import org.apache.linkis.scheduler.queue.Group -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.server.BDPJettyServerHelper -import org.apache.linkis.storage.FSFactory -import org.apache.linkis.storage.fs.FileSystem - -import org.apache.commons.io.FilenameUtils -import org.apache.commons.lang3.StringUtils - -import java.util.concurrent.ExecutorService - -import scala.collection.JavaConverters._ - -import com.google.common.collect.Lists - -class ReadCacheConsumer( - schedulerContext: SchedulerContext, - executeService: ExecutorService, - private 
var group: Group, - persistenceManager: PersistenceManager -) extends FIFOUserConsumer(schedulerContext, executeService, group) { - - override protected def loop(): Unit = { - val event = Option(getConsumeQueue.take()) - event.foreach { - case job: EntranceJob => - job.getJobRequest match { - case jobRequest: JobRequest => - Utils.tryCatch { - val engineTpyeLabel = jobRequest.getLabels.asScala - .filter(l => l.getLabelKey.equalsIgnoreCase(LabelKeyConstant.ENGINE_TYPE_KEY)) - .headOption - .getOrElse(null) - val labelStrList = jobRequest.getLabels.asScala.map { case l => - l.getStringValue - }.toList - if (null == engineTpyeLabel) { - logger.error( - "Invalid engineType null, cannot process. jobReq : " + BDPJettyServerHelper.gson - .toJson(jobRequest) - ) - throw CacheNotReadyException( - INVALID_ENGINETYPE_NULL.getErrorCode, - INVALID_ENGINETYPE_NULL.getErrorDesc - ) - } - val readCacheBefore: Long = TaskUtils - .getRuntimeMap(job.getParams) - .getOrDefault(TaskConstant.READ_CACHE_BEFORE, 300L: java.lang.Long) - .asInstanceOf[Long] - val cacheResult = JobHistoryHelper.getCache( - jobRequest.getExecutionCode, - jobRequest.getExecuteUser, - labelStrList.asJava, - readCacheBefore - ) - if (cacheResult != null && StringUtils.isNotBlank(cacheResult.getResultLocation)) { - val resultSets = listResults(cacheResult.getResultLocation, job.getUser) - if (resultSets.size() > 0) { - for (resultSet: FsPath <- resultSets.asScala) { - val alias = FilenameUtils.getBaseName(resultSet.getPath) - val output = FsPath - .getFsPath( - cacheResult.getResultLocation, - FilenameUtils.getName(resultSet.getPath) - ) - .getSchemaPath -// persistenceManager.onResultSetCreated(job, new CacheOutputExecuteResponse(alias, output)) - throw CacheNotReadyException( - INVALID_RESULTSETS.getErrorCode, - INVALID_RESULTSETS.getErrorDesc - ) - // todo check - } -// persistenceManager.onResultSizeCreated(job, resultSets.size()) - } - val runtime = TaskUtils.getRuntimeMap(job.getParams) - 
runtime.put(TaskConstant.CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - job.transitionCompleted(SuccessExecuteResponse(), "Result found in cache") - } else { - logger.info("Cache not found, submit to normal consumer.") - submitToExecute(job) - } - } { t => - logger.warn("Read cache failed, submit to normal consumer: ", t) - submitToExecute(job) - } - case _ => - } - case _ => - } - } - - private def listResults(resultLocation: String, user: String) = { - val dirPath = FsPath.getFsPath(resultLocation) - val fileSystem = FSFactory.getFsByProxyUser(dirPath, user).asInstanceOf[FileSystem] - Utils.tryFinally { - fileSystem.init(null) - if (fileSystem.exists(dirPath)) { - fileSystem.listPathWithError(dirPath).getFsPaths - } else { - Lists.newArrayList[FsPath]() - } - }(Utils.tryQuietly(fileSystem.close())) - } - - private def submitToExecute(job: EntranceJob): Unit = { - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.READ_FROM_CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - val groupName = schedulerContext.getOrCreateGroupFactory.getOrCreateGroup(job).getGroupName - val consumer = schedulerContext.getOrCreateConsumerManager.getOrCreateConsumer(groupName) - val index = consumer.getConsumeQueue.offer(job) - // index.map(getEventId(_, groupName)).foreach(job.setId) - if (index.isEmpty) { - throw new SchedulerErrorException( - JOB_QUEUE_IS_FULL.getErrorCode, - JOB_QUEUE_IS_FULL.getErrorDesc - ) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala deleted file mode 100644 index a4cba19f34..0000000000 --- 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.scheduler.EntranceGroupFactory -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager - -class ReadCacheConsumerManager(maxParallelismUsers: Int, persistenceManager: PersistenceManager) - extends ParallelConsumerManager(maxParallelismUsers) { - - override protected def createConsumer(groupName: String): FIFOUserConsumer = { - val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) - if (groupName.endsWith(EntranceGroupFactory.CACHE)) { - logger.info("Create cache consumer with group: " + groupName) - new ReadCacheConsumer( - getSchedulerContext, - getOrCreateExecutorService, - group, - persistenceManager - ) - } else { - logger.info("Create normal consumer with group: " + groupName) - new FIFOUserConsumer(getSchedulerContext, 
getOrCreateExecutorService, group) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala index aaaf131bd8..4e62430316 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala @@ -38,8 +38,8 @@ class JobTimeoutManager extends Logging { private[this] final val timeoutJobByName: ConcurrentMap[String, EntranceJob] = new ConcurrentHashMap[String, EntranceJob] - val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue - val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue + private val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + private val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue def add(jobKey: String, job: EntranceJob): Unit = { logger.info(s"Adding timeout job: ${job.getId()}") @@ -77,75 +77,75 @@ class JobTimeoutManager extends Logging { } private def timeoutDetective(): Unit = { - if (timeoutCheck) { - def checkAndSwitch(job: EntranceJob): Unit = { - logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. 
") - val currentTimeSeconds = System.currentTimeMillis() / 1000 - // job.isWaiting == job in queue - val jobScheduleStartTimeSeconds = - if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds - val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds - val jobRunningStartTimeSeconds = - if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds - val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds - if (!job.isCompleted) { - job.jobRequest.getLabels.asScala foreach { - case queueTimeOutLabel: JobQueuingTimeoutLabel => - if ( - job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! " - ) - job.onFailure( - s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", - null - ) - } - case jobRunningTimeoutLabel: JobRunningTimeoutLabel => - if ( - job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! " - ) - job.onFailure( - s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", - null - ) - } - case _ => - } + def checkAndSwitch(job: EntranceJob): Unit = { + logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. 
") + val currentTimeSeconds = System.currentTimeMillis() / 1000 + // job.isWaiting == job in queue + val jobScheduleStartTimeSeconds = + if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds + val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds + val jobRunningStartTimeSeconds = + if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds + val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds + if (!job.isCompleted) { + job.jobRequest.getLabels.asScala foreach { + case queueTimeOutLabel: JobQueuingTimeoutLabel => + if ( + job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! " + ) + job.onFailure( + s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", + null + ) + } + case jobRunningTimeoutLabel: JobRunningTimeoutLabel => + if ( + job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! 
" + ) + job.onFailure( + s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", + null + ) + } + case _ => } } - - timeoutJobByName.asScala.foreach(item => { - logger.info(s"Running timeout detection!") - synchronized { - jobCompleteDelete(item._1) - if (jobExist(item._1)) checkAndSwitch(item._2) - } - }) } + + timeoutJobByName.asScala.foreach(item => { + logger.info(s"Running timeout detection!") + synchronized { + jobCompleteDelete(item._1) + if (jobExist(item._1)) checkAndSwitch(item._2) + } + }) } // Thread periodic scan timeout task - val woker = Utils.defaultScheduler.scheduleAtFixedRate( - new Runnable() { - - override def run(): Unit = { - Utils.tryCatch { - timeoutDetective() - } { case t: Throwable => - logger.error(s"TimeoutDetective task failed. ${t.getMessage}", t) + if (timeoutCheck) { + val woker = Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + + override def run(): Unit = { + Utils.tryCatch { + timeoutDetective() + } { case t: Throwable => + logger.warn(s"TimeoutDetective task failed. 
${t.getMessage}", t) + } } - } - }, - 0, - timeoutScanInterval, - TimeUnit.SECONDS - ) + }, + 0, + timeoutScanInterval, + TimeUnit.SECONDS + ) + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala index 1311374fc1..746774633e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala @@ -19,10 +19,16 @@ package org.apache.linkis.entrance.utils import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.fs.FileSystem import org.apache.linkis.storage.utils.{FileSystemUtils, StorageConfiguration, StorageUtils} +import java.text.SimpleDateFormat +import java.util.Date + object CommonLogPathUtils { def buildCommonPath(commonPath: String): Unit = { @@ -52,4 +58,35 @@ object CommonLogPathUtils { } } + private val resPrefix = EntranceConfiguration.DEFAULT_LOGPATH_PREFIX.getValue + + /** + * get result path parentPath: resPrefix + dateStr + result + creator subPath: parentPath + + * executeUser + taskid + filename + * @param jobRequest + * @return + */ + def getResultParentPath(jobRequest: JobRequest): String = { + val resStb = new StringBuilder() + if (resStb.endsWith("/")) { + resStb.append(resPrefix) + } else { + resStb.append(resPrefix).append("/") + } + val dateFormat = new SimpleDateFormat("yyyy-MM-dd") + val date = new Date(System.currentTimeMillis) + val dateString = dateFormat.format(date) 
+ val userCreator = LabelUtil.getUserCreatorLabel(jobRequest.getLabels) + val creator = + if (null == userCreator) EntranceConfiguration.DEFAULT_CREATE_SERVICE + else userCreator.getCreator + resStb.append("result").append("/").append(dateString).append("/").append(creator) + resStb.toString() + } + + def getResultPath(jobRequest: JobRequest): String = { + val parentPath = getResultParentPath(jobRequest) + parentPath + "/" + jobRequest.getExecuteUser + "/" + jobRequest.getId + } + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala new file mode 100644 index 0000000000..13dcefa9f9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.utils + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.instance.label.client.InstanceLabelClient +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter + +object EntranceUtils extends Logging { + + private val SPLIT = "," + + private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory + + def getUserCreatorEcTypeKey( + userCreatorLabel: UserCreatorLabel, + engineTypeLabel: EngineTypeLabel + ): String = { + userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue + } + + def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { + if (StringUtils.isBlank(key)) (null, null) + else { + val labelStringValues = key.split(SPLIT) + if (labelStringValues.length < 2) return (null, null) + val userCreatorLabel = labelFactory + .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) + val engineTypeLabel = labelFactory + .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) + (userCreatorLabel, engineTypeLabel) + } + } + + def getDefaultCreatorECTypeKey(creator: String, ecType: String): String = { + val userCreatorLabel = + labelFactory.createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY) + val ecTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(ecType) + 
userCreatorLabel.setUser("*") + userCreatorLabel.setCreator(creator) + getUserCreatorEcTypeKey(userCreatorLabel, ecTypeLabel) + } + + def getRunningEntranceNumber(): Int = { + val entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length + val labelList = new util.ArrayList[Label[_]]() + val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory + .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) + labelList.add(offlineRouteLabel) + var offlineIns: Array[ServiceInstance] = null + Utils.tryAndWarn { + offlineIns = InstanceLabelClient.getInstance + .getInstanceFromLabel(labelList) + .asScala + .filter(l => + null != l && l.getApplicationName + .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) + ) + .toArray + } + val entranceRealNumber = if (null != offlineIns) { + logger.info(s"There are ${offlineIns.length} offlining instance.") + entranceNum - offlineIns.length + } else { + entranceNum + } + /* + Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. + */ + if (entranceRealNumber <= 0) { + logger.error( + s"Got ${entranceRealNumber} ${Sender.getThisServiceInstance.getApplicationName} instances." 
+ ) + 1 + } else { + entranceRealNumber + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala index ec29128889..44e2357b34 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala @@ -69,6 +69,11 @@ object JobHistoryHelper extends Logging { else task.getStatus } + def getProgressByTaskID(taskID: Long): String = { + val task = getTaskByTaskID(taskID) + if (task == null) "0" else task.getProgress + } + def getRequestIpAddr(req: HttpServletRequest): String = { val addrList = List( Option(req.getHeader("x-forwarded-for")).getOrElse("").split(",")(0), @@ -123,7 +128,144 @@ object JobHistoryHelper extends Logging { sender.ask(jobReqBatchUpdate) } - private def getTaskByTaskID(taskID: Long): JobRequest = { + /** + * Get all consume queue task and batch update instances(获取所有消费队列中的任务进行批量更新) + * + * @param taskIdList + * @param retryWhenUpdateFail + */ + def updateAllConsumeQueueTask( + taskIdList: util.List[Long], + retryWhenUpdateFail: Boolean = false + ): Unit = { + + if (taskIdList.isEmpty) return + + val updateTaskIds = new util.ArrayList[Long]() + + if ( + EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue > 0 && + taskIdList.size() > EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue + ) { + for (i <- 0 until EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue) { + updateTaskIds.add(taskIdList.get(i)) + } + } else { + updateTaskIds.addAll(taskIdList) + } + val list = new util.ArrayList[Long]() + list.addAll(taskIdList) + try { + val successTaskIds = updateBatchInstancesEmpty(updateTaskIds) + if (retryWhenUpdateFail) { + list.removeAll(successTaskIds) + 
} else { + list.removeAll(updateTaskIds) + } + } catch { + case e: Exception => + logger.warn("update batch instances failed, wait for retry", e) + Thread.sleep(1000) + } + updateAllConsumeQueueTask(list, retryWhenUpdateFail) + + } + + /** + * Batch update instances(批量更新instances字段) + * + * @param taskIdList + * @return + */ + def updateBatchInstancesEmpty(taskIdList: util.List[Long]): util.List[Long] = { + val jobReqList = new util.ArrayList[JobRequest]() + taskIdList.asScala.foreach(taskID => { + val jobRequest = new JobRequest + jobRequest.setId(taskID) + jobRequest.setInstances("") + jobReqList.add(jobRequest) + }) + val jobReqBatchUpdate = JobReqBatchUpdate(jobReqList) + Utils.tryCatch { + val response = sender.ask(jobReqBatchUpdate) + response match { + case resp: util.List[JobRespProtocol] => + // todo filter success data, rpc have bug +// resp.asScala +// .filter(r => +// r.getStatus == SUCCESS_FLAG && r.getData.containsKey(JobRequestConstants.JOB_ID) +// ) +// .map(_.getData.get(JobRequestConstants.JOB_ID).asInstanceOf[java.lang.Long]) +// .toList + + taskIdList + case _ => + throw JobHistoryFailedException( + "update batch instances from jobhistory not a correct List type" + ) + } + } { + case errorException: ErrorException => throw errorException + case e: Exception => + val e1 = + JobHistoryFailedException( + s"update batch instances ${taskIdList.asScala.mkString(",")} error" + ) + e1.initCause(e) + throw e + } + } + + /** + * query wait for failover task(获取待故障转移的任务) + * + * @param reqMap + * @param statusList + * @param startTimestamp + * @param limit + * @return + */ + def queryWaitForFailoverTask( + reqMap: util.Map[String, java.lang.Long], + statusList: util.List[String], + startTimestamp: Long, + limit: Int + ): util.List[JobRequest] = { + val requestFailoverJob = RequestFailoverJob(reqMap, statusList, startTimestamp, limit) + val tasks = Utils.tryCatch { + val response = sender.ask(requestFailoverJob) + response match { + case responsePersist: 
JobRespProtocol => + val status = responsePersist.getStatus + if (status != SUCCESS_FLAG) { + logger.error(s"query from jobHistory status failed, status is $status") + throw JobHistoryFailedException("query from jobHistory status failed") + } + val data = responsePersist.getData + data.get(JobRequestConstants.JOB_HISTORY_LIST) match { + case tasks: List[JobRequest] => + tasks.asJava + case _ => + throw JobHistoryFailedException( + s"query from jobhistory not a correct List type, instances ${reqMap.keySet()}" + ) + } + case _ => + logger.error("get query response incorrectly") + throw JobHistoryFailedException("get query response incorrectly") + } + } { + case errorException: ErrorException => throw errorException + case e: Exception => + val e1 = + JobHistoryFailedException(s"query failover task error, instances ${reqMap.keySet()} ") + e1.initCause(e) + throw e + } + tasks + } + + def getTaskByTaskID(taskID: Long): JobRequest = { val jobRequest = new JobRequest jobRequest.setId(taskID) jobRequest.setSource(null) @@ -176,15 +318,15 @@ object JobHistoryHelper extends Logging { val ecResourceMap = if (resourceInfo == null) new util.HashMap[String, ResourceWithStatus] else resourceInfo if (resourceMap != null) { - resourceMap.asInstanceOf[util.HashMap[String, ResourceWithStatus]].putAll(ecResourceMap) + resourceMap.asInstanceOf[util.Map[String, ResourceWithStatus]].putAll(ecResourceMap) } else { metricsMap.put(TaskConstant.JOB_YARNRESOURCE, ecResourceMap) } - var engineInstanceMap: util.HashMap[String, AnyRef] = null + var engineInstanceMap: util.Map[String, AnyRef] = null if (metricsMap.containsKey(TaskConstant.JOB_ENGINECONN_MAP)) { engineInstanceMap = metricsMap .get(TaskConstant.JOB_ENGINECONN_MAP) - .asInstanceOf[util.HashMap[String, AnyRef]] + .asInstanceOf[util.Map[String, AnyRef]] } else { engineInstanceMap = new util.HashMap[String, AnyRef]() metricsMap.put(TaskConstant.JOB_ENGINECONN_MAP, engineInstanceMap) @@ -194,7 +336,7 @@ object JobHistoryHelper extends 
Logging { val ticketId = infoMap.get(TaskConstant.TICKET_ID).asInstanceOf[String] val engineExtraInfoMap = engineInstanceMap .getOrDefault(ticketId, new util.HashMap[String, AnyRef]) - .asInstanceOf[util.HashMap[String, AnyRef]] + .asInstanceOf[util.Map[String, AnyRef]] engineExtraInfoMap.putAll(infoMap) engineInstanceMap.put(ticketId, engineExtraInfoMap) } else { diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java new file mode 100644 index 0000000000..c5efb5633e --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class SQLExplainTest { + + @Test + void isSelectCmdNoLimit() { + + String code = "SELECT * from dual WHERE (1=1)LIMIT 1;"; + boolean res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(false, res); + + code = "SELECT * from dual"; + res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(true, res); + + code = "SELECT * from dual LIMIT 1;"; + res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(false, res); + } + + @Test + void isSelectOverLimit() { + String code = "SELECT * from dual WHERE (1=1)LIMIT 5001;"; + boolean res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(true, res); + + code = "SELECT * from dual"; + res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(false, res); + + code = "SELECT * from dual LIMIT 4000;"; + res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(false, res); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java new file mode 100644 index 0000000000..c965529b57 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class TemplateConfUtilsTest { + + @Test + void getCustomTemplateConfName() { + String sqlCode = + "" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + String res = TemplateConfUtils.getCustomTemplateConfName(sqlCode, "sql"); + Assertions.assertEquals(res, ""); + + String sqlCode2 = + "" + + "---@set 123=注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode2, "sql"); + Assertions.assertEquals(res, ""); + + String sqlCode3 = + "" + + "---@set ec.resource.name=345\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + "---@set ec.resource.name=456\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode3, "sql"); + Assertions.assertEquals(res, "345"); + + String sqlCode4 = + "" + + "---@set ec.resource.name= name1 \n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode4, "sql"); + Assertions.assertEquals(res, "name1"); + + String sqlCode5 = + "" + + "##@set ec.resource.name=pyname1\n" + + "select * from table;\n" + + " --注解 \n" + + "#注解\n" + + "##@set ec.resource.name= 123 \n" + + " select \"--注解\" as test\n" + + "#@set 
yy=123\n" + + " #注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode5, "python"); + Assertions.assertEquals(res, "pyname1"); + + String sqlCode6 = + "" + + "///@set ec.resource.name= scalaname1 \n" + + " select \"//注解\" as test\n" + + "//@set yy=123\n" + + " #注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode6, "scala"); + Assertions.assertEquals(res, "scalaname1"); + + String sqlCode7 = + "" + + "---@set ec.resource.name= hqlname1 \n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode7, "hql"); + Assertions.assertEquals(res, "hqlname1"); + + String sqlCode8 = + "---@set ec.resource.name=linkis_test2;\n" + + " ---@set ec.resource.name=scriptis_test hive;\n" + + " select * from dss autotest.demo data limit 100;"; + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode8, "hql"); + Assertions.assertEquals(res, "linkis_test2"); + } + + @Test + void getCustomTemplateConfName2() { + + String sqlCode9 = + "---@set ec.resource.name=linkis_test2;\r\n---@set ec.resource.name=scriptis_test_hive;\r\n--@set limitn=100\r\nselect * from dss_autotest.demo_data limit ${limitn};\r\n"; + + String res = TemplateConfUtils.getCustomTemplateConfName(sqlCode9, "hql"); + Assertions.assertEquals(res, "linkis_test2"); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java new file mode 100644 index 0000000000..622d06c4e8 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import java.util.Arrays; + +import org.junit.jupiter.api.Test; +import org.junit.platform.commons.util.StringUtils; + +public class TestCommentHelper { + String sqlCode = + "" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + String scalaCode = + "" + + "// 注解\n" + + "print(1+1)\n" + + "//@set yy=123\n" + + " print(2)\n" + + " // 注解 \n" + + "// test\n" + + "print(\"//注解测试\")"; + + String scalaCodeRes = "print(1+1)\n" + "print(2)\n" + "print(\"//注解测试\")"; + + @Test + void sqlDealCommentTest() { + String code = SQLCommentHelper.dealComment(sqlCode); + // System.out.println(code); + } + + @Test + void scalaDealCommentTest() { + String code = ScalaCommentHelper.dealComment(scalaCode); + String[] lines = + Arrays.stream(code.split("\n")) + .map(String::trim) + .filter(x -> StringUtils.isNotBlank(x)) + .toArray(String[]::new); + String result = String.join("\n", lines); + // assertEquals(result,scalaCodeRes); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java 
b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java new file mode 100644 index 0000000000..fabff88473 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.apache.linkis.entrance.log.Cache; +import org.apache.linkis.entrance.log.HDFSCacheLogWriter; + +import org.apache.commons.lang3.StringUtils; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import oshi.util.FileUtil; + +class TestHDFSCacheLogWriter { + + @Test + void write() throws IOException { + + Cache cache = new Cache(5); + String fileName = UUID.randomUUID().toString().replace("-", "") + "-test.log"; + String logPath = System.getProperty("java.io.tmpdir") + File.separator + fileName; + System.out.println(logPath); + String chartSet = "utf-8"; + String username = System.getProperty("user.name"); + + File file = new File(logPath); + file.createNewFile(); + + HDFSCacheLogWriter logWriter = + new HDFSCacheLogWriter( + // "D:\\DataSphere\\linkis\\docs\\test.log", + logPath, chartSet, cache, username); + + String[] msgArr = + new String[] { + "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", + "17", "18" + }; + + List msgList = new ArrayList(Arrays.asList(msgArr)); + String msg = String.join("\n", msgList); + + logWriter.write(msg); + logWriter.flush(); + + List list = FileUtil.readFile(logPath); + String res = String.join("\n", list); + + res = res.replace("\n\n", "\n"); + res = StringUtils.strip(res, " \n"); + Assertions.assertEquals(res, msg); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestReplaceComment.scala b/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestReplaceComment.scala new file mode 100644 index 0000000000..3310f09581 --- /dev/null +++ 
b/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestReplaceComment.scala @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.junit.jupiter.api.{Assertions, Test} + +class TestReplaceComment { + + @Test + def TestRepComm: Unit = { + val realCode = "drop table if exists default.test;" + + "create table default.test(" + + "id varchar(11) comment '这是注释测试分号;这是注释测试分号;'," + + "id1 string comment '测试'," + + "id2 string COMMENT '码值说明:2-高;3-中;4-低;'" + + ");" + val expectCode = "drop table if exists default.test;" + + "create table default.test(" + + "id varchar(11) comment '这是注释测试分号\\;这是注释测试分号\\;'," + + "id1 string comment '测试'," + + "id2 string COMMENT '码值说明:2-高\\;3-中\\;4-低\\;'" + + ");" + + Assertions.assertEquals(SQLCommentHelper.replaceComment(realCode), expectCode) + } + +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java index c12e2791b3..0bc0b08c52 100644 --- 
a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java @@ -42,6 +42,7 @@ public class UJESSQLDriver extends UJESSQLDriverMain implements Driver { static String DB_NAME = "DBNAME"; static String PARAMS = "PARAMS"; static String ENGINE_TYPE = "EngineType"; + static String ENGINE_VERSION = "EngineVersion"; static String USER = "user"; static String TOKEN_KEY = "key"; @@ -49,6 +50,9 @@ public class UJESSQLDriver extends UJESSQLDriverMain implements Driver { static String PASSWORD = "password"; static boolean TABLEAU_SERVER = false; static String FIXED_SESSION = "fixedSession"; + static String ENABLE_MULTI_RESULT = "enableMultiResult"; + + static String USE_SSL = "useSSL"; static String VERSION = "version"; static int DEFAULT_VERSION = 1; static String MAX_CONNECTION_SIZE = "maxConnectionSize"; diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala index 7b3e2ada8c..0be96b2c15 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala @@ -18,7 +18,7 @@ package org.apache.linkis.ujes.jdbc import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.governance.common.constant.job.JobRequestConstants import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.engine.{EngineType, EngineTypeLabel, RunType} import 
org.apache.linkis.manager.label.utils.EngineTypeLabelCreator @@ -87,9 +87,7 @@ class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Prope tableauFlag } - private[jdbc] val dbName = - if (StringUtils.isNotBlank(props.getProperty(DB_NAME))) props.getProperty(DB_NAME) - else "default" + private[jdbc] val dbName = props.getProperty(DB_NAME) private val runningSQLStatements = new util.LinkedList[Statement] @@ -120,26 +118,30 @@ class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Prope private val runtimeParams: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] private[jdbc] def getEngineType: EngineTypeLabel = { - val engineType: EngineTypeLabel = - EngineTypeLabelCreator.createEngineTypeLabel(EngineType.TRINO.toString) + + var engineType = EngineType.TRINO.toString + var engineVersion = "" if (props.containsKey(PARAMS)) { val params = props.getProperty(PARAMS) if (params != null & params.length() > 0) { params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { - case Array(k, v) if k.equals(UJESSQLDriver.ENGINE_TYPE) => - if (v.contains('-')) { - val factory = LabelBuilderFactoryContext.getLabelBuilderFactory - val label = factory.createLabel(classOf[EngineTypeLabel]) - label.setStringValue(v) - return label - } else { - return EngineTypeLabelCreator.createEngineTypeLabel(v) + case Array(k, v) => + if (k.equals(UJESSQLDriver.ENGINE_TYPE)) { + engineType = v + } else if (k.equals(UJESSQLDriver.ENGINE_VERSION)) { + engineVersion = v } + case _ => } } } - engineType + if (StringUtils.isNotBlank(engineVersion)) { + EngineTypeLabelCreator.registerVersion(engineType, engineVersion) + } + + EngineTypeLabelCreator.createEngineTypeLabel(engineType) + } private[jdbc] def throwWhenClosed[T](op: => T): T = @@ -150,10 +152,6 @@ class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Prope val statement = op runningSQLStatements.add(statement) - if (!inited) { - inited = true - 
Utils.tryAndWarn(statement.execute(s"USE $dbName")) - } statement } @@ -450,7 +448,6 @@ class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Prope val runType = EngineType.mapStringToEngineType(engine) match { case EngineType.SPARK => RunType.SQL case EngineType.HIVE => RunType.HIVE - case EngineType.REPL => RunType.REPL case EngineType.TRINO => RunType.TRINO_SQL case EngineType.PRESTO => RunType.PRESTO_SQL case EngineType.NEBULA => RunType.NEBULA_SQL @@ -472,6 +469,10 @@ class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Prope logger.info("Fixed session is enable session id is {}", connectionId) } + if (StringUtils.isNotBlank(dbName)) { + runtimeParams.put(JobRequestConstants.LINKIS_JDBC_DEFAULT_DB, dbName) + } + val jobSubmitAction = JobSubmitAction.builder .addExecuteCode(code) .setStartupParams(startupParams) diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala index f00d870978..e3a1475d2b 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala @@ -37,6 +37,10 @@ class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnectio with Logging { private var jobExecuteResult: JobExecuteResult = _ + + private val openedResultSets: util.ArrayList[UJESSQLResultSet] = + new util.ArrayList[UJESSQLResultSet]() + private var resultSet: UJESSQLResultSet = _ private var closed = false private var maxRows: Int = 0 @@ -190,7 +194,7 @@ class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnectio override def getUpdateCount: Int = throwWhenClosed(-1) - override def getMoreResults: Boolean = false + override def 
getMoreResults: Boolean = getMoreResults(Statement.CLOSE_CURRENT_RESULT) override def setFetchDirection(direction: Int): Unit = throwWhenClosed(if (direction != ResultSet.FETCH_FORWARD) { @@ -230,7 +234,45 @@ class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnectio override def getConnection: Connection = throwWhenClosed(ujesSQLConnection) - override def getMoreResults(current: Int): Boolean = false + override def getMoreResults(current: Int): Boolean = { + if (this.resultSet == null) { + false + } else { + this.resultSet.getMetaData + val nextResultSet = this.resultSet.getNextResultSet + current match { + case Statement.CLOSE_CURRENT_RESULT => + // 1 - CLOSE CURRENT RESULT SET + this.resultSet.close() + this.resultSet.clearNextResultSet + case Statement.KEEP_CURRENT_RESULT => + // 2 - KEEP CURRENT RESULT SET + this.openedResultSets.add(this.resultSet) + this.resultSet.clearNextResultSet + case Statement.CLOSE_ALL_RESULTS => + // 3 - CLOSE ALL RESULT SET + this.openedResultSets.add(this.resultSet) + closeAllOpenedResultSet() + case _ => + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "getMoreResults with current not in 1,2,3 is not supported, see Statement.getMoreResults" + ) + } + this.resultSet = nextResultSet + this.resultSet != null + } + } + + private def closeAllOpenedResultSet(): Any = { + val iterator = this.openedResultSets.iterator() + while (iterator.hasNext) { + val set = iterator.next() + if (!set.isClosed) { + set.close() + } + } + } override def getGeneratedKeys: ResultSet = throw new LinkisSQLException( LinkisSQLErrorCode.NOSUPPORT_STATEMENT, @@ -302,6 +344,7 @@ class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnectio /** * log[0] error log[1] warn log[2] info log[3] all (info + warn + error) + * * @return */ def getAllLog(): Array[String] = { @@ -316,6 +359,7 @@ class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnectio /** * log[0] error log[1] warn 
log[2] info log[3] all (info + warn + error) + * * @return */ def getIncrementalLog(): util.List[String] = { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala index bea24181a3..96132d5641 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala @@ -23,8 +23,10 @@ import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder import org.apache.linkis.ujes.client.UJESClient import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain._ +import org.apache.commons.codec.binary.Hex import org.apache.commons.lang3.StringUtils +import java.nio.charset.StandardCharsets import java.util import java.util.Properties @@ -36,29 +38,50 @@ object UJESClientFactory extends Logging { val host = props.getProperty(HOST) val port = props.getProperty(PORT) val user = props.getProperty(USER) - val serverUrl = if (StringUtils.isNotBlank(port)) s"http://$host:$port" else "http://" + host - val uniqueKey = s"${serverUrl}_$user" - if (ujesClients.containsKey(uniqueKey)) { - logger.info("Clients with the same JDBC unique key({}) will get it directly", uniqueKey) - ujesClients.get(uniqueKey) + val pwd = props.getProperty(PASSWORD) + val sslEnabled = + if ( + props + .containsKey(USE_SSL) && "true".equalsIgnoreCase(props.getProperty(USE_SSL)) + ) { + true + } else { + false + } + val prefix = if (sslEnabled) { + "https" + } else { + "http" + } + val serverUrl = + if (StringUtils.isNotBlank(port)) s"$prefix://$host:$port" else "$prefix://" + host + val uniqueKey = s"${serverUrl}_${user}_${pwd}" + val uniqueKeyDes = Hex.encodeHexString(uniqueKey.getBytes(StandardCharsets.UTF_8)) + if 
(ujesClients.containsKey(uniqueKeyDes)) { + logger.info("Clients with the same JDBC unique key({}) will get it directly", serverUrl) + ujesClients.get(uniqueKeyDes) } else { - uniqueKey.intern synchronized { - if (ujesClients.containsKey(uniqueKey)) { - logger.info("Clients with the same JDBC unique key({}) will get it directly", uniqueKey) - return ujesClients.get(uniqueKey) + uniqueKeyDes.intern synchronized { + if (ujesClients.containsKey(uniqueKeyDes)) { + logger.info("Clients with the same JDBC unique key({}) will get it directly", serverUrl) + return ujesClients.get(uniqueKeyDes) } logger.info( "The same Client does not exist for the JDBC unique key({}), a new Client will be created", - uniqueKey + serverUrl ) - val ujesClient = createUJESClient(serverUrl, props) - ujesClients.put(uniqueKey, ujesClient) + val ujesClient = createUJESClient(serverUrl, props, sslEnabled) + ujesClients.put(uniqueKeyDes, ujesClient) ujesClient } } } - private def createUJESClient(serverUrl: String, props: Properties): UJESClient = { + private def createUJESClient( + serverUrl: String, + props: Properties, + sslEnabled: Boolean + ): UJESClient = { val clientConfigBuilder = DWSClientConfigBuilder.newBuilder() clientConfigBuilder.addServerUrl(serverUrl) clientConfigBuilder.setAuthTokenKey(props.getProperty(USER)) @@ -89,6 +112,10 @@ object UJESClientFactory extends Logging { } } if (!versioned) clientConfigBuilder.setDWSVersion("v" + DEFAULT_VERSION) + + if (sslEnabled) { + clientConfigBuilder.setSSL(sslEnabled) + } UJESClient(clientConfigBuilder.build()) } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala index ab2f6dda10..44686981e8 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala +++ 
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala @@ -75,6 +75,12 @@ class UJESSQLDriverMain extends Driver with Logging { case Array(FIXED_SESSION, value) => props.setProperty(FIXED_SESSION, value) false + case Array(USE_SSL, value) => + props.setProperty(USE_SSL, value) + false + case Array(ENABLE_MULTI_RESULT, value) => + props.setProperty(ENABLE_MULTI_RESULT, value) + false case Array(key, _) => if (StringUtils.isBlank(key)) { throw new LinkisSQLException( @@ -138,6 +144,9 @@ object UJESSQLDriverMain { val PASSWORD = UJESSQLDriver.PASSWORD val TABLEAU_SERVER = UJESSQLDriver.TABLEAU_SERVER val FIXED_SESSION = UJESSQLDriver.FIXED_SESSION + val ENABLE_MULTI_RESULT = UJESSQLDriver.ENABLE_MULTI_RESULT + + val USE_SSL = UJESSQLDriver.USE_SSL val VERSION = UJESSQLDriver.VERSION val DEFAULT_VERSION = UJESSQLDriver.DEFAULT_VERSION diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala index 0ed47925c6..02e8551722 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala @@ -20,6 +20,7 @@ package org.apache.linkis.ujes.jdbc import org.apache.linkis.common.utils.Logging import org.apache.linkis.ujes.client.request.ResultSetAction import org.apache.linkis.ujes.client.response.ResultSetResult +import org.apache.linkis.ujes.client.utils.UJESClientUtils import org.apache.commons.lang3.StringUtils @@ -76,6 +77,7 @@ class UJESSQLResultSet( private var path: String = _ private var metaData: util.List[util.Map[String, String]] = _ private val statement: LinkisSQLStatement = ujesStatement + private var nextResultSet: UJESSQLResultSet = _ 
private val connection: LinkisSQLConnection = ujesStatement.getConnection.asInstanceOf[LinkisSQLConnection] @@ -102,7 +104,15 @@ class UJESSQLResultSet( private def getResultSetPath(resultSetList: Array[String]): String = { if (resultSetList.length > 0) { + val enableMultiResult = connection.getProps.getProperty(UJESSQLDriverMain.ENABLE_MULTI_RESULT) + enableMultiResult match { + case "Y" => + // when the multi-result switch is enabled, return the first result set + resultSetList(0) + case _ => + // when the switch is disabled, keep legacy behavior: return the last result set + resultSetList(resultSetList.length - 1) + } } else { "" } @@ -110,6 +120,12 @@ class UJESSQLResultSet( private def resultSetResultInit(): Unit = { if (path == null) path = getResultSetPath(resultSetList) + // set up the next result set + val enableMultiResult = connection.getProps.getProperty(UJESSQLDriverMain.ENABLE_MULTI_RESULT) + if (resultSetList.length > 1 && "Y".equals(enableMultiResult)) { + this.nextResultSet = + new UJESSQLResultSet(resultSetList.drop(1), this.statement, maxRows, fetchSize) + } val user = connection.getProps.getProperty("user") if (StringUtils.isNotBlank(path)) { val resultAction = @@ -160,11 +176,13 @@ class UJESSQLResultSet( return } metaData = resultSetResult.getMetadata.asInstanceOf[util.List[util.Map[String, String]]] - for (cursor <- 1 to metaData.size()) { - val col = metaData.get(cursor - 1) - resultSetMetaData.setColumnNameProperties(cursor, col.get("columnName")) - resultSetMetaData.setDataTypeProperties(cursor, col.get("dataType")) - resultSetMetaData.setCommentPropreties(cursor, col.get("comment")) + if (null != metaData) { + for (cursor <- 1 to metaData.size()) { + val col = metaData.get(cursor - 1) + resultSetMetaData.setColumnNameProperties(cursor, col.get("columnName")) + resultSetMetaData.setDataTypeProperties(cursor, col.get("dataType")) + resultSetMetaData.setCommentPropreties(cursor, col.get("comment")) + } } } @@ -233,36 +251,7 @@ class UJESSQLResultSet( } private def evaluate(dataType: String, value: String): Any = { - - if (value 
== null || value.equals("null") || value.equals("NULL") || value.equals("Null")) { - dataType.toLowerCase(Locale.getDefault) match { - case "string" | "char" | "varchar" | "nvarchar" => value - case _ => null - } - } else { - dataType.toLowerCase(Locale.getDefault) match { - case null => throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) - case "char" | "varchar" | "nvarchar" | "string" => value - case "short" => value.toShort - case "int" => value.toInt - case "long" => value.toLong - case "float" => value.toFloat - case "double" => value.toDouble - case "boolean" => value.toBoolean - case "byte" => value.toByte - case "timestamp" => value - case "date" => value - case "bigint" => value.toLong - case "decimal" => value.toDouble - case "array" => value.toArray - case "map" => value - case _ => - throw new LinkisSQLException( - LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR, - s"Can't infer the SQL type to use for an instance of ${dataType}. Use getObject() with an explicit Types value to specify the type to use" - ) - } - } + UJESClientUtils.evaluate(dataType, value) } private def getColumnValue(columnIndex: Int): Any = { @@ -299,6 +288,10 @@ class UJESSQLResultSet( } } + def clearNextResultSet: Any = { + this.nextResultSet = null + } + override def getBoolean(columnIndex: Int): Boolean = { val any = getColumnValue(columnIndex) if (wasNull()) { @@ -679,6 +672,8 @@ class UJESSQLResultSet( true } + def getNextResultSet: UJESSQLResultSet = this.nextResultSet + override def setFetchDirection(direction: Int): Unit = { throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java index 3ebd21ae70..e319cd0254 100644 --- 
a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java @@ -17,16 +17,26 @@ package org.apache.linkis.ujes.jdbc; +import org.apache.linkis.governance.common.entity.ExecutionNodeStatus; +import org.apache.linkis.governance.common.entity.task.RequestPersistTask; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.response.JobExecuteResult; +import org.apache.linkis.ujes.client.response.JobInfoResult; +import org.apache.linkis.ujes.client.response.ResultSetResult; + import java.sql.SQLException; +import java.sql.Statement; +import java.util.Properties; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.mockito.Mockito; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; /* * Notice: @@ -143,6 +153,184 @@ public void getConnWhenIsClosed() { } } + /** + * single query without next result set check point 1: getMoreResults returns false check point 2: + * default getMoreResults, use Statement.CLOSE_CURRENT_RESULT. The current result set is closed. 
+ */ + @Test + public void singleQueryWithNoMoreResult() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1"}).when(jobInfoResult).getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(); + assertFalse(moreResults); + assertTrue(resultSet.isClosed()); + } + + /** + * multiple query without multiple result param, return one result check point 1: 2 sql executed. 
+ * 1 result set + */ + @Test + public void multiQueryWithNoMoreResult() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(); + assertFalse(moreResults); + assertTrue(resultSet.isClosed()); + } + + /** + * multiple query executed with multiple result param is Y check point 1: getMoreResults returns + * true check point 2: current result is closed check point 3: second getMoreResults returns false + */ + @Test + public void multiQueryWithMoreResult() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement 
= new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(); + assertTrue(moreResults); + assertTrue(resultSet.isClosed()); + moreResults = linkisSQLStatement.getMoreResults(); + assertFalse(moreResults); + } + + /** + * multiple query executed with multiple result param is Y, and use + * LinkisSQLStatement.KEEP_CURRENT_RESULT check point 1: getMoreResults returns true check point + * 2: current result is not close check point 3: second getMoreResults returns false + */ + @Test + public void multiQueryWithMoreResultNotCloseCurrent() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new 
JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(LinkisSQLStatement.KEEP_CURRENT_RESULT); + assertTrue(moreResults); + assertFalse(resultSet.isClosed()); + } + + /** + * multiple query executed with multiple result param is Y, and use + * LinkisSQLStatement.CLOSE_ALL_RESULTS check point 1: getMoreResults returns true check point 2: + * current result is not close check point 3: second getMoreResults returns false check point 4: + * first result set is closed after second invoke getMoreResults + */ + @Test + public void multiQueryWithMoreResultCloseAllOpenedCurrent() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = 
Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(Statement.KEEP_CURRENT_RESULT); + assertTrue(moreResults); + assertFalse(resultSet.isClosed()); + moreResults = linkisSQLStatement.getMoreResults(Statement.CLOSE_ALL_RESULTS); + assertFalse(moreResults); + assertTrue(resultSet.isClosed()); + } + @AfterAll public static void closeStateAndConn() { if (statement != null) { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java index a8f0a179d0..c0631427ea 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java @@ -17,7 +17,14 @@ package org.apache.linkis.ujes.jdbc; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.request.ResultSetAction; +import org.apache.linkis.ujes.client.response.ResultSetResult; + import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ 
-25,6 +32,10 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; /* * Notice: @@ -137,4 +148,101 @@ public void next() { Assertions.assertTrue(resultSet.isAfterLast()); } } + + /** single query result with no multiple result set check point 1: nextResultSet is null */ + @Test + public void singleQueryWithNoMoreResultSet() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + LinkisSQLConnection linkisSQLConnection = new LinkisSQLConnection(ujesClient, t); + + UJESSQLResultSet ujessqlResultSet = + new UJESSQLResultSet( + new String[] {"path1"}, new LinkisSQLStatement(linkisSQLConnection), 0, 0); + + ujessqlResultSet.next(); + + assertNull(ujessqlResultSet.getNextResultSet()); + } + + /** + * multiple result set with multi result switch is Y check point 1: queryResult has two path, + * return first path. 
check point 2: the second result set returned check point 3: the third + * result set is null + */ + @Test + public void multiQueryWithMoreResultSet() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + List pathList = new ArrayList<>(); + Mockito.when(ujesClient.resultSet(any())) + .thenAnswer( + invocationOnMock -> { + ResultSetAction argument = invocationOnMock.getArgument(0); + String path = (String) argument.getParameters().get("path"); + if (pathList.isEmpty()) { + assertEquals("path1", path); + } + pathList.add(path); + + return new ResultSetResult(); + }); + LinkisSQLConnection linkisSQLConnection = new LinkisSQLConnection(ujesClient, t); + + UJESSQLResultSet ujessqlResultSet = + new UJESSQLResultSet( + new String[] {"path1", "path2"}, new LinkisSQLStatement(linkisSQLConnection), 0, 0); + + // run the query + ujessqlResultSet.next(); + + // a next result set exists + UJESSQLResultSet nextResultSet = ujessqlResultSet.getNextResultSet(); + assertNotNull(nextResultSet); + nextResultSet.next(); + + // there is no third result set + assertNull(nextResultSet.getNextResultSet()); + } + + /** + * multiple result set with multi result switch not Y check point 1: queryResult has two path, + * return last path. 
check point 2: the next result set is null + */ + @Test + public void multiQueryWithNoMoreResultSet() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + Mockito.when(ujesClient.resultSet(any())) + .thenAnswer( + invocationOnMock -> { + ResultSetAction argument = invocationOnMock.getArgument(0); + String path = (String) argument.getParameters().get("path"); + assertEquals("path4", path); + + return new ResultSetResult(); + }); + + LinkisSQLConnection linkisSQLConnection = new LinkisSQLConnection(ujesClient, t); + + UJESSQLResultSet ujessqlResultSet = + new UJESSQLResultSet( + new String[] {"path1", "path2", "path3", "path4"}, + new LinkisSQLStatement(linkisSQLConnection), + 0, + 0); + + // run the query + ujessqlResultSet.next(); + + // even if the query produces multiple result sets, no next result set is exposed + UJESSQLResultSet nextResultSet = ujessqlResultSet.getNextResultSet(); + assertNull(nextResultSet); + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml index 36076024f2..d6dee78ed2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml @@ -129,6 +129,12 @@ kubernetes-model-core ${kubernetes-client.version} + + + org.apache.linkis + linkis-ps-common-lock + ${project.version} + diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java deleted file mode 100644 index 59d9959431..0000000000 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.loader; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.conf.Configuration; - -public class EngineConnPluginLoaderConf { - - public static final CommonVars ENGINE_PLUGIN_LOADER_DEFAULT_USER = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.defaultUser", "hadoop"); - - public static final CommonVars ENGINE_PLUGIN_STORE_PATH = - CommonVars.apply( - "wds.linkis.engineconn.plugin.loader.store.path", - CommonVars.apply( - "ENGINE_CONN_HOME", - Configuration.getLinkisHome() + "/lib/linkis-engineconn-plugins") - .getValue()); - - public static final CommonVars ENGINE_PLUGIN_PROPERTIES_NAME = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.properties.name", "plugins.properties"); - - public static final CommonVars ENGINE_PLUGIN_LOADER_CACHE_REFRESH_INTERVAL = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.cache.refresh-interval", "300"); - - public static final CommonVars 
DOWNLOAD_TEMP_DIR_PREFIX = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.download.tmpdir.prefix", ".BML_TMP_"); -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java index 059f984fa4..ef72664460 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java @@ -59,7 +59,8 @@ public CacheablesEngineConnPluginLoader() { @Override public long interval() { return Long.parseLong( - EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_CACHE_REFRESH_INTERVAL.getValue()); + EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_CACHE_REFRESH_INTERVAL() + .getValue()); } @Override diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java index e3ae5ccde1..0e54ed8c4e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java @@ -17,12 +17,13 @@ package org.apache.linkis.engineplugin.loader.loaders; +import 
org.apache.linkis.common.exception.ErrorException; import org.apache.linkis.engineplugin.loader.EngineConnPluginLoaderConf; import org.apache.linkis.engineplugin.loader.classloader.EngineConnPluginClassLoader; import org.apache.linkis.engineplugin.loader.loaders.resource.LocalEngineConnPluginResourceLoader; import org.apache.linkis.engineplugin.loader.loaders.resource.PluginResource; import org.apache.linkis.engineplugin.loader.utils.EngineConnPluginUtils; -import org.apache.linkis.manager.am.exception.AMErrorException; +import org.apache.linkis.engineplugin.loader.utils.ExceptionHelper; import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin; import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginLoadException; import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginNotFoundException; @@ -57,16 +58,17 @@ public class DefaultEngineConnPluginLoader extends CacheablesEngineConnPluginLoa private static final String PLUGIN_DIR = "plugin"; - public DefaultEngineConnPluginLoader() { + public DefaultEngineConnPluginLoader() throws ErrorException { // Check store path (is necessary) - String storePath = EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH.getValue(); + String storePath = EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH().getValue(); LOG.info("DefaultEngineConnPluginLoader, storePath:" + storePath); if (StringUtils.isBlank(storePath)) { - throw new AMErrorException( + ExceptionHelper.dealErrorException( 70061, "You should defined [" - + EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH.key() - + "] in properties file"); + + EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH().key() + + "] in properties file", + null); } // The path can be uri try { @@ -78,14 +80,17 @@ public DefaultEngineConnPluginLoader() { } catch (URISyntaxException e) { // Ignore } catch (IllegalArgumentException e) { - throw new AMErrorException( + ExceptionHelper.dealErrorException( 70061, - "You should defined [" - + 
EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH.key() - + "] in properties file"); + "The value:[" + + storePath + + "] of [" + + EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH().key() + + "] is incorrect", + e); } this.rootStorePath = storePath; - this.pluginPropsName = EngineConnPluginLoaderConf.ENGINE_PLUGIN_PROPERTIES_NAME.getValue(); + this.pluginPropsName = EngineConnPluginLoaderConf.ENGINE_PLUGIN_PROPERTIES_NAME().getValue(); // Prepare inner loaders // resourceLoaders.add(new BmlEngineConnPluginResourceLoader()); resourceLoaders.add(new LocalEngineConnPluginResourceLoader()); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java index 23607a7063..8b8a071480 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java @@ -58,13 +58,13 @@ public class BmlEngineConnPluginResourceLoader implements EngineConnPluginsResou private String downloadTmpDir; public BmlEngineConnPluginResourceLoader() { - this(EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_DEFAULT_USER.getValue(), null); + this(EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_DEFAULT_USER().getValue(), null); } public BmlEngineConnPluginResourceLoader(String clientUser, Map clientProps) { this.clientUser = clientUser; this.bmlClient = BmlClientFactory.createBmlClient(clientUser, clientProps); - this.downloadTmpDir = 
EngineConnPluginLoaderConf.DOWNLOAD_TEMP_DIR_PREFIX.getValue(); + this.downloadTmpDir = EngineConnPluginLoaderConf.DOWNLOAD_TEMP_DIR_PREFIX().getValue(); } @Override diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java index 5e71dadc11..2bfcd00aca 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java @@ -93,7 +93,7 @@ private static List getJarsUrlsOfPathRecurse(String path, List classPa parentFile.listFiles( (file) -> { String name = file.getName(); - return !file.isHidden() + return !name.startsWith(".") && (file.isDirectory() || name.endsWith(JAR_SUF_NAME) || name.endsWith(CLASS_SUF_NAME)); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java deleted file mode 100644 index 395471e78b..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.conf; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.conf.Configuration; - -public class EngineConnPluginConfiguration { - - public static final CommonVars ENGINE_CONN_HOME = - CommonVars.apply( - "wds.linkis.engineconn.home", - CommonVars.apply( - "ENGINE_CONN_HOME", - Configuration.getLinkisHome() + "/lib/linkis-engineconn-plugins") - .getValue()); - - public static final CommonVars ENGINE_CONN_DIST_LOAD_ENABLE = - CommonVars.apply("wds.linkis.engineconn.dist.load.enable", true); - - public static final CommonVars ENABLED_BML_UPLOAD_FAILED_EXIT = - CommonVars.apply("wds.linkis.engineconn.bml.upload.failed.enable", true); - - // for third party eg appconn/datax, if all update, can set to false then to remove - public static final CommonVars EC_BML_VERSION_MAY_WITH_PREFIX_V = - CommonVars.apply("linkis.engineconn.bml.version.may.with.prefix", true); -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java deleted file mode 100644 index 2c5d743205..0000000000 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.localize; - -import org.apache.linkis.engineplugin.server.conf.EngineConnPluginConfiguration; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; - -import org.apache.commons.lang3.StringUtils; - -import java.io.File; -import java.nio.file.Paths; -import java.text.MessageFormat; -import java.util.Arrays; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.*; - -public abstract class AbstractEngineConnBmlResourceGenerator - implements EngineConnBmlResourceGenerator { - - private static final Logger logger = - LoggerFactory.getLogger(AbstractEngineConnBmlResourceGenerator.class); - - public AbstractEngineConnBmlResourceGenerator() { - if (!new File(getEngineConnsHome()).exists()) { - throw new EngineConnPluginErrorException( - CANNOT_HOME_PATH_EC.getErrorCode(), - MessageFormat.format(CANNOT_HOME_PATH_EC.getErrorDesc(), getEngineConnsHome())); - } - } - - public String getEngineConnsHome() { - return EngineConnPluginConfiguration.ENGINE_CONN_HOME.getValue(); - } - - protected String getEngineConnDistHome(EngineTypeLabel engineConnTypeLabel) { - return getEngineConnDistHome( - engineConnTypeLabel.getEngineType(), engineConnTypeLabel.getVersion()); - } - - protected String getEngineConnDistHome(String engineConnType, String version) { - String engineConnDistHome = - Paths.get(getEngineConnsHome(), engineConnType, "dist").toFile().getPath(); - checkEngineConnDistHome(engineConnDistHome); - if (StringUtils.isBlank(version) - || EngineConnBmlResourceGenerator.NO_VERSION_MARK.equals(version)) { - return engineConnDistHome; - } - String engineConnPackageHome = Paths.get(engineConnDistHome, version).toFile().getPath(); - logger.info("getEngineConnDistHome, engineConnPackageHome path:" + 
engineConnPackageHome); - File engineConnPackageHomeFile = new File(engineConnPackageHome); - if (!engineConnPackageHomeFile.exists()) { - if (!version.startsWith("v") - && (boolean) EngineConnPluginConfiguration.EC_BML_VERSION_MAY_WITH_PREFIX_V.getValue()) { - String versionOld = "v" + version; - String engineConnPackageHomeOld = - Paths.get(engineConnDistHome, versionOld).toFile().getPath(); - logger.info( - "try to getEngineConnDistHome with prefix v, engineConnPackageHome path:" - + engineConnPackageHomeOld); - File engineConnPackageHomeFileOld = new File(engineConnPackageHomeOld); - if (!engineConnPackageHomeFileOld.exists()) { - throw new EngineConnPluginErrorException( - ENGINE_VERSION_NOT_FOUND.getErrorCode(), - MessageFormat.format( - ENGINE_VERSION_NOT_FOUND.getErrorDesc(), version, engineConnType)); - } else { - return engineConnPackageHomeOld; - } - } else { - throw new EngineConnPluginErrorException( - ENGINE_VERSION_NOT_FOUND.getErrorCode(), - MessageFormat.format(ENGINE_VERSION_NOT_FOUND.getErrorDesc(), version, engineConnType)); - } - } else { - return engineConnPackageHome; - } - } - - private void checkEngineConnDistHome(String engineConnPackageHomePath) { - File engineConnPackageHomeFile = new File(engineConnPackageHomePath); - checkEngineConnDistHome(engineConnPackageHomeFile); - } - - private void checkEngineConnDistHome(File engineConnPackageHome) { - if (!engineConnPackageHome.exists()) { - throw new EngineConnPluginErrorException( - CANNOT_HOME_PATH_DIST.getErrorCode(), - MessageFormat.format( - CANNOT_HOME_PATH_DIST.getErrorDesc(), engineConnPackageHome.getPath())); - } - } - - protected String[] getEngineConnDistHomeList(String engineConnType) { - String engineConnDistHome = - Paths.get(getEngineConnsHome(), engineConnType, "dist").toFile().getPath(); - File engineConnDistHomeFile = new File(engineConnDistHome); - checkEngineConnDistHome(engineConnDistHomeFile); - File[] children = engineConnDistHomeFile.listFiles(); - if (children.length 
== 0) { - throw new EngineConnPluginErrorException( - DIST_IS_EMPTY.getErrorCode(), - MessageFormat.format(DIST_IS_EMPTY.getErrorDesc(), engineConnType)); - } else { - return Arrays.stream(children).map(File::getPath).toArray(String[]::new); - } - } - - @Override - public String[] getEngineConnTypeListFromDisk() { - return Arrays.stream(new File(getEngineConnsHome()).listFiles()) - .filter(file -> !file.isHidden() && file.isDirectory()) - .map(file -> file.getName()) - .toArray(String[]::new); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java deleted file mode 100644 index c8ebc50633..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.localize; - -import org.apache.linkis.common.utils.ZipUtils; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary; - -import java.io.File; -import java.text.MessageFormat; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.NO_PERMISSION_FILE; - -public class DefaultEngineConnBmlResourceGenerator extends AbstractEngineConnBmlResourceGenerator { - - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnBmlResourceGenerator.class); - - public DefaultEngineConnBmlResourceGenerator() {} - - @Override - public Map generate(String engineConnType) { - String[] engineConnDistHomes = getEngineConnDistHomeList(engineConnType); - Map resultMap = new HashMap<>(); - for (String path : engineConnDistHomes) { - - File versionFile = new File(path); - logger.info("generate, versionFile:" + path); - if (!versionFile.isDirectory()) { - logger.warn("File is not dir {},skip to upload", path); - continue; - } - String key = versionFile.getName(); - - try { - EngineConnLocalizeResource[] engineConnLocalizeResources = - generateDir(versionFile.getPath()); - resultMap.put(key, engineConnLocalizeResources); - } catch (Throwable t) { - logger.error("Generate dir : " + path + " error, msg : " + t.getMessage(), t); - throw t; - } - } - - return resultMap; - } - - @Override - public EngineConnLocalizeResource[] generate(String engineConnType, String version) { - String path = getEngineConnDistHome(engineConnType, version); - return generateDir(path); - } - - private EngineConnLocalizeResource[] generateDir(String path) { - File distFile = new File(path); - if (!distFile.isDirectory()) { - logger.warn("File is 
not dir {},skip to upload", path); - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.DIST_IRREGULAR_EXIST.getErrorCode(), - path + " is not dir, to delete this file then retry"); - } - logger.info("generateDir, distFile:" + path); - File[] validFiles = - distFile.listFiles( - f -> - !f.getName().endsWith(".zip") - || !new File(path, f.getName().replace(".zip", "")).exists()); - - return Arrays.stream(validFiles) - .map( - file -> { - if (file.isFile()) { - return new EngineConnLocalizeResourceImpl( - file.getPath(), file.getName(), file.lastModified(), file.length()); - } else { - File newFile = new File(path, file.getName() + ".zip"); - if (newFile.exists() && !newFile.delete()) { - throw new EngineConnPluginErrorException( - NO_PERMISSION_FILE.getErrorCode(), - MessageFormat.format(NO_PERMISSION_FILE.getErrorDesc(), newFile)); - } - - ZipUtils.fileToZip(file.getPath(), path, file.getName() + ".zip"); - // If it is a folder, the last update time here is the last update time of the - // folder, not the last update time of - // ZIP.(如果是文件夹,这里的最后更新时间,采用文件夹的最后更新时间,而不是ZIP的最后更新时间.) 
- return new EngineConnLocalizeResourceImpl( - newFile.getPath(), newFile.getName(), file.lastModified(), newFile.length()); - } - }) - .toArray(EngineConnLocalizeResource[]::new); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.java deleted file mode 100644 index 35a46fff60..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.localize; - -import java.util.Map; - -public interface EngineConnBmlResourceGenerator { - String NO_VERSION_MARK = "_default_"; - - String[] getEngineConnTypeListFromDisk(); - - Map generate(String engineConnType); - - EngineConnLocalizeResource[] generate(String engineConnType, String version); -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java deleted file mode 100644 index 4ca366e8bf..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.localize; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.InputStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class EngineConnLocalizeResourceImpl implements EngineConnLocalizeResource { - private static final Logger logger = - LoggerFactory.getLogger(EngineConnLocalizeResourceImpl.class); - - private final String filePath; - private final String fileName; - private final long lastModified; - private final long fileSize; - - public EngineConnLocalizeResourceImpl( - String filePath, String fileName, long lastModified, long fileSize) { - this.filePath = filePath; - this.fileName = fileName; - this.lastModified = lastModified; - this.fileSize = fileSize; - } - - @Override - public InputStream getFileInputStream() { - try { - return new FileInputStream(filePath); - } catch (FileNotFoundException e) { - logger.warn("getFileInputStream failed filePath:[{}]", filePath, e); - } - return null; - } - - public String filePath() { - return filePath; - } - - public String fileName() { - return fileName; - } - - public long lastModified() { - return lastModified; - } - - public long fileSize() { - return fileSize; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java deleted file mode 100644 index 807daa2c97..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.engineplugin.server.loader.EngineConnPluginsLoaderFactory; -import org.apache.linkis.manager.am.exception.AMErrorCode; -import org.apache.linkis.manager.am.util.LinkisUtils; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder; -import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest; -import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResourceGenerator; -import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder; -import org.apache.linkis.manager.engineplugin.common.loader.entity.EngineConnPluginInstance; -import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.rpc.message.annotation.Receiver; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.springframework.beans.factory.annotation.Autowired; -import 
org.springframework.stereotype.Component; - -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Component -public class DefaultEngineConnLaunchService implements EngineConnLaunchService { - - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnLaunchService.class); - - @Autowired private EngineConnResourceGenerator engineConnResourceGenerator; - - private EngineConnLaunchBuilder getEngineLaunchBuilder( - EngineTypeLabel engineTypeLabel, EngineConnBuildRequest engineBuildRequest) { - final EngineConnPluginInstance engineConnPluginInstance; - try { - engineConnPluginInstance = - EngineConnPluginsLoaderFactory.getEngineConnPluginsLoader() - .getEngineConnPlugin(engineTypeLabel); - } catch (Exception e) { - throw new EngineConnPluginErrorException( - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); - } - final EngineConnLaunchBuilder builder = - engineConnPluginInstance.plugin().getEngineConnLaunchBuilder(); - if (builder instanceof JavaProcessEngineConnLaunchBuilder) { - ((JavaProcessEngineConnLaunchBuilder) builder) - .setEngineConnResourceGenerator(engineConnResourceGenerator); - } - builder.setBuildRequest(engineBuildRequest); - return builder; - } - - @Override - @Receiver - public EngineConnLaunchRequest createEngineConnLaunchRequest( - EngineConnBuildRequest engineBuildRequest) { - final Optional engineTypeOption = - engineBuildRequest.labels().stream() - .filter(label -> label instanceof EngineTypeLabel) - .map(label -> (EngineTypeLabel) label) - .findFirst(); - - if (!engineTypeOption.isPresent()) { - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.ETL_REQUESTED.getErrorCode(), - EngineconnCoreErrorCodeSummary.ETL_REQUESTED.getErrorDesc()); - } - - final EngineTypeLabel engineTypeLabel = engineTypeOption.get(); - return LinkisUtils.tryCatch( - () -> getEngineLaunchBuilder(engineTypeLabel, 
engineBuildRequest).buildEngineConn(), - (Throwable t) -> { - logger.error( - String.format( - "Failed to createEngineConnLaunchRequest(%s)", engineBuildRequest.ticketId()), - t); - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.FAILED_CREATE_ELR.getErrorCode(), - String.format( - "%s, %s", - EngineconnCoreErrorCodeSummary.FAILED_CREATE_ELR.getErrorDesc(), - ExceptionUtils.getRootCauseMessage(t))); - }); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java deleted file mode 100644 index 2c0496c071..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.engineplugin.server.loader.EngineConnPluginsLoaderFactory; -import org.apache.linkis.manager.am.exception.AMErrorCode; -import org.apache.linkis.manager.am.exception.AMErrorException; -import org.apache.linkis.manager.common.entity.resource.NodeResource; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.common.loader.entity.EngineConnPluginInstance; -import org.apache.linkis.manager.engineplugin.common.resource.EngineResourceFactory; -import org.apache.linkis.manager.engineplugin.common.resource.EngineResourceRequest; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.rpc.message.annotation.Receiver; - -import org.springframework.stereotype.Component; - -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.ETL_REQUESTED; - -@Component -public class DefaultEngineConnResourceFactoryService implements EngineConnResourceFactoryService { - - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnResourceFactoryService.class); - - @Override - public EngineResourceFactory getResourceFactoryBy(EngineTypeLabel engineType) { - final EngineConnPluginInstance engineConnPluginInstance; - try { - engineConnPluginInstance = - EngineConnPluginsLoaderFactory.getEngineConnPluginsLoader() - .getEngineConnPlugin(engineType); - } catch (Exception e) { - logger.warn("getResourceFactory failed engineType:{}", engineType, e); - throw new AMErrorException( - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); - } - return engineConnPluginInstance.plugin().getEngineResourceFactory(); - } - - @Override - @Receiver - public NodeResource 
createEngineResource(final EngineResourceRequest engineResourceRequest) { - logger.info(String.format("To invoke createEngineResource %s", engineResourceRequest)); - final Optional engineTypeOption = - engineResourceRequest.labels().stream() - .filter(label -> label instanceof EngineTypeLabel) - .map(label -> (EngineTypeLabel) label) - .findFirst(); - - if (!engineTypeOption.isPresent()) { - throw new EngineConnPluginErrorException( - ETL_REQUESTED.getErrorCode(), ETL_REQUESTED.getErrorDesc()); - } - - final EngineTypeLabel engineTypeLabel = engineTypeOption.get(); - return getResourceFactoryBy(engineTypeLabel).createEngineResource(engineResourceRequest); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java deleted file mode 100644 index a33b1afde8..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.bml.client.BmlClient; -import org.apache.linkis.bml.client.BmlClientFactory; -import org.apache.linkis.bml.protocol.BmlUpdateResponse; -import org.apache.linkis.bml.protocol.BmlUploadResponse; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.engineplugin.server.conf.EngineConnPluginConfiguration; -import org.apache.linkis.engineplugin.server.dao.EngineConnBmlResourceDao; -import org.apache.linkis.engineplugin.server.entity.EngineConnBmlResource; -import org.apache.linkis.engineplugin.server.localize.EngineConnBmlResourceGenerator; -import org.apache.linkis.engineplugin.server.localize.EngineConnLocalizeResource; -import org.apache.linkis.manager.common.protocol.bml.BmlResource; -import org.apache.linkis.manager.common.protocol.bml.BmlResource.BmlResourceVisibility; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResource; -import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants; -import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary; -import org.apache.linkis.rpc.message.annotation.Receiver; - -import org.apache.commons.lang3.StringUtils; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import javax.annotation.PostConstruct; - -import java.text.MessageFormat; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static 
org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION; - -@Component -public class DefaultEngineConnResourceService extends EngineConnResourceService { - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnResourceService.class); - - @Autowired private EngineConnBmlResourceGenerator engineConnBmlResourceGenerator; - - @Autowired private EngineConnBmlResourceDao engineConnBmlResourceDao; - - private final BmlClient bmlClient = BmlClientFactory.createBmlClient(); - private boolean isRefreshing = false; - - @PostConstruct - @Override - public void init() { - if ((boolean) EngineConnPluginConfiguration.ENGINE_CONN_DIST_LOAD_ENABLE.getValue()) { - logger.info("Start to refresh all engineconn plugins when inited."); - refreshAll(false, false); - } - } - - private BmlResource uploadToBml(final EngineConnLocalizeResource localizeResource) { - final BmlUploadResponse response = - bmlClient.uploadResource( - Utils.getJvmUser(), localizeResource.fileName(), localizeResource.getFileInputStream()); - final BmlResource bmlResource = new BmlResource(); - bmlResource.setResourceId(response.resourceId()); - bmlResource.setVersion(response.version()); - return bmlResource; - } - - private BmlResource uploadToBml( - final EngineConnLocalizeResource localizeResource, final String resourceId) { - final BmlUpdateResponse response = - bmlClient.updateResource( - Utils.getJvmUser(), - resourceId, - localizeResource.fileName(), - localizeResource.getFileInputStream()); - final BmlResource bmlResource = new BmlResource(); - bmlResource.setResourceId(response.resourceId()); - bmlResource.setVersion(response.version()); - return bmlResource; - } - - @Override - public void refreshAll(boolean iswait, boolean force) { - if (!isRefreshing) { - synchronized (this) { - if (!isRefreshing) { - - final Runnable refreshTask = - new Runnable() { - @Override - public void run() { - isRefreshing = true; - 
logger.info("Try to initialize the dist resources of all EngineConns. "); - String[] engineConnTypeList = - engineConnBmlResourceGenerator.getEngineConnTypeListFromDisk(); - for (String engineConnType : engineConnTypeList) { - try { - logger.info( - "Try to initialize all versions of {}EngineConn.", engineConnType); - Map version2Localize = - engineConnBmlResourceGenerator.generate(engineConnType); - for (Map.Entry entry : - version2Localize.entrySet()) { - logger.info( - "Try to initialize {}EngineConn-{}.", engineConnType, entry.getKey()); - refresh(entry.getValue(), engineConnType, entry.getKey(), force); - } - - } catch (Exception t) { - if (!iswait - && EngineConnPluginConfiguration.ENABLED_BML_UPLOAD_FAILED_EXIT - .getValue()) { - logger.error("Failed to upload engine conn to bml, now exit!", t); - System.exit(1); - } - logger.error("Failed to upload engine conn to bml", t); - } - } - isRefreshing = false; - } - }; - Future future = Utils.defaultScheduler().submit(refreshTask); - - if (iswait) { - try { - future.get(); - } catch (InterruptedException | ExecutionException e) { - logger.info("DefaultEngineConnResourceService refreshTask execution failed", e); - } - } else { - logger.info("DefaultEngineConnResourceService IsRefreshing EngineConns..."); - } - } - } - } - } - - @Receiver - public boolean refreshAll(final RefreshAllEngineConnResourceRequest engineConnRefreshAllRequest) { - logger.info("Start to refresh all engineconn plugins."); - refreshAll(true, false); - return true; - } - - @Receiver - @Override - public boolean refresh( - final RefreshEngineConnResourceRequest engineConnRefreshRequest, final boolean force) { - final String engineConnType = engineConnRefreshRequest.getEngineConnType(); - final String version = engineConnRefreshRequest.getVersion(); - if ("*".equals(version) || StringUtils.isEmpty(version)) { - logger.info("Try to refresh all versions of {}EngineConn.", engineConnType); - Map version2Localize = - 
engineConnBmlResourceGenerator.generate(engineConnType); - for (Map.Entry entry : version2Localize.entrySet()) { - logger.info("Try to initialize {}EngineConn-{}.", engineConnType, entry.getKey()); - refresh(entry.getValue(), engineConnType, entry.getKey(), force); - } - - } else { - logger.info("Try to refresh {}EngineConn-{}.", engineConnType, version); - EngineConnLocalizeResource[] localize = - engineConnBmlResourceGenerator.generate(engineConnType, version); - refresh(localize, engineConnType, version, force); - } - return true; - } - - private void refresh( - final EngineConnLocalizeResource[] localize, - final String engineConnType, - final String version, - final boolean force) { - final List engineConnBmlResources = - engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, version); - - if (Stream.of(localize) - .filter( - localizeResource -> - StringUtils.equals( - LaunchConstants.ENGINE_CONN_CONF_DIR_NAME() + ".zip", - localizeResource.fileName()) - || StringUtils.equals( - LaunchConstants.ENGINE_CONN_LIB_DIR_NAME() + ".zip", - localizeResource.fileName())) - .count() - < 2) { - - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.LIB_CONF_DIR_NECESSARY.getErrorCode(), - MessageFormat.format( - EngineconnCoreErrorCodeSummary.LIB_CONF_DIR_NECESSARY.getErrorDesc(), - engineConnType)); - } - - for (EngineConnLocalizeResource localizeResource : localize) { - - Optional resource = - engineConnBmlResources.stream() - .filter(r -> r.getFileName().equals(localizeResource.fileName())) - .findFirst(); - if (!resource.isPresent()) { - logger.info( - "Ready to upload a new bmlResource for {}EngineConn-{}. 
path: {}", - engineConnType, - version, - localizeResource.fileName()); - final BmlResource bmlResource = uploadToBml(localizeResource); - final EngineConnBmlResource engineConnBmlResource = new EngineConnBmlResource(); - engineConnBmlResource.setBmlResourceId(bmlResource.getResourceId()); - engineConnBmlResource.setBmlResourceVersion(bmlResource.getVersion()); - engineConnBmlResource.setCreateTime(new Date()); - engineConnBmlResource.setLastUpdateTime(new Date()); - engineConnBmlResource.setEngineConnType(engineConnType); - engineConnBmlResource.setFileName(localizeResource.fileName()); - engineConnBmlResource.setFileSize(localizeResource.fileSize()); - engineConnBmlResource.setLastModified(localizeResource.lastModified()); - engineConnBmlResource.setVersion(version); - engineConnBmlResourceDao.save(engineConnBmlResource); - } else { - boolean isChanged = - resource.get().getFileSize() != localizeResource.fileSize() - || resource.get().getLastModified() != localizeResource.lastModified(); - - if (isChanged || (!isChanged && force)) { - if (!isChanged && force) { - logger.info( - "The file has no change in {}EngineConn-{}, path: {}, but force to refresh", - engineConnType, - version, - localizeResource.fileName()); - } - logger.info( - "Ready to upload a refreshed bmlResource for {}EngineConn-{}. 
path: {}", - engineConnType, - version, - localizeResource.fileName()); - final EngineConnBmlResource engineConnBmlResource = resource.get(); - final BmlResource bmlResource = - uploadToBml(localizeResource, engineConnBmlResource.getBmlResourceId()); - engineConnBmlResource.setBmlResourceVersion(bmlResource.getVersion()); - engineConnBmlResource.setLastUpdateTime(new Date()); - engineConnBmlResource.setFileSize(localizeResource.fileSize()); - engineConnBmlResource.setLastModified(localizeResource.lastModified()); - engineConnBmlResourceDao.update(engineConnBmlResource); - } else { - logger.info( - "The file has no change in {}EngineConn-{}, path: {}", - engineConnType, - version, - localizeResource.fileName()); - } - } - } - } - - @Receiver - @Override - public EngineConnResource getEngineConnBMLResources( - final GetEngineConnResourceRequest engineConnBMLResourceRequest) { - final String engineConnType = engineConnBMLResourceRequest.getEngineConnType(); - final String version = engineConnBMLResourceRequest.getVersion(); - - List engineConnBmlResources = - engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, version); - if (engineConnBmlResources.size() == 0 - && (boolean) EngineConnPluginConfiguration.EC_BML_VERSION_MAY_WITH_PREFIX_V.getValue()) { - logger.info("Try to get engine conn bml resource with prefex v"); - engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, "v" + version); - } - - Optional confBmlResourceMap = - engineConnBmlResources.stream() - .filter( - r -> r.getFileName().equals(LaunchConstants.ENGINE_CONN_CONF_DIR_NAME() + ".zip")) - .map(this::parseToBmlResource) - .findFirst(); - Optional libBmlResourceMap = - engineConnBmlResources.stream() - .filter( - r -> r.getFileName().equals(LaunchConstants.ENGINE_CONN_LIB_DIR_NAME() + ".zip")) - .map(this::parseToBmlResource) - .findFirst(); - - if (!confBmlResourceMap.isPresent() || !libBmlResourceMap.isPresent()) { - throw new EngineConnPluginErrorException( - 
EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION.getErrorCode(), - EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION.getErrorDesc()); - } - final BmlResource confBmlResource = confBmlResourceMap.get(); - final BmlResource libBmlResource = libBmlResourceMap.get(); - BmlResource[] otherBmlResources = - engineConnBmlResources.stream() - .filter( - r -> - !r.getFileName().equals(LaunchConstants.ENGINE_CONN_CONF_DIR_NAME() + ".zip") - || r.getFileName() - .equals(LaunchConstants.ENGINE_CONN_LIB_DIR_NAME() + ".zip")) - .map(this::parseToBmlResource) - .toArray(BmlResource[]::new); - - return new EngineConnResource() { - @Override - public BmlResource getConfBmlResource() { - return confBmlResource; - } - - @Override - public BmlResource getLibBmlResource() { - return libBmlResource; - } - - @Override - public BmlResource[] getOtherBmlResources() { - return otherBmlResources; - } - }; - } - - private BmlResource parseToBmlResource(final EngineConnBmlResource engineConnBmlResource) { - final BmlResource bmlResource = new BmlResource(); - bmlResource.setFileName(engineConnBmlResource.getFileName()); - bmlResource.setOwner(Utils.getJvmUser()); - bmlResource.setResourceId(engineConnBmlResource.getBmlResourceId()); - bmlResource.setVersion(engineConnBmlResource.getBmlResourceVersion()); - bmlResource.setVisibility(BmlResourceVisibility.Public); - return bmlResource; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java deleted file mode 100644 index 5f2fb6cf69..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResource; -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResourceGenerator; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; - -public abstract class EngineConnResourceService implements EngineConnResourceGenerator { - - public abstract void init(); - - public abstract void refreshAll(boolean wait, boolean force); - - public abstract boolean refresh( - RefreshEngineConnResourceRequest engineConnRefreshRequest, boolean force); - - public abstract EngineConnResource getEngineConnBMLResources( - GetEngineConnResourceRequest engineConnBMLResourceRequest); - - @Override - public EngineConnResource getEngineConnBMLResources(EngineTypeLabel engineTypeLabel) { - GetEngineConnResourceRequest engineConnBMLResourceRequest = new GetEngineConnResourceRequest(); - engineConnBMLResourceRequest.setEngineConnType(engineTypeLabel.getEngineType()); - engineConnBMLResourceRequest.setVersion(engineTypeLabel.getVersion()); - return getEngineConnBMLResources(engineConnBMLResourceRequest); - } -} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java deleted file mode 100644 index 6bd41d2599..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.protocol.message.RequestMethod; -import org.apache.linkis.protocol.message.RequestProtocol; - -public class RefreshAllEngineConnResourceRequest implements RequestProtocol, RequestMethod { - @Override - public String method() { - return "/enginePlugin/engineConn/refreshAll"; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java index 069afd4f1b..803151d534 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java @@ -94,12 +94,7 @@ public void deleteEnginePluginBML(String ecType, String version, String username log.info("file {} delete success", ecType); } } catch (Exception e) { - log.warn( - "deleteEnginePluginBML failed ecType:[{}] version:[{}] username:[{}]", - ecType, - version, - username, - e); + e.printStackTrace(); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java index 9fc63ebcc8..064d61a6fb 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java @@ -24,5 +24,6 @@ public class LinkisManagerApplication { public static void main(String[] args) throws ReflectiveOperationException { LinkisBaseServerApp.main(args); + // DataWorkCloudApplication.main(args); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java index 0f018ca9de..5164542445 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java @@ -23,10 +23,54 @@ import org.apache.linkis.manager.common.entity.enumeration.MaintainType; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; public class AMConfiguration { + // The configuration key for the YARN queue name. + public static final String YARN_QUEUE_NAME_CONFIG_KEY = "wds.linkis.rm.yarnqueue"; + + // Identifier for cross-queue tasks. + public static final String CROSS_QUEUE = "crossQueue"; + + // Identifier for across-cluster tasks. + public static final String ACROSS_CLUSTER_TASK = "acrossClusterTask"; + + // Identifier for priority clusters. + public static final String PRIORITY_CLUSTER = "priorityCluster"; + + // Target identifier for distinguishing target clusters. + public static final String PRIORITY_CLUSTER_TARGET = "bdp"; + + // Origin identifier for distinguishing source clusters. + public static final String PRIORITY_CLUSTER_ORIGIN = "bdap"; + + // Configuration key for the target cluster CPU threshold. 
+ public static final String TARGET_CPU_THRESHOLD = "targetCPUThreshold"; + + // Configuration key for the target cluster memory threshold. + public static final String TARGET_MEMORY_THRESHOLD = "targetMemoryThreshold"; + + // Configuration key for the target cluster CPU percentage threshold. + public static final String TARGET_CPU_PERCENTAGE_THRESHOLD = "targetCPUPercentageThreshold"; + + // Configuration key for the target cluster memory percentage threshold. + public static final String TARGET_MEMORY_PERCENTAGE_THRESHOLD = "targetMemoryPercentageThreshold"; + + // Configuration key for the origin cluster CPU percentage threshold. + public static final String ORIGIN_CPU_PERCENTAGE_THRESHOLD = "originCPUPercentageThreshold"; + + // Configuration key for the origin cluster memory percentage threshold. + public static final String ORIGIN_MEMORY_PERCENTAGE_THRESHOLD = "originMemoryPercentageThreshold"; + + public static final double ACROSS_CLUSTER_TOTAL_MEMORY_PERCENTAGE_THRESHOLD = + CommonVars.apply("linkis.yarn.across.cluster.memory.threshold", 0.8).getValue(); + + public static final double ACROSS_CLUSTER_TOTAL_CPU_PERCENTAGE_THRESHOLD = + CommonVars.apply("linkis.yarn.across.cluster.cpu.threshold", 0.8).getValue(); + public static final CommonVars ECM_ADMIN_OPERATIONS = CommonVars.apply("wds.linkis.governance.admin.operations", ""); @@ -39,50 +83,35 @@ public class AMConfiguration { public static final CommonVars ENGINE_REUSE_MAX_TIME = CommonVars.apply("wds.linkis.manager.am.engine.reuse.max.time", new TimeType("5m")); - public static final CommonVars ENGINE_REUSE_COUNT_LIMIT = - CommonVars.apply("wds.linkis.manager.am.engine.reuse.count.limit", 2); - - public static final CommonVars NODE_STATUS_HEARTBEAT_TIME = - CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("3m")); - - public static final CommonVars NODE_HEARTBEAT_MAX_UPDATE_TIME = - CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("5m")); + public static final 
Integer ENGINE_REUSE_COUNT_LIMIT = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.count.limit", 2).getValue(); public static final CommonVars DEFAULT_NODE_OWNER = CommonVars.apply("wds.linkis.manager.am.default.node.owner", "hadoop"); - public static final CommonVars STOP_ENGINE_WAIT = - CommonVars.apply("wds.linkis.manager.am.stop.engine.wait", new TimeType("5m")); - - public static final CommonVars STOP_EM_WAIT = - CommonVars.apply("wds.linkis.manager.am.stop.em.wait", new TimeType("5m")); - - public static final CommonVars EM_LABEL_INIT_WAIT = - CommonVars.apply("wds.linkis.manager.am.em.label.init.wait", new TimeType("5m")); - public static final CommonVars EM_NEW_WAIT_MILLS = CommonVars.apply("wds.linkis.manager.am.em.new.wait.mills", 1000 * 60L); - public static final CommonVars ENGINECONN_DEBUG_ENABLED = - CommonVars.apply("wds.linkis.engineconn.debug.mode.enable", false); - public static final CommonVars MULTI_USER_ENGINE_TYPES = CommonVars.apply( "wds.linkis.multi.user.engine.types", - "jdbc,es,presto,io_file,appconn,openlookeng,trino,nebula,hbase"); + "jdbc,es,presto,io_file,appconn,openlookeng,trino,jobserver,nebula,hbase,doris"); public static final CommonVars ALLOW_BATCH_KILL_ENGINE_TYPES = CommonVars.apply("wds.linkis.allow.batch.kill.engine.types", "spark,hive,python"); + public static final CommonVars UNALLOW_BATCH_KILL_ENGINE_TYPES = + CommonVars.apply("wds.linkis.allow.batch.kill.engine.types", "trino,appconn,io_file"); public static final CommonVars MULTI_USER_ENGINE_USER = CommonVars.apply("wds.linkis.multi.user.engine.user", getDefaultMultiEngineUser()); public static final CommonVars ENGINE_LOCKER_MAX_TIME = CommonVars.apply("wds.linkis.manager.am.engine.locker.max.time", 1000 * 60 * 5); - public static final CommonVars AM_CAN_RETRY_LOGS = + public static final String AM_CAN_RETRY_LOGS = CommonVars.apply( - "wds.linkis.manager.am.can.retry.logs", "already in use;Cannot allocate memory"); + "wds.linkis.manager.am.can.retry.logs", 
"already in use;Cannot allocate memory") + .getValue(); public static final int ASK_ENGINE_ASYNC_MAX_THREAD_SIZE = CommonVars.apply("wds.linkis.ecm.launch.max.thread.size", 200).getValue(); @@ -102,11 +131,56 @@ public class AMConfiguration { public static final Boolean NODE_SELECT_HOTSPOT_EXCLUSION_RULE = CommonVars.apply("linkis.node.select.hotspot.exclusion.rule.enable", true).getValue(); + public static final boolean EC_REUSE_WITH_RESOURCE_RULE_ENABLE = + CommonVars.apply("linkis.ec.reuse.with.resource.rule.enable", false).getValue(); + + public static final String EC_REUSE_WITH_RESOURCE_WITH_ECS = + CommonVars.apply("linkis.ec.reuse.with.resource.with.ecs", "spark,hive,shell,python") + .getValue(); + + public static final String SUPPORT_CLUSTER_RULE_EC_TYPES = + CommonVars.apply("linkis.support.cluster.rule.ec.types", "").getValue(); + + public static final boolean HIVE_CLUSTER_EC_EXECUTE_ONCE_RULE_ENABLE = + CommonVars.apply("linkis.hive.cluster.ec.execute.once.rule.enable", true).getValue(); + + public static final String LONG_LIVED_LABEL = + CommonVars.apply("linkis.label.node.long.lived.label.keys", "tenant|yarnCluster").getValue(); + + public static final String TMP_LIVED_LABEL = + CommonVars.apply("linkis.label.node.tmp.lived.label.keys", "taskId").getValue(); + + public static final boolean COMBINED_WITHOUT_YARN_DEFAULT = + CommonVars.apply("linkis.combined.without.yarn.default", true).getValue(); + + public static final Map AM_ENGINE_ASK_MAX_NUMBER = new HashMap<>(); + + static { + String keyValue = + CommonVars.apply("linkis.am.engine.ask.max.number", "appconn=5,trino=10").getValue(); + String[] keyValuePairs = keyValue.split(","); + for (String pair : keyValuePairs) { + String[] array = pair.split("="); + if (array.length != 2) { + throw new IllegalArgumentException( + "linkis.am.engine.ask.max.number value is illegal, value is " + pair); + } else { + AM_ENGINE_ASK_MAX_NUMBER.put(array[0], Integer.parseInt(array[1])); + } + } + } + + public static 
final boolean AM_ECM_RESET_RESOURCE = + CommonVars.apply("linkis.am.ecm.reset.resource.enable", true).getValue(); + + public static final boolean AM_USER_RESET_RESOURCE = + CommonVars.apply("linkis.am.user.reset.resource.enable", true).getValue(); + public static String getDefaultMultiEngineUser() { String jvmUser = Utils.getJvmUser(); return String.format( - "{jdbc:\"%s\", es: \"%s\", presto:\"%s\", appconn:\"%s\", openlookeng:\"%s\", trino:\"%s\", nebula:\"%s\", hbase:\"%s\",io_file:\"root\"}", - jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser); + "{jdbc:\"%s\", es: \"%s\", presto:\"%s\", appconn:\"%s\", openlookeng:\"%s\", trino:\"%s\", nebula:\"%s\",doris:\"%s\", hbase:\"%s\", jobserver:\"%s\",io_file:\"root\"}", + jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser); } public static boolean isMultiUserEngine(String engineType) { @@ -123,4 +197,14 @@ public static boolean isAllowKilledEngineType(String engineType) { Arrays.stream(allowBatchKillEngine).filter(e -> e.equalsIgnoreCase(engineType)).findFirst(); return findResult.isPresent(); } + + public static boolean isUnAllowKilledEngineType(String engineType) { + String[] unAllowBatchKillEngine = + AMConfiguration.UNALLOW_BATCH_KILL_ENGINE_TYPES.getValue().split(","); + Optional findResult = + Arrays.stream(unAllowBatchKillEngine) + .filter(e -> e.equalsIgnoreCase(engineType)) + .findFirst(); + return findResult.isPresent(); + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java index 039704351e..fa9843d955 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java @@ -18,7 +18,9 @@ package org.apache.linkis.manager.am.conf; import org.apache.linkis.common.conf.Configuration; -import org.apache.linkis.governance.common.protocol.conf.*; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfigWithGlobalConfig; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryGlobalConfig; +import org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.protocol.CacheableProtocol; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java index 3575491087..1492c6569f 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java @@ -17,11 +17,13 @@ package org.apache.linkis.manager.am.conf; -import org.apache.linkis.manager.am.util.LinkisUtils; +import org.apache.linkis.common.utils.LinkisUtils; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.springframework.stereotype.Component; + import java.util.HashMap; import java.util.List; import java.util.Map; @@ -32,6 +34,7 @@ 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@Component class DefaultEngineConnConfigurationService implements EngineConnConfigurationService { private static final Logger logger = diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java index c3a35f7921..bce581a2e9 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java @@ -28,12 +28,6 @@ public class ManagerMonitorConf { public static final CommonVars NODE_HEARTBEAT_MAX_UPDATE_TIME = CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("12m")); - public static final CommonVars ENGINE_KILL_TIMEOUT = - CommonVars.apply("wds.linkis.manager.am.engine.kill.timeout", new TimeType("2m")); - - public static final CommonVars EM_KILL_TIMEOUT = - CommonVars.apply("wds.linkis.manager.am.em.kill.timeout", new TimeType("2m")); - public static final CommonVars MANAGER_MONITOR_ASYNC_POLL_SIZE = CommonVars.apply("wds.linkis.manager.monitor.async.poll.size", 5); @@ -42,4 +36,7 @@ public class ManagerMonitorConf { public static final CommonVars ECM_HEARTBEAT_MAX_UPDATE_TIME = CommonVars.apply("wds.linkis.manager.am.ecm.heartbeat", new TimeType("5m")); + + public static final CommonVars ACROSS_QUEUES_RESOURCE_SHOW_SWITCH_ON = + CommonVars.apply("wds.linkis.manager.across.resource.show.switch.on", false); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java index 4736433ada..c7620157a1 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java @@ -23,7 +23,6 @@ import org.apache.linkis.manager.common.entity.metrics.NodeOverLoadInfo; import org.apache.linkis.manager.common.entity.metrics.NodeTaskInfo; import org.apache.linkis.manager.common.entity.node.AMNode; -import org.apache.linkis.manager.service.common.metrics.MetricsConverter; import org.apache.linkis.server.BDPJettyServerHelper; import org.apache.commons.lang3.StringUtils; @@ -55,7 +54,7 @@ public NodeTaskInfo parseTaskInfo(NodeMetrics nodeMetrics) { return taskInfo; } } catch (IOException e) { - logger.error("parse task info failed", e); + logger.warn("parse task info failed", e); } } return null; @@ -68,7 +67,7 @@ public NodeHealthyInfo parseHealthyInfo(NodeMetrics nodeMetrics) { try { return BDPJettyServerHelper.jacksonJson().readValue(healthyInfo, NodeHealthyInfo.class); } catch (IOException e) { - logger.error("parse healthy info failed", e); + logger.warn("parse healthy info failed", e); } } return null; @@ -81,7 +80,7 @@ public NodeOverLoadInfo parseOverLoadInfo(NodeMetrics nodeMetrics) { try { return BDPJettyServerHelper.jacksonJson().readValue(overLoad, NodeOverLoadInfo.class); } catch (IOException e) { - logger.error("parse over load info failed", e); + logger.warn("parse over load info failed", e); } } return null; @@ -97,7 +96,7 @@ public String convertTaskInfo(NodeTaskInfo nodeTaskInfo) { try { return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeTaskInfo); } catch (JsonProcessingException e) { - logger.error("convert 
task info failed", e); + logger.warn("convert task info failed", e); } return null; } @@ -107,7 +106,7 @@ public String convertHealthyInfo(NodeHealthyInfo nodeHealthyInfo) { try { return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeHealthyInfo); } catch (JsonProcessingException e) { - logger.error("convert healthy info failed", e); + logger.warn("convert healthy info failed", e); } return null; } @@ -117,7 +116,7 @@ public String convertOverLoadInfo(NodeOverLoadInfo nodeOverLoadInfo) { try { return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeOverLoadInfo); } catch (JsonProcessingException e) { - logger.error("convert over load info failed", e); + logger.warn("convert over load info failed", e); } return null; } @@ -129,7 +128,9 @@ public int convertStatus(NodeStatus nodeStatus) { @Override public AMNode fillMetricsToNode(AMNode amNode, NodeMetrics metrics) { - if (metrics == null) return amNode; + if (metrics == null) { + return amNode; + } amNode.setNodeStatus(parseStatus(metrics)); amNode.setNodeTaskInfo(parseTaskInfo(metrics)); amNode.setNodeHealthyInfo(parseHealthyInfo(metrics)); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/metrics/MetricsConverter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java similarity index 97% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/metrics/MetricsConverter.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java index acc46510cd..e84b577f45 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/metrics/MetricsConverter.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.manager.service.common.metrics; +package org.apache.linkis.manager.am.converter; import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java index 4f3badbb9a..727fcc3133 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java @@ -17,10 +17,9 @@ package org.apache.linkis.manager.am.exception; -import org.apache.linkis.common.exception.ExceptionLevel; -import org.apache.linkis.common.exception.LinkisRuntimeException; +import org.apache.linkis.common.exception.ErrorException; -public class AMErrorException extends LinkisRuntimeException { +public class AMErrorException extends ErrorException { public AMErrorException(int errCode, String desc) { super(errCode, desc); @@ -30,9 +29,4 @@ public AMErrorException(int errCode, String desc, Throwable t) { this(errCode, desc); this.initCause(t); } - - @Override - public ExceptionLevel getLevel() { - return ExceptionLevel.ERROR; - } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java index a0928db981..13e45832c8 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java @@ -21,7 +21,6 @@ import org.apache.linkis.manager.label.entity.em.EMInstanceLabel; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; -import org.apache.linkis.manager.service.common.label.LabelChecker; import org.springframework.stereotype.Component; @@ -45,7 +44,6 @@ public boolean checkEMLabel(List> labelList) { @Override public boolean checkCorrespondingLabel(List> labelList, Class... clazz) { - // TODO: 是否需要做子类的判断 List> classes = Arrays.asList(clazz); return labelList.stream() .filter(Objects::nonNull) diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java index 5fa8e7db12..8820bc0be8 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java @@ -18,7 +18,9 @@ package org.apache.linkis.manager.am.label; import org.apache.linkis.governance.common.conf.GovernanceCommonConf; -import org.apache.linkis.manager.label.entity.*; +import org.apache.linkis.manager.label.entity.EMNodeLabel; +import 
org.apache.linkis.manager.label.entity.EngineNodeLabel; +import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.node.AliasServiceInstanceLabel; import org.apache.linkis.manager.service.common.label.LabelFilter; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelChecker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java similarity index 95% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelChecker.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java index 87dab1f333..0c1e27d086 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelChecker.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.service.common.label; +package org.apache.linkis.manager.am.label; import org.apache.linkis.manager.label.entity.Label; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java index 4fbe6894f4..b8ed766072 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java @@ -38,8 +38,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static scala.collection.JavaConverters.*; - @Component public class MultiUserEngineReuseLabelChooser implements EngineReuseLabelChooser { private static final Logger logger = diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java index 00adf1492b..fe93a03411 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java @@ -17,13 +17,13 @@ package org.apache.linkis.manager.am.locker; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; import org.apache.linkis.manager.common.entity.node.AMEngineNode; import 
org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.protocol.RequestEngineLock; import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; import org.apache.linkis.manager.common.protocol.RequestManagerUnlock; import org.apache.linkis.manager.common.protocol.engine.EngineLockType; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; import org.apache.linkis.rpc.message.annotation.Receiver; import org.springframework.beans.factory.annotation.Autowired; @@ -58,14 +58,14 @@ public void releaseLock(RequestManagerUnlock requestManagerUnlock) { logger.info( String.format( "client%s Start to unlock engine %s", - requestManagerUnlock.getClientInstance(), requestManagerUnlock.getEngineInstance())); + requestManagerUnlock.clientInstance(), requestManagerUnlock.engineInstance())); AMEngineNode engineNode = new AMEngineNode(); - engineNode.setServiceInstance(requestManagerUnlock.getEngineInstance()); - releaseLock(engineNode, requestManagerUnlock.getLock()); + engineNode.setServiceInstance(requestManagerUnlock.engineInstance()); + releaseLock(engineNode, requestManagerUnlock.lock()); logger.info( String.format( "client%s Finished to unlock engine %s", - requestManagerUnlock.getClientInstance(), requestManagerUnlock.getEngineInstance())); + requestManagerUnlock.clientInstance(), requestManagerUnlock.engineInstance())); } catch (Exception e) { logger.error("release lock failed", e); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java index cf66e88e5d..691aa635a4 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java @@ -18,6 +18,8 @@ package org.apache.linkis.manager.am.manager; import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.am.converter.MetricsConverter; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; import org.apache.linkis.manager.common.entity.node.*; import org.apache.linkis.manager.common.entity.persistence.PersistenceNodeEntity; @@ -30,8 +32,6 @@ import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; import org.apache.linkis.manager.rm.ResourceInfo; import org.apache.linkis.manager.rm.service.ResourceManager; -import org.apache.linkis.manager.service.common.metrics.MetricsConverter; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -150,7 +150,7 @@ public EMNode[] getEMNodes(ScoreServiceInstance[] scoreServiceInstances) { .filter(metrics -> metrics.getServiceInstance().equals(emNode.getServiceInstance())) .findFirst(); Optional optionRMNode = - resourceInfo.getResourceInfo().stream() + resourceInfo.resourceInfo().stream() .filter(rmNode -> rmNode.getServiceInstance().equals(emNode.getServiceInstance())) .findFirst(); @@ -171,7 +171,7 @@ public EMNode getEM(ServiceInstance serviceInstance) { emNode.setOwner(node.getOwner()); emNode.setServiceInstance(node.getServiceInstance()); if (node instanceof PersistenceNodeEntity) { - emNode.setStartTime(((PersistenceNodeEntity) node).getStartTime()); + emNode.setStartTime(node.getStartTime()); } emNode.setMark(emNode.getMark()); metricsConverter.fillMetricsToNode(emNode, nodeMetricManagerPersistence.getNodeMetrics(emNode)); @@ -197,7 +197,7 @@ public void pauseEM(ServiceInstance serviceInstance) {} /** * 
1. request engineManager to launch engine * - * @param engineBuildRequest + * @param engineConnLaunchRequest * @param emNode * @return */ diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java index 14d548ef77..02b143d5cd 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java @@ -20,11 +20,12 @@ import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.common.exception.LinkisRetryException; import org.apache.linkis.manager.am.conf.AMConfiguration; +import org.apache.linkis.manager.am.converter.MetricsConverter; import org.apache.linkis.manager.am.exception.AMErrorCode; import org.apache.linkis.manager.am.exception.AMErrorException; import org.apache.linkis.manager.am.locker.EngineNodeLocker; -import org.apache.linkis.manager.am.utils.DefaultRetryHandler; -import org.apache.linkis.manager.am.utils.RetryHandler; +import org.apache.linkis.manager.am.pointer.EngineNodePointer; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; import org.apache.linkis.manager.common.constant.AMConstant; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; @@ -41,11 +42,12 @@ import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; import org.apache.linkis.manager.rm.ResourceInfo; import org.apache.linkis.manager.rm.service.ResourceManager; -import org.apache.linkis.manager.service.common.metrics.MetricsConverter; -import 
org.apache.linkis.manager.service.common.pointer.EngineNodePointer; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; + +import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.retry.annotation.Backoff; +import org.springframework.retry.annotation.Retryable; import org.springframework.stereotype.Service; import java.lang.reflect.UndeclaredThrowableException; @@ -104,9 +106,12 @@ public List listEngines(String user) { return nodes; } + @Retryable( + value = {feign.RetryableException.class, UndeclaredThrowableException.class}, + maxAttempts = 5, + backoff = @Backoff(delay = 10000)) @Override public EngineNode getEngineNodeInfo(EngineNode engineNode) { - /** Change the EngineNode to correspond to real-time requests?(修改为实时请求对应的EngineNode) */ EngineNodePointer engine = nodePointerBuilder.buildEngineNodePointer(engineNode); NodeHeartbeatMsg heartMsg = engine.getNodeHeartbeatMsg(); engineNode.setNodeHealthyInfo(heartMsg.getHealthyInfo()); @@ -155,7 +160,7 @@ public EngineNode switchEngine(EngineNode engineNode) { @Override public EngineNode reuseEngine(EngineNode engineNode) { EngineNode node = getEngineNodeInfo(engineNode); - if (!NodeStatus.isAvailable(node.getNodeStatus())) { + if (node == null || !NodeStatus.isAvailable(node.getNodeStatus())) { return null; } if (!NodeStatus.isLocked(node.getNodeStatus())) { @@ -183,22 +188,8 @@ public EngineNode reuseEngine(EngineNode engineNode) { */ @Override public EngineNode useEngine(EngineNode engineNode, long timeout) { - RetryHandler retryHandler = new DefaultRetryHandler(); - retryHandler.addRetryException(feign.RetryableException.class); - retryHandler.addRetryException(UndeclaredThrowableException.class); // wait until engine to be available - EngineNode node = retryHandler.retry(() -> getEngineNodeInfo(engineNode), "getEngineNodeInfo"); - long retryEndTime = System.currentTimeMillis() + 60 * 1000; - while ((node == 
null || !NodeStatus.isAvailable(node.getNodeStatus())) - && System.currentTimeMillis() < retryEndTime) { - node = retryHandler.retry(() -> getEngineNodeInfo(engineNode), "getEngineNodeInfo"); - try { - Thread.sleep(5 * 1000); - } catch (InterruptedException e) { - // ignore - } - } - + EngineNode node = getEngineNodeInfo(engineNode); if (node == null || !NodeStatus.isAvailable(node.getNodeStatus())) { return null; } @@ -216,8 +207,13 @@ public EngineNode useEngine(EngineNode engineNode, long timeout) { } } + @Override + public EngineNode useEngine(EngineNode engineNode) { + return useEngine(engineNode, AMConfiguration.ENGINE_LOCKER_MAX_TIME.getValue()); + } + /** - * Get detailed engine information from the persistence //TODO 是否增加owner到node + * Get detailed engine information from the persistence * * @param scoreServiceInstances * @return @@ -227,8 +223,9 @@ public EngineNode[] getEngineNodes(ScoreServiceInstance[] scoreServiceInstances) if (scoreServiceInstances == null || scoreServiceInstances.length == 0) { return null; } + List scoreServiceInstancesList = Arrays.asList(scoreServiceInstances); EngineNode[] engineNodes = - Arrays.stream(scoreServiceInstances) + scoreServiceInstancesList.stream() .map( scoreServiceInstance -> { AMEngineNode engineNode = new AMEngineNode(); @@ -237,42 +234,48 @@ public EngineNode[] getEngineNodes(ScoreServiceInstance[] scoreServiceInstances) return engineNode; }) .toArray(EngineNode[]::new); - // 1. 
add nodeMetrics 2 add RM info - ServiceInstance[] serviceInstances = - Arrays.stream(scoreServiceInstances) + + List serviceInstancesList = + scoreServiceInstancesList.stream() .map(ScoreServiceInstance::getServiceInstance) - .toArray(ServiceInstance[]::new); - ResourceInfo resourceInfo = resourceManager.getResourceInfo(serviceInstances); - - List nodeMetrics = - nodeMetricManagerPersistence.getNodeMetrics( - Arrays.stream(engineNodes).collect(Collectors.toList())); - Arrays.stream(engineNodes) - .forEach( - engineNode -> { - Optional optionMetrics = - nodeMetrics.stream() - .filter( - nodeMetric -> - nodeMetric - .getServiceInstance() - .equals(engineNode.getServiceInstance())) - .findFirst(); - - Optional optionRMNode = - resourceInfo.getResourceInfo().stream() - .filter( - resourceNode -> - resourceNode - .getServiceInstance() - .equals(engineNode.getServiceInstance())) - .findFirst(); - - optionMetrics.ifPresent( - metrics -> metricsConverter.fillMetricsToNode(engineNode, metrics)); - optionRMNode.ifPresent( - rmNode -> engineNode.setNodeResource(rmNode.getNodeResource())); - }); + .collect(Collectors.toList()); + + try { + ResourceInfo resourceInfo = + resourceManager.getResourceInfo(serviceInstancesList.toArray(new ServiceInstance[0])); + + if (serviceInstancesList.isEmpty()) { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, "Service instances cannot be empty."); + } + + List nodeMetrics = + nodeMetricManagerPersistence.getNodeMetrics(Arrays.asList(engineNodes)); + + for (EngineNode engineNode : engineNodes) { + Optional optionMetrics = + nodeMetrics.stream() + .filter( + nodeMetric -> + nodeMetric.getServiceInstance().equals(engineNode.getServiceInstance())) + .findFirst(); + + Optional optionRMNode = + resourceInfo.resourceInfo().stream() + .filter( + resourceNode -> + resourceNode.getServiceInstance().equals(engineNode.getServiceInstance())) + .findFirst(); + + optionMetrics.ifPresent(metrics -> 
metricsConverter.fillMetricsToNode(engineNode, metrics)); + optionRMNode.ifPresent(rmNode -> engineNode.setNodeResource(rmNode.getNodeResource())); + } + } catch (Exception e) { + LinkisRetryException linkisRetryException = + new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, "Failed to process data."); + linkisRetryException.initCause(e); + throw linkisRetryException; + } return engineNodes; } @@ -349,14 +352,16 @@ public EngineNode getEngineNodeInfo(ServiceInstance serviceInstance) { AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); } - + NodeMetrics nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode); if (engineNode.getNodeStatus() == null) { - NodeMetrics nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode); - if (Objects.nonNull(nodeMetric) && Objects.nonNull(nodeMetric.getStatus())) { + if (null != nodeMetric && null != nodeMetric.getStatus()) { engineNode.setNodeStatus(NodeStatus.values()[nodeMetric.getStatus()]); } else { engineNode.setNodeStatus(NodeStatus.Starting); } + if (null != nodeMetric && StringUtils.isNotBlank(nodeMetric.getHeartBeatMsg())) { + engineNode.setEcMetrics(nodeMetric.getHeartBeatMsg()); + } } return engineNode; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java index 8cb8362ecc..cf540036a3 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java @@ -20,6 +20,7 @@ import org.apache.linkis.common.ServiceInstance; import 
org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.node.Node; import org.apache.linkis.manager.common.entity.node.ScoreServiceInstance; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; @@ -32,11 +33,11 @@ public interface EMNodeManager { void emRegister(EMNode emNode); - List listEngines(EMNode emNode); + List listEngines(EMNode emNode); - List listUserEngines(EMNode emNode, String user); + List listUserEngines(EMNode emNode, String user); - List listUserNodes(String user); + List listUserNodes(String user); /** * Get detailed em information from the persistence diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java index ce79d79c7e..7c3f64efee 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java @@ -65,5 +65,7 @@ void updateEngineStatus( EngineNode useEngine(EngineNode engineNode, long timeout); + EngineNode useEngine(EngineNode engineNode); + EngineOperateResponse executeOperation(EngineNode engineNode, EngineOperateRequest request); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java index 
80390d0883..fc0a1fca24 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java @@ -17,12 +17,10 @@ package org.apache.linkis.manager.am.pointer; -import org.apache.linkis.common.exception.WarnException; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.node.Node; import org.apache.linkis.manager.common.protocol.node.*; import org.apache.linkis.manager.label.entity.Label; -import org.apache.linkis.manager.service.common.pointer.NodePointer; import org.apache.linkis.rpc.Sender; public abstract class AbstractNodePointer implements NodePointer { @@ -39,12 +37,8 @@ protected Sender getSender() { @Override public NodeStatus getNodeStatus() { Sender sender = getSender(); - try { - ResponseNodeStatus responseStatus = (ResponseNodeStatus) sender.ask(new RequestNodeStatus()); - return responseStatus.getNodeStatus(); - } catch (WarnException e) { - throw e; - } + ResponseNodeStatus responseStatus = (ResponseNodeStatus) sender.ask(new RequestNodeStatus()); + return responseStatus.getNodeStatus(); } /** @@ -55,12 +49,8 @@ public NodeStatus getNodeStatus() { @Override public NodeHeartbeatMsg getNodeHeartbeatMsg() { Sender sender = getSender(); - try { - NodeHeartbeatMsg heartbeatMsg = (NodeHeartbeatMsg) sender.ask(new NodeHeartbeatRequest()); - return heartbeatMsg; - } catch (WarnException e) { - throw e; - } + NodeHeartbeatMsg heartbeatMsg = (NodeHeartbeatMsg) sender.ask(new NodeHeartbeatRequest()); + return heartbeatMsg; } /** diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java index 1458680c98..07097fcb0b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java @@ -27,7 +27,6 @@ import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; -import org.apache.linkis.manager.service.common.pointer.EMNodPointer; import org.apache.linkis.server.BDPJettyServerHelper; import org.slf4j.Logger; @@ -73,28 +72,39 @@ public EngineNode createEngine(EngineConnLaunchRequest engineConnLaunchRequest) @Override public void stopEngine(EngineStopRequest engineStopRequest) { - Object result = getSender().ask(engineStopRequest); - if (result instanceof EngineStopResponse) { - EngineStopResponse engineStopResponse = (EngineStopResponse) result; - if (!engineStopResponse.getStopStatus()) { - logger.info( - "Kill engine : " - + engineStopRequest.getServiceInstance().toString() - + " failed, because " - + engineStopResponse.getMsg() - + " . Will ask engine to suicide."); + try { + Object result = getSender().ask(engineStopRequest); + if (result instanceof EngineStopResponse) { + EngineStopResponse engineStopResponse = (EngineStopResponse) result; + if (!engineStopResponse.getStopStatus()) { + logger.info( + "Kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, because " + + engineStopResponse.getMsg() + + " . 
Will ask engine to suicide."); + } else { + logger.info( + "Succeed to kill engine " + engineStopRequest.getServiceInstance().toString() + "."); + } } else { - logger.info( - "Succeed to kill engine " + engineStopRequest.getServiceInstance().toString() + "."); + logger.warn( + "Ask em : " + + getNode().getServiceInstance().toString() + + " to kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, response is : " + + BDPJettyServerHelper.gson().toJson(result) + + "."); } - } else { + } catch (Exception e) { logger.warn( "Ask em : " + getNode().getServiceInstance().toString() + " to kill engine : " + engineStopRequest.getServiceInstance().toString() - + " failed, response is : " - + BDPJettyServerHelper.gson().toJson(result) + + " failed, exception is : " + + e.getMessage() + "."); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java index 21f86f83d1..cdbbcbbf09 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java @@ -24,7 +24,6 @@ import org.apache.linkis.manager.common.protocol.ResponseEngineLock; import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; -import org.apache.linkis.manager.service.common.pointer.EngineNodePointer; import java.util.Optional; @@ -50,14 +49,11 @@ public Optional lockEngine(RequestEngineLock requestEngineLock) { Object result = getSender().ask(requestEngineLock); if (result 
instanceof ResponseEngineLock) { ResponseEngineLock responseEngineLock = (ResponseEngineLock) result; - if (responseEngineLock.getLockStatus()) { - return Optional.of(responseEngineLock.getLock()); + if (responseEngineLock.lockStatus()) { + return Optional.of(responseEngineLock.lock()); } else { logger.info( - "Failed to get locker," - + node.getServiceInstance() - + ": " - + responseEngineLock.getMsg()); + "Failed to get locker," + node.getServiceInstance() + ": " + responseEngineLock.msg()); return Optional.empty(); } } else { diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java index 03e0bc8087..58ec1ae940 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java @@ -19,9 +19,6 @@ import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; -import org.apache.linkis.manager.service.common.pointer.EMNodPointer; -import org.apache.linkis.manager.service.common.pointer.EngineNodePointer; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; import org.springframework.stereotype.Component; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EMNodPointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java similarity index 96% rename from 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EMNodPointer.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java index 6574563003..a85bf6eaca 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EMNodPointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EngineNodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java similarity index 96% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EngineNodePointer.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java index cc24b746af..8be00a09c2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EngineNodePointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java @@ -15,7 +15,7 @@ * limitations under 
the License. */ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.protocol.RequestEngineLock; import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java similarity index 96% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointer.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java index ca27bf8194..e5d519873e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.node.Node; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointerBuilder.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java similarity index 94% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointerBuilder.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java index e993a724b1..649e142399 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointerBuilder.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java index 0ff9ef93eb..70caae1a8d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java @@ -71,6 +71,7 @@ public class ECResourceInfoRestfulApi { public Message getECInfo( HttpServletRequest req, @RequestParam(value = "ticketid") String ticketid) throws AMErrorException { + logger.info("ticket: {} get ec info", ticketid); ECResourceInfoRecord ecResourceInfoRecord = ecResourceInfoService.getECResourceInfoRecord(ticketid); String userName = ModuleUserUtils.getOperationUser(req, "getECInfo ticketid:" + ticketid); @@ -87,7 +88,7 @@ public Message getECInfo( @ApiImplicitParams({ @ApiImplicitParam(name = "ticketid", required = true, dataType = "String", value = "ticket id") }) - @RequestMapping(path = "/delete/{ticketid}", method = RequestMethod.DELETE) + @RequestMapping(path = "/delete/{ticketid}", method = RequestMethod.DELETE) public Message deleteECInfo(HttpServletRequest req, @PathVariable("ticketid") String ticketid) throws AMErrorException { ECResourceInfoRecord ecResourceInfoRecord = @@ -113,6 +114,7 @@ public Message deleteECInfo(HttpServletRequest req, @PathVariable("ticketid") St @ApiImplicitParam(name = "startDate", dataType = "String", value = "start 
date"), @ApiImplicitParam(name = "endDate", dataType = "String", value = "end date"), @ApiImplicitParam(name = "engineType", dataType = "String", value = "engine type"), + @ApiImplicitParam(name = "status", dataType = "String", value = "engine status"), @ApiImplicitParam(name = "pageNow", dataType = "String", value = "page now"), @ApiImplicitParam(name = "pageSize", dataType = "String", value = "page size") }) @@ -131,6 +133,7 @@ public Message queryEcrHistory( defaultValue = "#{new java.util.Date()}") Date endDate, @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "status", required = false) String status, @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) { String username = SecurityFilter.getLoginUsername(req); @@ -138,6 +141,7 @@ public Message queryEcrHistory( instance = ECResourceInfoUtils.strCheckAndDef(instance, null); String creatorUser = ECResourceInfoUtils.strCheckAndDef(creator, null); engineType = ECResourceInfoUtils.strCheckAndDef(engineType, null); + status = ECResourceInfoUtils.strCheckAndDef(status, null); if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { return Message.error("Invalid creator : " + creatorUser); } @@ -148,7 +152,7 @@ public Message queryEcrHistory( calendar.set(Calendar.SECOND, 0); startDate = calendar.getTime(); } - if (Configuration.isAdmin(username)) { + if (Configuration.isJobHistoryAdmin(username)) { username = null; if (StringUtils.isNotBlank(creatorUser)) { username = creatorUser; @@ -161,7 +165,7 @@ public Message queryEcrHistory( try { queryTasks = ecResourceInfoService.getECResourceInfoRecordList( - instance, endDate, startDate, username, engineType); + instance, endDate, startDate, username, engineType, status); queryTasks.forEach( info -> { ECResourceInfoRecordVo ecrHistroryListVo = new ECResourceInfoRecordVo(); @@ 
-188,25 +192,51 @@ public Message queryEcrHistory( @ApiImplicitParam(name = "creators", dataType = "Array", required = true, value = "creators"), @ApiImplicitParam(name = "engineTypes", dataType = "Array", value = "engine type"), @ApiImplicitParam(name = "statuss", dataType = "Array", value = "statuss"), + @ApiImplicitParam(name = "queueName", dataType = "String", value = "queueName"), + @ApiImplicitParam(name = "ecInstances", dataType = "Array", value = "ecInstances"), + @ApiImplicitParam(name = "crossCluster", dataType = "String", value = "crossCluster"), }) @RequestMapping(path = "/ecList", method = RequestMethod.POST) public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNode) { + String username = ModuleUserUtils.getOperationUser(req, "ecList"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to query ecList.", token); + return Message.error("Token:" + token + " has no permission to query ecList."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to query ecList.", username); + return Message.error("User:" + username + " has no permission to query ecList."); + } JsonNode creatorsParam = jsonNode.get("creators"); JsonNode engineTypesParam = jsonNode.get("engineTypes"); JsonNode statussParam = jsonNode.get("statuss"); + JsonNode queueNameParam = jsonNode.get("queueName"); + JsonNode ecInstancesParam = jsonNode.get("ecInstances"); + JsonNode crossClusterParam = jsonNode.get("crossCluster"); - if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { - return Message.error("creators is null in the parameters of the request(请求参数中【creators】为空)"); - } + // if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { + // return Message.error("creators is null in the parameters of the + // 
request(请求参数中【creators】为空)"); + // } List creatorUserList = new ArrayList<>(); - try { - creatorUserList = - JsonUtils.jackson() - .readValue(creatorsParam.toString(), new TypeReference>() {}); - } catch (JsonProcessingException e) { - return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + if (creatorsParam != null && !creatorsParam.isNull()) { + try { + creatorUserList = + JsonUtils.jackson() + .readValue(creatorsParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + } + for (String creatorUser : creatorUserList) { + if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { + return Message.error("Invalid creator: " + creatorUser); + } + } } List engineTypeList = new ArrayList<>(); @@ -230,35 +260,52 @@ public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNod return Message.error("parameters:statuss parsing failed(请求参数【statuss】解析失败)"); } } - - String username = ModuleUserUtils.getOperationUser(req, "ecList"); - - String token = ModuleUserUtils.getToken(req); - // check special admin token - if (StringUtils.isNotBlank(token)) { - if (!Configuration.isAdminToken(token)) { - logger.warn("Token:{} has no permission to query ecList.", token); - return Message.error("Token:" + token + " has no permission to query ecList."); + String queueName = ""; + if (queueNameParam != null && !queueNameParam.isNull()) { + try { + queueName = + JsonUtils.jackson() + .readValue(queueNameParam.toString(), new TypeReference() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:queueName parsing failed(请求参数【queueName】解析失败)"); } - } else if (!Configuration.isAdmin(username)) { - logger.warn("User:{} has no permission to query ecList.", username); - return Message.error("User:" + username + " has no permission to query ecList."); } - - for (String creatorUser : creatorUserList) { 
- if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { - return Message.error("Invalid creator: " + creatorUser); + List<String> ecInstancesList = new ArrayList<>(); + if (ecInstancesParam != null && !ecInstancesParam.isNull()) { + try { + ecInstancesList = + JsonUtils.jackson() + .readValue(ecInstancesParam.toString(), new TypeReference<List<String>>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:ecInstances parsing failed(请求参数【ecInstances】解析失败)"); + } + } + Boolean isCrossCluster = null; + if (crossClusterParam != null && !crossClusterParam.isNull()) { + try { + isCrossCluster = + JsonUtils.jackson() + .readValue(crossClusterParam.toString(), new TypeReference<Boolean>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:crossCluster parsing failed(请求参数【crossCluster】解析失败)"); } } - logger.info( - "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}]", + "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}], queueName:{}, instanceNameList:{}", String.join(",", creatorUserList), String.join(",", engineTypeList), - String.join(",", statusStrList)); + String.join(",", statusStrList), + queueName, + String.join(",", ecInstancesList)); List<Map<String, Object>> list = - ecResourceInfoService.getECResourceInfoList(creatorUserList, engineTypeList, statusStrList); + ecResourceInfoService.getECResourceInfoList( + creatorUserList, + engineTypeList, + statusStrList, + queueName, + ecInstancesList, + isCrossCluster); return Message.ok().data("ecList", list); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java index 4d5cb480d3..fbebd6813b 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java @@ -28,22 +28,35 @@ import org.apache.linkis.manager.am.service.ECResourceInfoService; import org.apache.linkis.manager.am.service.em.ECMOperateService; import org.apache.linkis.manager.am.service.em.EMInfoService; +import org.apache.linkis.manager.am.service.engine.DefaultEngineCreateService; import org.apache.linkis.manager.am.utils.AMUtils; +import org.apache.linkis.manager.am.vo.CanCreateECRes; import org.apache.linkis.manager.am.vo.EMNodeVo; import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo; import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; -import org.apache.linkis.manager.common.protocol.OperateRequest; +import org.apache.linkis.manager.common.exception.RMErrorException; +import org.apache.linkis.manager.common.protocol.OperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; +import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineCreateRequest; +import org.apache.linkis.manager.exception.PersistenceErrorException; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.UserModifiable; +import 
org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.manager.label.exception.LabelErrorException; import org.apache.linkis.manager.label.service.NodeLabelService; +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator; +import org.apache.linkis.manager.persistence.LabelManagerPersistence; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.manager.persistence.ResourceManagerPersistence; +import org.apache.linkis.manager.rm.external.service.ExternalResourceService; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -58,6 +71,7 @@ import javax.servlet.http.HttpServletRequest; +import java.text.MessageFormat; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -93,6 +107,17 @@ public class EMRestfulApi { @Autowired private ECMOperateService ecmOperateService; @Autowired private ECResourceInfoService ecResourceInfoService; + + @Autowired private ResourceManagerPersistence resourceManagerPersistence; + + @Autowired private LabelManagerPersistence labelManagerPersistence; + + @Autowired private ExternalResourceService externalResourceService; + + @Autowired private DefaultEngineCreateService defaultEngineCreateService; + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + private LabelBuilderFactory stdLabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory(); @@ -148,7 +173,7 @@ public Message listAllEMs( stream = stream.filter( em -> { - List> labels = em.getLabels(); + List