From 771a18940498f6dc55f25564f7cf83cd4618204b Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Sun, 4 Aug 2024 22:16:19 +0800 Subject: [PATCH 01/51] [ISSUE #5067] Enhancement for eventmesh-admin-server (#5068) * [ISSUE #5040] Support gtid mode for sync data with mysql * fix conflicts with master * fix checkstyle error * [ISSUE #5044] Data synchronization strong verification in mariadb gtid mode * fix checkstyle error * [ISSUE #5048] Add report verify request to admin for connector runtime * fix checkstyle error * [ISSUE #5052] Enhancement for source\sink connector * fix checkstyle error * fix checkstyle error * [ISSUE #5067] Enhancement for eventmesh-admin-server --- eventmesh-admin-server/conf/application.yaml | 10 ++- eventmesh-admin-server/conf/eventmesh.sql | 14 +-- .../conf/mapper/EventMeshJobInfoMapper.xml | 40 ++++----- .../conf/mapper/EventMeshTaskInfoMapper.xml | 13 +-- .../admin/server/AdminServerProperties.java | 5 ++ .../admin/server/web/HttpServer.java | 9 +- .../web/db/entity/EventMeshJobInfo.java | 8 +- .../web/db/entity/EventMeshTaskInfo.java | 10 ++- .../db/mapper/EventMeshJobInfoExtMapper.java | 18 +++- .../impl/EventMeshVerifyServiceImpl.java | 39 +++++++++ .../admin/server/web/pojo/JobDetail.java | 8 +- .../web/service/job/JobInfoBizService.java | 30 ++++--- .../web/service/task/TaskBizService.java | 55 +++++++++--- .../common/remote/TransportType.java | 6 +- .../common/remote/datasource/DataSource.java | 25 +++--- .../MySqlIncDataSourceSourceConf.java | 85 ------------------- .../request/CreateOrUpdateDataSourceReq.java | 5 +- .../remote/request/CreateTaskRequest.java | 33 ++++++- .../runtime/RuntimeInstanceConfig.java | 4 +- .../runtime/boot/RuntimeInstance.java | 44 +++++----- .../runtime/connector/ConnectorRuntime.java | 2 +- .../src/main/resources/runtime.yaml | 2 + 22 files changed, 272 insertions(+), 193 deletions(-) create mode 100644 
eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java delete mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/MySqlIncDataSourceSourceConf.java diff --git a/eventmesh-admin-server/conf/application.yaml b/eventmesh-admin-server/conf/application.yaml index 54795057cb..afbcd4a438 100644 --- a/eventmesh-admin-server/conf/application.yaml +++ b/eventmesh-admin-server/conf/application.yaml @@ -28,5 +28,11 @@ mybatis-plus: log-impl: org.apache.ibatis.logging.stdout.StdOutImpl event-mesh: admin-server: - service-name: DEFAULT_GROUP@@em_adm_server - port: 8081 \ No newline at end of file + serviceName: DEFAULT_GROUP@@em_adm_server + port: 8081 + adminServerList: + region1: + - http://localhost:8081 + region2: + - http://localhost:8082 + region: region1 \ No newline at end of file diff --git a/eventmesh-admin-server/conf/eventmesh.sql b/eventmesh-admin-server/conf/eventmesh.sql index 82d5c53317..94edbb6fac 100644 --- a/eventmesh-admin-server/conf/eventmesh.sql +++ b/eventmesh-admin-server/conf/eventmesh.sql @@ -45,14 +45,15 @@ CREATE TABLE IF NOT EXISTS `event_mesh_data_source` ( CREATE TABLE IF NOT EXISTS `event_mesh_job_info` ( `id` int unsigned NOT NULL AUTO_INCREMENT, `jobID` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `desc` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, + `jobDesc` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, `taskID` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `transportType` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `sourceData` int NOT NULL DEFAULT '0', `targetData` int NOT NULL DEFAULT '0', - `state` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `jobState` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `jobType` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', 
`fromRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, + `runningRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `createUid` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `updateUid` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -118,10 +119,11 @@ CREATE TABLE IF NOT EXISTS `event_mesh_runtime_history` ( CREATE TABLE IF NOT EXISTS `event_mesh_task_info` ( `id` int unsigned NOT NULL AUTO_INCREMENT, `taskID` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, - `name` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `desc` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, - `state` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT 'taskstate', - `fromRegion` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `taskName` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, + `taskDesc` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, + `taskState` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT 'taskstate', + `sourceRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, + `targetRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `createUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `updateUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, diff --git a/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml index 02e8806680..a053d1c838 100644 --- a/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml +++ b/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml @@ -19,31 +19,33 @@ --> + PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" + "http://mybatis.org/dtd/mybatis-3-mapper.dtd"> - - - - - - - - - - - - - - + + + + + + + + + + + + + + + - id,jobID,desc, + id,jobID,jobDesc, 
taskID,transportType,sourceData, - targetData,state,jobType, - fromRegion,createTime,updateTime + targetData,jobState,jobType, + fromRegion,runningRegion,createUid, + updateUid,createTime,updateTime diff --git a/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml index 05b1dc52a0..c3514fd945 100644 --- a/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml +++ b/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml @@ -26,10 +26,11 @@ - - - - + + + + + @@ -37,8 +38,8 @@ - id,taskID,name, - desc,state,fromRegion, + id,taskID,taskName, + taskDesc,taskState,sourceRegion,targetRegion, createUid,updateUid,createTime, updateTime diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java index 2162731e21..612d398078 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java @@ -17,6 +17,9 @@ package org.apache.eventmesh.admin.server; +import java.util.List; +import java.util.Map; + import org.springframework.boot.context.properties.ConfigurationProperties; import lombok.Getter; @@ -32,4 +35,6 @@ public class AdminServerProperties { private String configurationPath; private String configurationFile; private String serviceName; + private Map> adminServerList; + private String region; } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java index bd896d546c..a5daac881e 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java +++ 
b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java @@ -24,18 +24,21 @@ import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; +import com.alibaba.druid.support.json.JSONUtils; + @RestController @RequestMapping("/eventmesh/admin") public class HttpServer { @Autowired private TaskBizService taskService; - @RequestMapping("/createTask") - public ResponseEntity> createOrUpdateTask(@RequestBody CreateTaskRequest task) { + @RequestMapping(value = "/createTask", method = RequestMethod.POST) + public ResponseEntity createOrUpdateTask(@RequestBody CreateTaskRequest task) { String uuid = taskService.createTask(task); - return ResponseEntity.ok(Response.success(uuid)); + return ResponseEntity.ok(JSONUtils.toJSONString(Response.success(uuid))); } public boolean deleteTask(Long id) { diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java index 23db5f6c2b..a77eaaaca2 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java @@ -37,7 +37,7 @@ public class EventMeshJobInfo implements Serializable { private String jobID; - private String desc; + private String jobDesc; private String taskID; @@ -47,12 +47,16 @@ public class EventMeshJobInfo implements Serializable { private Integer targetData; - private String state; + private String jobState; private String jobType; + // job request from region private String fromRegion; + // job actually running region + 
private String runningRegion; + private String createUid; private String updateUid; diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java index 5d1b6648c9..2d40f4a082 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java @@ -37,13 +37,15 @@ public class EventMeshTaskInfo implements Serializable { private String taskID; - private String name; + private String taskName; - private String desc; + private String taskDesc; - private String state; + private String taskState; - private String fromRegion; + private String sourceRegion; + + private String targetRegion; private String createUid; diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java index 7f46dcab41..c04c4e3748 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java @@ -21,11 +21,12 @@ import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; -import org.apache.ibatis.annotations.Options; import org.apache.ibatis.annotations.Param; import java.util.List; +import org.springframework.transaction.annotation.Transactional; + import com.baomidou.mybatisplus.core.mapper.BaseMapper; /** @@ -33,9 +34,18 @@ */ @Mapper public interface EventMeshJobInfoExtMapper extends BaseMapper { - @Insert("insert into event_mesh_job_info(`taskID`,`state`,`jobType`) 
values" - + "(#{job.taskID},#{job.state},#{job.jobType})") - @Options(useGeneratedKeys = true, keyProperty = "jobID") + + @Insert("") + @Transactional(rollbackFor = Exception.class) int saveBatch(@Param("jobs") List jobInfoList); } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java new file mode 100644 index 0000000000..5e49ba32ea --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshVerifyMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshVerifyService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * event_mesh_verify + */ +@Service +public class EventMeshVerifyServiceImpl extends ServiceImpl + implements EventMeshVerifyService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java index c47b284483..0e2fa64878 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java @@ -34,7 +34,7 @@ public class JobDetail { private String jobID; - private String desc; + private String jobDesc; private String taskID; @@ -50,7 +50,11 @@ public class JobDetail { private String updateUid; - private String region; + // job request from region + private String fromRegion; + + // job actually running region + private String runningRegion; private DataSource sourceDataSource; diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java index 9affa10e60..ea02658481 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.admin.server.web.service.job; 
+import org.apache.eventmesh.admin.server.AdminServerProperties; import org.apache.eventmesh.admin.server.AdminServerRuntimeException; import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; @@ -70,12 +71,15 @@ public class JobInfoBizService { @Autowired private PositionBizService positionBizService; + @Autowired + private AdminServerProperties properties; + public boolean updateJobState(String jobID, TaskState state) { if (jobID == null || state == null) { return false; } EventMeshJobInfo jobInfo = new EventMeshJobInfo(); - jobInfo.setState(state.name()); + jobInfo.setJobState(state.name()); return jobInfoService.update(jobInfo, Wrappers.update().eq("jobID", jobID).ne("state", TaskState.DELETE.name())); } @@ -86,34 +90,40 @@ public List createJobs(List jobs) { return null; } List entityList = new LinkedList<>(); + for (JobDetail job : jobs) { + // if running region not equal with admin region continue + if (!job.getRunningRegion().equals(properties.getRegion())) { + continue; + } EventMeshJobInfo entity = new EventMeshJobInfo(); - entity.setState(TaskState.INIT.name()); + entity.setJobState(TaskState.INIT.name()); entity.setTaskID(job.getTaskID()); entity.setJobType(job.getJobType().name()); - entity.setDesc(job.getDesc()); + entity.setJobDesc(job.getJobDesc()); String jobID = UUID.randomUUID().toString(); entity.setJobID(jobID); entity.setTransportType(job.getTransportType().name()); entity.setCreateUid(job.getCreateUid()); entity.setUpdateUid(job.getUpdateUid()); - entity.setFromRegion(job.getRegion()); + entity.setFromRegion(job.getFromRegion()); + entity.setRunningRegion(job.getRunningRegion()); CreateOrUpdateDataSourceReq source = new CreateOrUpdateDataSourceReq(); source.setType(job.getTransportType().getSrc()); source.setOperator(job.getCreateUid()); - source.setRegion(job.getRegion()); + source.setRegion(job.getSourceDataSource().getRegion()); 
source.setDesc(job.getSourceConnectorDesc()); - source.setConfig(job.getSourceDataSource()); + source.setConfig(job.getSourceDataSource().getConf()); EventMeshDataSource createdSource = dataSourceBizService.createDataSource(source); entity.setSourceData(createdSource.getId()); CreateOrUpdateDataSourceReq sink = new CreateOrUpdateDataSourceReq(); sink.setType(job.getTransportType().getDst()); sink.setOperator(job.getCreateUid()); - sink.setRegion(job.getRegion()); + sink.setRegion(job.getSinkDataSource().getRegion()); sink.setDesc(job.getSinkConnectorDesc()); - sink.setConfig(job.getSinkDataSource()); - EventMeshDataSource createdSink = dataSourceBizService.createDataSource(source); + sink.setConfig(job.getSinkDataSource().getConf()); + EventMeshDataSource createdSink = dataSourceBizService.createDataSource(sink); entity.setTargetData(createdSink.getId()); entityList.add(entity); @@ -167,7 +177,7 @@ public JobDetail getJobDetail(String jobID) { detail.setSinkConnectorDesc(target.getDescription()); } - TaskState state = TaskState.fromIndex(job.getState()); + TaskState state = TaskState.fromIndex(job.getJobState()); if (state == null) { throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "illegal job state in db"); } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java index b4fdc57af0..f686456135 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.admin.server.web.service.task; +import org.apache.eventmesh.admin.server.AdminServerProperties; import org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo; import 
org.apache.eventmesh.admin.server.web.db.service.EventMeshTaskInfoService; import org.apache.eventmesh.admin.server.web.pojo.JobDetail; @@ -24,13 +25,18 @@ import org.apache.eventmesh.common.remote.TaskState; import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.commons.lang3.StringUtils; + import java.util.List; +import java.util.Random; import java.util.UUID; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.client.RestTemplate; @Service public class TaskBizService { @@ -40,38 +46,67 @@ public class TaskBizService { @Autowired private JobInfoBizService jobInfoService; + @Autowired + private AdminServerProperties properties; + @Transactional public String createTask(CreateTaskRequest req) { - String taskID = UUID.randomUUID().toString(); + String taskID = req.getTaskId(); + if (StringUtils.isEmpty(taskID)) { + taskID = UUID.randomUUID().toString(); + req.setTaskId(taskID); + } + + String targetRegion = req.getTargetRegion(); + // not from other admin && target not equals with self region + if (!req.isFlag() && !StringUtils.equals(properties.getRegion(), targetRegion)) { + List adminServerList = properties.getAdminServerList().get(targetRegion); + if (adminServerList == null || adminServerList.isEmpty()) { + throw new RuntimeException("No admin server available for region: " + targetRegion); + } + String targetUrl = adminServerList.get(new Random().nextInt(adminServerList.size())) + "/eventmesh/admin/createTask"; + + RestTemplate restTemplate = new RestTemplate(); + req.setFlag(true); + ResponseEntity response = restTemplate.postForEntity(targetUrl, req, String.class); + if (!response.getStatusCode().is2xxSuccessful()) { + throw new RuntimeException("Failed to create task on admin server: " + 
targetUrl); + } + } + + String finalTaskID = taskID; List jobs = req.getJobs().stream().map(x -> { JobDetail job = parse(x); - job.setTaskID(taskID); - job.setRegion(req.getRegion()); + job.setTaskID(finalTaskID); job.setCreateUid(req.getUid()); job.setUpdateUid(req.getUid()); return job; }).collect(Collectors.toList()); jobInfoService.createJobs(jobs); EventMeshTaskInfo taskInfo = new EventMeshTaskInfo(); - taskInfo.setTaskID(taskID); - taskInfo.setName(req.getName()); - taskInfo.setDesc(req.getDesc()); - taskInfo.setState(TaskState.INIT.name()); + taskInfo.setTaskID(finalTaskID); + taskInfo.setTaskName(req.getTaskName()); + taskInfo.setTaskDesc(req.getTaskDesc()); + taskInfo.setTaskState(TaskState.INIT.name()); taskInfo.setCreateUid(req.getUid()); - taskInfo.setFromRegion(req.getRegion()); + taskInfo.setSourceRegion(req.getSourceRegion()); + taskInfo.setTargetRegion(req.getTargetRegion()); taskInfoService.save(taskInfo); - return taskID; + return finalTaskID; } private JobDetail parse(CreateTaskRequest.JobDetail src) { JobDetail dst = new JobDetail(); - dst.setDesc(src.getDesc()); + dst.setJobDesc(src.getJobDesc()); dst.setTransportType(src.getTransportType()); dst.setSourceConnectorDesc(src.getSourceConnectorDesc()); dst.setSourceDataSource(src.getSourceDataSource()); dst.setSinkConnectorDesc(src.getSinkConnectorDesc()); dst.setSinkDataSource(src.getSinkDataSource()); + // full/increase/check dst.setJobType(src.getJobType()); + dst.setFromRegion(src.getFromRegion()); + dst.setRunningRegion(src.getRunningRegion()); return dst; } } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java index 95a88a23fa..82e7bc021d 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java @@ -30,8 +30,12 @@ public enum 
TransportType { REDIS_REDIS(DataSourceType.REDIS, DataSourceType.REDIS), ROCKETMQ_ROCKETMQ(DataSourceType.ROCKETMQ, DataSourceType.ROCKETMQ), MYSQL_HTTP(DataSourceType.MYSQL, DataSourceType.HTTP), + ROCKETMQ_HTTP(DataSourceType.ROCKETMQ, DataSourceType.HTTP), HTTP_MYSQL(DataSourceType.HTTP, DataSourceType.MYSQL), - REDIS_MQ(DataSourceType.REDIS, DataSourceType.ROCKETMQ); + HTTP_REDIS(DataSourceType.HTTP, DataSourceType.REDIS), + HTTP_ROCKETMQ(DataSourceType.HTTP, DataSourceType.ROCKETMQ), + REDIS_MQ(DataSourceType.REDIS, DataSourceType.ROCKETMQ), + ; private static final Map INDEX_TYPES = new HashMap<>(); private static final TransportType[] TYPES = TransportType.values(); private static final String SEPARATOR = "@"; diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java index 7af3812f24..afda984805 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java @@ -17,27 +17,30 @@ package org.apache.eventmesh.common.remote.datasource; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; + import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import lombok.Getter; +import lombok.Data; -@Getter +@Data public class DataSource { - private final DataSourceType type; + + private DataSourceType type; + private String desc; @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) @JsonSubTypes({ - @JsonSubTypes.Type(value = MySqlIncDataSourceSourceConf.class, name = "MySqlIncDataSourceSourceConf") + @JsonSubTypes.Type(value = CanalSourceConfig.class, name = "CanalSourceConfig"), + 
@JsonSubTypes.Type(value = CanalSinkConfig.class, name = "CanalSinkConfig") }) - private final DataSourceConf conf; - private final Class confClazz; + private Config conf; - public DataSource(DataSourceType type, DataSourceConf conf) { - this.type = type; - this.conf = conf; - this.confClazz = conf.getConfClass(); - } + private Class confClazz; + private String region; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/MySqlIncDataSourceSourceConf.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/MySqlIncDataSourceSourceConf.java deleted file mode 100644 index f8c825e963..0000000000 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/MySqlIncDataSourceSourceConf.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.eventmesh.common.remote.datasource; - -import org.apache.eventmesh.common.config.connector.rdb.canal.SourceConnectorConfig; -import org.apache.eventmesh.common.remote.job.SyncConsistency; -import org.apache.eventmesh.common.remote.job.SyncMode; -import org.apache.eventmesh.common.remote.offset.RecordPosition; - -import java.util.List; - -public class MySqlIncDataSourceSourceConf extends DataSourceConf { - @Override - public Class getConfClass() { - return MySqlIncDataSourceSourceConf.class; - } - - private String destination; - - private Long canalInstanceId; - - private String desc; - - private boolean ddlSync = true; - - private boolean filterTableError = false; - - private Long slaveId; - - private Short clientId; - - private String serverUUID; - - private boolean isMariaDB = true; - - private boolean isGTIDMode = true; - - private Integer batchSize = 10000; - - private Long batchTimeout = -1L; - - private String tableFilter; - - private String fieldFilter; - - private List recordPositions; - - // ================================= channel parameter - // ================================ - - // enable remedy - private Boolean enableRemedy = false; - - // sync mode: field/row - private SyncMode syncMode; - - // sync consistency - private SyncConsistency syncConsistency; - - // ================================= system parameter - // ================================ - - // Column name of the bidirectional synchronization mark - private String needSyncMarkTableColumnName = "needSync"; - - // Column value of the bidirectional synchronization mark - private String needSyncMarkTableColumnValue = "needSync"; - - private SourceConnectorConfig sourceConnectorConfig; -} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java index 4ecf9b4527..fadfa68e75 100644 --- 
a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.common.remote.request; -import org.apache.eventmesh.common.remote.datasource.DataSource; +import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.remote.datasource.DataSourceType; import lombok.Data; @@ -29,10 +29,11 @@ @Data @EqualsAndHashCode(callSuper = true) public class CreateOrUpdateDataSourceReq extends BaseRemoteRequest { + private Integer id; private DataSourceType type; private String desc; - private DataSource config; + private Config config; private String region; private String operator; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java index ce24e03416..47c45595af 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java @@ -30,16 +30,35 @@ */ @Data public class CreateTaskRequest { - private String name; - private String desc; + + private String taskId; + + // task name + private String taskName; + + // task description + private String taskDesc; + + // task owner or updater private String uid; + private List jobs; - private String region; + + // task source region + private String sourceRegion; + + // task target region + private String targetRegion; + + // mark request send by other region admin, default is false + private boolean flag = false; @Data public static class JobDetail { - private String desc; + private String jobDesc; + + // full/increase/check private JobType jobType; private DataSource sourceDataSource; @@ -51,5 +70,11 @@ 
public static class JobDetail { private String sinkConnectorDesc; private TransportType transportType; + + // job request from region + private String fromRegion; + + // job actually running region + private String runningRegion; } } diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java index 7171b3fc27..caa5330fe3 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java @@ -28,6 +28,8 @@ @Config(path = "classPath://runtime.yaml") public class RuntimeInstanceConfig { + private boolean registryEnabled; + private String registryServerAddr; private String registryPluginType; @@ -36,7 +38,7 @@ public class RuntimeInstanceConfig { private String adminServiceName; - private String adminServerAddr; + private String adminServiceAddr; private ComponentType componentType; diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java index 0fade897f6..acea321e95 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java @@ -41,11 +41,11 @@ @Slf4j public class RuntimeInstance { - private String adminServerAddr = "127.0.0.1:8081"; + private String adminServiceAddr; private Map adminServerInfoMap = new HashMap<>(); - private final RegistryService registryService; + private RegistryService registryService; private Runtime runtime; @@ -57,29 +57,34 @@ public class RuntimeInstance { public RuntimeInstance(RuntimeInstanceConfig runtimeInstanceConfig) { this.runtimeInstanceConfig = runtimeInstanceConfig; - this.registryService = 
RegistryFactory.getInstance(runtimeInstanceConfig.getRegistryPluginType()); + if (runtimeInstanceConfig.isRegistryEnabled()) { + this.registryService = RegistryFactory.getInstance(runtimeInstanceConfig.getRegistryPluginType()); + } } public void init() throws Exception { - registryService.init(); - QueryInstances queryInstances = new QueryInstances(); - queryInstances.setServiceName(runtimeInstanceConfig.getAdminServiceName()); - queryInstances.setHealth(true); - List adminServerRegisterInfoList = registryService.selectInstances(queryInstances); - if (!adminServerRegisterInfoList.isEmpty()) { - adminServerAddr = getRandomAdminServerAddr(adminServerRegisterInfoList); - } else { - throw new RuntimeException("admin server address is empty, please check"); + if (registryService != null) { + registryService.init(); + QueryInstances queryInstances = new QueryInstances(); + queryInstances.setServiceName(runtimeInstanceConfig.getAdminServiceName()); + queryInstances.setHealth(true); + List adminServerRegisterInfoList = registryService.selectInstances(queryInstances); + if (!adminServerRegisterInfoList.isEmpty()) { + adminServiceAddr = getRandomAdminServerAddr(adminServerRegisterInfoList); + } else { + throw new RuntimeException("admin server address is empty, please check"); + } + // use registry adminServiceAddr value replace config + runtimeInstanceConfig.setAdminServiceAddr(adminServiceAddr); } - runtimeInstanceConfig.setAdminServerAddr(adminServerAddr); + runtimeFactory = initRuntimeFactory(runtimeInstanceConfig); runtime = runtimeFactory.createRuntime(runtimeInstanceConfig); runtime.init(); } public void start() throws Exception { - if (!StringUtils.isBlank(adminServerAddr)) { - + if (!StringUtils.isBlank(adminServiceAddr) && registryService != null) { registryService.subscribe((event) -> { log.info("runtime receive registry event: {}", event); List registerServerInfoList = event.getInstances(); @@ -91,7 +96,6 @@ public void start() throws Exception { 
adminServerInfoMap = registerServerInfoMap; updateAdminServerAddr(); } - }, runtimeInstanceConfig.getAdminServiceName()); runtime.start(); isStarted = true; @@ -106,14 +110,14 @@ public void shutdown() throws Exception { private void updateAdminServerAddr() throws Exception { if (isStarted) { - if (!adminServerInfoMap.containsKey(adminServerAddr)) { - adminServerAddr = getRandomAdminServerAddr(adminServerInfoMap); - log.info("admin server address changed to: {}", adminServerAddr); + if (!adminServerInfoMap.containsKey(adminServiceAddr)) { + adminServiceAddr = getRandomAdminServerAddr(adminServerInfoMap); + log.info("admin server address changed to: {}", adminServiceAddr); shutdown(); start(); } } else { - adminServerAddr = getRandomAdminServerAddr(adminServerInfoMap); + adminServiceAddr = getRandomAdminServerAddr(adminServerInfoMap); } } diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java index 6cd0452b83..1e589ebd97 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java @@ -150,7 +150,7 @@ public void init() throws Exception { private void initAdminService() { // create gRPC channel - channel = ManagedChannelBuilder.forTarget(runtimeInstanceConfig.getAdminServerAddr()).usePlaintext().build(); + channel = ManagedChannelBuilder.forTarget(runtimeInstanceConfig.getAdminServiceAddr()).usePlaintext().build(); adminServiceStub = AdminServiceGrpc.newStub(channel).withWaitForReady(); diff --git a/eventmesh-runtime-v2/src/main/resources/runtime.yaml b/eventmesh-runtime-v2/src/main/resources/runtime.yaml index 44c5f6f91f..c5ffac9d92 100644 --- a/eventmesh-runtime-v2/src/main/resources/runtime.yaml +++ b/eventmesh-runtime-v2/src/main/resources/runtime.yaml @@ -16,7 +16,9 
@@ # componentType: CONNECTOR +registryEnabled: false registryServerAddr: 127.0.0.1:8085 registryPluginType: nacos storagePluginType: memory adminServiceName: eventmesh-admin +adminServiceAddr: "127.0.0.1:8085;127.0.0.1:8086" From a6018dd28ddcc4b5a1888617a2e8990b0969a815 Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Wed, 7 Aug 2024 17:18:08 +0800 Subject: [PATCH 02/51] [ISSUE #5069] Enhancement for http source/sink connector (#5070) * [ISSUE #5069] Enhancement for http source/sink connector * update http source connector & config * fix checkstyle error --- .../connector/http/SourceConnectorConfig.java | 3 + .../sink/handle/CommonHttpSinkHandler.java | 101 +++++++++++------- .../http/source/HttpSourceConnector.java | 36 +++++-- .../source/protocol/impl/CommonProtocol.java | 15 ++- 4 files changed, 100 insertions(+), 55 deletions(-) diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java index 4f69f55042..b7f075e6d3 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java @@ -55,4 +55,7 @@ public class SourceConnectorConfig { // extra config, e.g. 
GitHub secret private Map extraConfig = new HashMap<>(); + + // data consistency enabled, default true + private boolean dataConsistencyEnabled = true; } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java index c6cc90e0e0..4bc365a139 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java @@ -21,6 +21,8 @@ import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.connector.http.util.HttpUtils; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.net.URI; @@ -111,14 +113,70 @@ public void handle(ConnectRecord record) { // convert ConnectRecord to HttpConnectRecord String type = String.format("%s.%s.%s", connectorConfig.getConnectorName(), url.getScheme(), "common"); HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - deliver(url, httpConnectRecord); + // get timestamp and offset + Long timestamp = httpConnectRecord.getData().getTimestamp(); + Map offset = null; + try { + // May throw NullPointerException. 
+ offset = ((HttpRecordOffset) httpConnectRecord.getData().getPosition().getRecordOffset()).getOffsetMap(); + } catch (NullPointerException e) { + // ignore null pointer exception + } + final Map finalOffset = offset; + Future> responseFuture = deliver(url, httpConnectRecord); + responseFuture.onSuccess(res -> { + log.info("Request sent successfully. Record: timestamp={}, offset={}", timestamp, finalOffset); + // log the response + if (HttpUtils.is2xxSuccessful(res.statusCode())) { + if (log.isDebugEnabled()) { + log.debug("Received successful response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", + res.statusCode(), timestamp, finalOffset, res.bodyAsString()); + } else { + log.info("Received successful response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, + finalOffset); + } + record.getCallback().onSuccess(convertToSendResult(record)); + } else { + if (log.isDebugEnabled()) { + log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", + res.statusCode(), timestamp, finalOffset, res.bodyAsString()); + } else { + log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, + finalOffset); + } + record.getCallback() + .onException(buildSendExceptionContext(record, new RuntimeException("HTTP response code: " + res.statusCode()))); + } + }).onFailure(err -> { + log.error("Request failed to send. 
Record: timestamp={}, offset={}", timestamp, finalOffset, err); + record.getCallback().onException(buildSendExceptionContext(record, err)); + }); + } + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + result.setMessageId(record.getRecordId()); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + + private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { + SendExceptionContext sendExceptionContext = new SendExceptionContext(); + sendExceptionContext.setMessageId(record.getRecordId()); + sendExceptionContext.setCause(e); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + sendExceptionContext.setTopic(record.getExtension("topic")); } + return sendExceptionContext; } /** - * Processes HttpConnectRecord on specified URL while returning its own processing logic. - * This method sends the HttpConnectRecord to the specified URL using the WebClient. + * Processes HttpConnectRecord on specified URL while returning its own processing logic. This method sends the HttpConnectRecord to the specified + * URL using the WebClient. * * @param url URI to which the HttpConnectRecord should be sent * @param httpConnectRecord HttpConnectRecord to process @@ -130,48 +188,13 @@ public Future> deliver(URI url, HttpConnectRecord httpConne MultiMap headers = HttpHeaders.headers() .set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=utf-8") .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8"); - - // get timestamp and offset - Long timestamp = httpConnectRecord.getData().getTimestamp(); - Map offset = null; - try { - // May throw NullPointerException. 
- offset = ((HttpRecordOffset) httpConnectRecord.getData().getPosition().getRecordOffset()).getOffsetMap(); - } catch (NullPointerException e) { - // ignore null pointer exception - } - final Map finalOffset = offset; - // send the request return this.webClient.post(url.getPath()) .host(url.getHost()) .port(url.getPort() == -1 ? (Objects.equals(url.getScheme(), "https") ? 443 : 80) : url.getPort()) .putHeaders(headers) .ssl(Objects.equals(url.getScheme(), "https")) - .sendJson(httpConnectRecord) - .onSuccess(res -> { - log.info("Request sent successfully. Record: timestamp={}, offset={}", timestamp, finalOffset); - // log the response - if (HttpUtils.is2xxSuccessful(res.statusCode())) { - if (log.isDebugEnabled()) { - log.debug("Received successful response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", - res.statusCode(), timestamp, finalOffset, res.bodyAsString()); - } else { - log.info("Received successful response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, - finalOffset); - } - } else { - if (log.isDebugEnabled()) { - log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", - res.statusCode(), timestamp, finalOffset, res.bodyAsString()); - } else { - log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, - finalOffset); - } - } - - }) - .onFailure(err -> log.error("Request failed to send. 
Record: timestamp={}, offset={}", timestamp, finalOffset, err)); + .sendJson(httpConnectRecord); } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java index 1ca325b18d..2b2a01a9dd 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; +import io.netty.handler.codec.http.HttpResponseStatus; import io.vertx.core.Vertx; import io.vertx.core.http.HttpServer; import io.vertx.core.http.HttpServerOptions; @@ -41,6 +42,7 @@ import io.vertx.ext.web.Router; import io.vertx.ext.web.handler.LoggerHandler; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -52,22 +54,18 @@ public class HttpSourceConnector implements Source, ConnectorCreateService configClass() { @@ -106,7 +104,7 @@ private void doInit() { final Vertx vertx = Vertx.vertx(); final Router router = Router.router(vertx); - final Route route = router.route() + route = router.route() .path(this.sourceConfig.connectorConfig.getPath()) .handler(LoggerHandler.create()); @@ -136,7 +134,15 @@ public void start() { @Override public void commit(ConnectRecord record) { - + if (this.route != null && sourceConfig.getConnectorConfig().isDataConsistencyEnabled()) { + this.route.handler(ctx -> { + // Return 200 OK + ctx.response() + .putHeader("content-type", "application/json") + .setStatusCode(HttpResponseStatus.OK.code()) + .end("{\"status\":\"success\",\"recordId\":\"" + record.getRecordId() + "\"}"); + }); + } } @Override @@ -146,7 +152,15 @@ public String name() { @Override public void 
onException(ConnectRecord record) { - + if (this.route != null) { + this.route.failureHandler(ctx -> { + log.error("Failed to handle the request, recordId {}. ", record.getRecordId(), ctx.failure()); + // Return Bad Response + ctx.response() + .setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()) + .end("{\"status\":\"failed\",\"recordId\":\"" + record.getRecordId() + "\"}"); + }); + } } @Override diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java index 80e4f0a753..738f045237 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java @@ -45,6 +45,8 @@ public class CommonProtocol implements Protocol { public static final String PROTOCOL_NAME = "Common"; + private SourceConnectorConfig sourceConnectorConfig; + /** * Initialize the protocol * @@ -52,7 +54,7 @@ public class CommonProtocol implements Protocol { */ @Override public void initialize(SourceConnectorConfig sourceConnectorConfig) { - + this.sourceConnectorConfig = sourceConnectorConfig; } /** @@ -77,10 +79,13 @@ public void setHandler(Route route, SynchronizedCircularFifoQueue queue) throw new IllegalStateException("Failed to store the request."); } - // Return 200 OK - ctx.response() - .setStatusCode(HttpResponseStatus.OK.code()) - .end(CommonResponse.success().toJsonStr()); + if (!sourceConnectorConfig.isDataConsistencyEnabled()) { + // Return 200 OK + ctx.response() + .setStatusCode(HttpResponseStatus.OK.code()) + .end(CommonResponse.success().toJsonStr()); + } + }) .failureHandler(ctx -> { log.error("Failed to handle the request. 
", ctx.failure()); From 691aab0152022ec98ef74e6252775bbd336a2f50 Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Wed, 7 Aug 2024 18:16:23 +0800 Subject: [PATCH 03/51] [ISSUE #5071] Enhancement for admin server and canal source/sink connector (#5072) * [ISSUE #5069] Enhancement for http source/sink connector * update http source connector & config * fix checkstyle error * [ISSUE #5071] Enhancement for admin server and canal source/sink connector --- eventmesh-admin-server/build.gradle | 2 +- eventmesh-admin-server/conf/application.yaml | 8 ++- eventmesh-admin-server/conf/eventmesh.sql | 7 ++- .../conf/mapper/EventMeshDataSourceMapper.xml | 5 +- .../web/db/entity/EventMeshDataSource.java | 2 + .../handler/impl/FetchJobRequestHandler.java | 4 +- .../datasource/DataSourceBizService.java | 2 + .../web/service/job/JobInfoBizService.java | 16 +++++- .../web/service/task/TaskBizService.java | 35 ++++++++++-- .../remote/datasource/DataSourceType.java | 9 +++ .../request/CreateOrUpdateDataSourceReq.java | 1 + .../remote/request/CreateTaskRequest.java | 6 +- ...e.eventmesh.common.remote.payload.IPayload | 1 + .../connector/canal/CanalConnectRecord.java | 5 +- .../sink/connector/CanalSinkConnector.java | 24 ++++++-- .../connector/canal/source/EntryParser.java | 15 +++-- .../offsetmgmt/admin/AdminOffsetService.java | 13 ++++- .../offsetmgmt/api/data/ConnectRecord.java | 53 +++--------------- .../runtime/boot/RuntimeInstance.java | 34 ++++++----- .../runtime/connector/ConnectorRuntime.java | 56 +++++++++++++------ .../src/main/resources/connector.yaml | 6 +- .../src/main/resources/runtime.yaml | 2 +- 22 files changed, 195 insertions(+), 111 deletions(-) diff --git a/eventmesh-admin-server/build.gradle b/eventmesh-admin-server/build.gradle index bdb6406da2..1fec2c7c52 100644 --- a/eventmesh-admin-server/build.gradle +++ b/eventmesh-admin-server/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation project(":eventmesh-common") implementation 
project(":eventmesh-registry:eventmesh-registry-api") implementation project(":eventmesh-registry:eventmesh-registry-nacos") - implementation project(':eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api') + implementation project(":eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api") implementation "com.alibaba.nacos:nacos-client" implementation("org.springframework.boot:spring-boot-starter-web") { exclude group: "org.springframework.boot", module: "spring-boot-starter-tomcat" diff --git a/eventmesh-admin-server/conf/application.yaml b/eventmesh-admin-server/conf/application.yaml index afbcd4a438..274196db60 100644 --- a/eventmesh-admin-server/conf/application.yaml +++ b/eventmesh-admin-server/conf/application.yaml @@ -26,13 +26,17 @@ mybatis-plus: configuration: map-underscore-to-camel-case: false log-impl: org.apache.ibatis.logging.stdout.StdOutImpl +# http server port +server: + port: 8082 event-mesh: admin-server: serviceName: DEFAULT_GROUP@@em_adm_server + # grpc server port port: 8081 adminServerList: region1: - - http://localhost:8081 - region2: - http://localhost:8082 + region2: + - http://localhost:8083 region: region1 \ No newline at end of file diff --git a/eventmesh-admin-server/conf/eventmesh.sql b/eventmesh-admin-server/conf/eventmesh.sql index 94edbb6fac..bdad02a8d5 100644 --- a/eventmesh-admin-server/conf/eventmesh.sql +++ b/eventmesh-admin-server/conf/eventmesh.sql @@ -33,6 +33,7 @@ CREATE TABLE IF NOT EXISTS `event_mesh_data_source` ( `dataType` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `description` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL, `configuration` text CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, + `configurationClass` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `region` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `createUid` 
varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', `updateUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', @@ -134,13 +135,13 @@ CREATE TABLE IF NOT EXISTS `event_mesh_task_info` ( -- export table eventmesh.event_mesh_verify structure CREATE TABLE IF NOT EXISTS `event_mesh_verify` ( - `id` int NOT NULL, + `id` int unsigned NOT NULL AUTO_INCREMENT, `taskID` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `recordID` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, `recordSig` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `connectorName` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, + `connectorName` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, `connectorStage` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `position` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, + `position` text COLLATE utf8mb4_general_ci DEFAULT NULL, `createTime` timestamp NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; diff --git a/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml index d100e19033..50e6ad82cc 100644 --- a/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml +++ b/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml @@ -28,6 +28,7 @@ + @@ -37,7 +38,7 @@ id,dataType,description, - configuration,region,createUid,updateUid, - createTime,updateTime + configuration,configurationClass,region, + createUid,updateUid,createTime,updateTime diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java index 9d81366aa5..e6e328984c 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java +++ 
b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java @@ -41,6 +41,8 @@ public class EventMeshDataSource implements Serializable { private String configuration; + private String configurationClass; + private String region; private String createUid; diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java index 8f159fa45b..b377bcddd8 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java @@ -53,9 +53,9 @@ public FetchJobResponse handler(FetchJobRequest request, Metadata metadata) { } response.setId(detail.getJobID()); JobConnectorConfig config = new JobConnectorConfig(); - config.setSourceConnectorConfig(JsonUtils.objectToMap(detail.getSourceDataSource())); + config.setSourceConnectorConfig(JsonUtils.objectToMap(detail.getSourceDataSource().getConf())); config.setSourceConnectorDesc(detail.getSourceConnectorDesc()); - config.setSinkConnectorConfig(JsonUtils.objectToMap(detail.getSinkDataSource())); + config.setSinkConnectorConfig(JsonUtils.objectToMap(detail.getSinkDataSource().getConf())); config.setSourceConnectorDesc(detail.getSinkConnectorDesc()); response.setConnectorConfig(config); response.setTransportType(detail.getTransportType()); diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java index 433847a4cd..4d2d670100 100644 --- 
a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java @@ -29,12 +29,14 @@ @Service public class DataSourceBizService { + @Autowired private EventMeshDataSourceService dataSourceService; public EventMeshDataSource createDataSource(CreateOrUpdateDataSourceReq dataSource) { EventMeshDataSource entity = new EventMeshDataSource(); entity.setConfiguration(JsonUtils.toJSONString(dataSource.getConfig())); + entity.setConfigurationClass(dataSource.getConfigClass()); entity.setDataType(dataSource.getType().name()); entity.setCreateUid(dataSource.getOperator()); entity.setUpdateUid(dataSource.getOperator()); diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java index ea02658481..0657383e23 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java @@ -27,6 +27,7 @@ import org.apache.eventmesh.admin.server.web.pojo.JobDetail; import org.apache.eventmesh.admin.server.web.service.datasource.DataSourceBizService; import org.apache.eventmesh.admin.server.web.service.position.PositionBizService; +import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.remote.TaskState; import org.apache.eventmesh.common.remote.TransportType; import org.apache.eventmesh.common.remote.datasource.DataSource; @@ -114,6 +115,7 @@ public List createJobs(List jobs) { source.setRegion(job.getSourceDataSource().getRegion()); source.setDesc(job.getSourceConnectorDesc()); source.setConfig(job.getSourceDataSource().getConf()); 
+ source.setConfigClass(job.getSourceDataSource().getConfClazz().getName()); EventMeshDataSource createdSource = dataSourceBizService.createDataSource(source); entity.setSourceData(createdSource.getId()); @@ -123,6 +125,7 @@ public List createJobs(List jobs) { sink.setRegion(job.getSinkDataSource().getRegion()); sink.setDesc(job.getSinkConnectorDesc()); sink.setConfig(job.getSinkDataSource().getConf()); + sink.setConfigClass(job.getSinkDataSource().getConfClazz().getName()); EventMeshDataSource createdSink = dataSourceBizService.createDataSource(sink); entity.setTargetData(createdSink.getId()); @@ -141,18 +144,22 @@ public JobDetail getJobDetail(String jobID) { if (jobID == null) { return null; } - EventMeshJobInfo job = jobInfoService.getById(jobID); + EventMeshJobInfo job = jobInfoService.getOne(Wrappers.query().eq("jobID", jobID)); if (job == null) { return null; } JobDetail detail = new JobDetail(); + detail.setTaskID(job.getTaskID()); detail.setJobID(job.getJobID()); EventMeshDataSource source = dataSourceService.getById(job.getSourceData()); EventMeshDataSource target = dataSourceService.getById(job.getTargetData()); if (source != null) { if (!StringUtils.isBlank(source.getConfiguration())) { try { - detail.setSourceDataSource(JsonUtils.parseObject(source.getConfiguration(), DataSource.class)); + DataSource sourceDataSource = new DataSource(); + Class configClass = Class.forName(source.getConfigurationClass()); + sourceDataSource.setConf((Config) JsonUtils.parseObject(source.getConfiguration(), configClass)); + detail.setSourceDataSource(sourceDataSource); } catch (Exception e) { log.warn("parse source config id [{}] fail", job.getSourceData(), e); throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "illegal source data source config"); @@ -168,7 +175,10 @@ public JobDetail getJobDetail(String jobID) { if (target != null) { if (!StringUtils.isBlank(target.getConfiguration())) { try { - 
detail.setSinkDataSource(JsonUtils.parseObject(target.getConfiguration(), DataSource.class)); + DataSource sinkDataSource = new DataSource(); + Class configClass = Class.forName(target.getConfigurationClass()); + sinkDataSource.setConf((Config) JsonUtils.parseObject(target.getConfiguration(), configClass)); + detail.setSinkDataSource(sinkDataSource); } catch (Exception e) { log.warn("parse sink config id [{}] fail", job.getSourceData(), e); throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "illegal target data sink config"); diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java index f686456135..7089f9cf76 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java @@ -22,12 +22,17 @@ import org.apache.eventmesh.admin.server.web.db.service.EventMeshTaskInfoService; import org.apache.eventmesh.admin.server.web.pojo.JobDetail; import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.datasource.DataSource; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.commons.lang3.StringUtils; import java.util.List; +import java.util.Map; import java.util.Random; import java.util.UUID; import java.util.stream.Collectors; @@ -40,6 +45,7 @@ @Service public class TaskBizService { + @Autowired private EventMeshTaskInfoService taskInfoService; @@ -76,7 +82,12 @@ public String 
createTask(CreateTaskRequest req) { String finalTaskID = taskID; List jobs = req.getJobs().stream().map(x -> { - JobDetail job = parse(x); + JobDetail job = null; + try { + job = parse(x); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } job.setTaskID(finalTaskID); job.setCreateUid(req.getUid()); job.setUpdateUid(req.getUid()); @@ -95,14 +106,30 @@ public String createTask(CreateTaskRequest req) { return finalTaskID; } - private JobDetail parse(CreateTaskRequest.JobDetail src) { + private JobDetail parse(CreateTaskRequest.JobDetail src) throws ClassNotFoundException { JobDetail dst = new JobDetail(); dst.setJobDesc(src.getJobDesc()); dst.setTransportType(src.getTransportType()); dst.setSourceConnectorDesc(src.getSourceConnectorDesc()); - dst.setSourceDataSource(src.getSourceDataSource()); + Map sourceDataMap = src.getSourceDataSource(); + DataSource sourceDataSource = new DataSource(); + sourceDataSource.setType(DataSourceType.fromString(sourceDataMap.get("type").toString())); + sourceDataSource.setDesc((String) sourceDataMap.get("desc")); + sourceDataSource.setConfClazz((Class) Class.forName(sourceDataMap.get("confClazz").toString())); + sourceDataSource.setConf(JsonUtils.parseObject(JsonUtils.toJSONString(sourceDataMap.get("conf")), sourceDataSource.getConfClazz())); + sourceDataSource.setRegion((String) sourceDataMap.get("region")); + dst.setSourceDataSource(sourceDataSource); + dst.setSinkConnectorDesc(src.getSinkConnectorDesc()); - dst.setSinkDataSource(src.getSinkDataSource()); + Map sinkDataMap = src.getSinkDataSource(); + DataSource sinkDataSource = new DataSource(); + sinkDataSource.setType(DataSourceType.fromString(sinkDataMap.get("type").toString())); + sinkDataSource.setDesc((String) sinkDataMap.get("desc")); + sinkDataSource.setConfClazz((Class) Class.forName(sinkDataMap.get("confClazz").toString())); + sinkDataSource.setConf(JsonUtils.parseObject(JsonUtils.toJSONString(sinkDataMap.get("conf")), 
sinkDataSource.getConfClazz())); + sinkDataSource.setRegion((String) sinkDataMap.get("region")); + dst.setSinkDataSource(sinkDataSource); + // full/increase/check dst.setJobType(src.getJobType()); dst.setFromRegion(src.getFromRegion()); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java index 985f311b92..8c40971e7b 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java @@ -61,4 +61,13 @@ public static DataSourceType getDataSourceType(Integer index) { } return TYPES[index]; } + + public static DataSourceType fromString(String type) { + for (DataSourceType dataSourceType : DataSourceType.values()) { + if (dataSourceType.name().equalsIgnoreCase(type)) { + return dataSourceType; + } + } + throw new IllegalArgumentException("No enum constant for type: " + type); + } } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java index fadfa68e75..f78349703a 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java @@ -34,6 +34,7 @@ public class CreateOrUpdateDataSourceReq extends BaseRemoteRequest { private DataSourceType type; private String desc; private Config config; + private String configClass; private String region; private String operator; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java 
b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java index 47c45595af..c895b5c440 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java @@ -18,10 +18,10 @@ package org.apache.eventmesh.common.remote.request; import org.apache.eventmesh.common.remote.TransportType; -import org.apache.eventmesh.common.remote.datasource.DataSource; import org.apache.eventmesh.common.remote.job.JobType; import java.util.List; +import java.util.Map; import lombok.Data; @@ -61,11 +61,11 @@ public static class JobDetail { // full/increase/check private JobType jobType; - private DataSource sourceDataSource; + private Map sourceDataSource; private String sourceConnectorDesc; - private DataSource sinkDataSource; + private Map sinkDataSource; private String sinkConnectorDesc; diff --git a/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload b/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload index 2af95c7510..82d5c94dd3 100644 --- a/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload +++ b/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload @@ -16,6 +16,7 @@ org.apache.eventmesh.common.remote.request.FetchJobRequest org.apache.eventmesh.common.remote.response.FetchJobResponse org.apache.eventmesh.common.remote.request.ReportPositionRequest +org.apache.eventmesh.common.remote.request.ReportVerifyRequest org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest org.apache.eventmesh.common.remote.request.FetchPositionRequest org.apache.eventmesh.common.remote.response.FetchPositionResponse \ No newline at end of file diff --git 
a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java index 36ecd158f6..6f112081e8 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java @@ -22,13 +22,16 @@ import org.apache.eventmesh.connector.canal.model.EventColumn; import org.apache.eventmesh.connector.canal.model.EventType; +import java.io.Serializable; import java.util.ArrayList; import java.util.List; import lombok.Data; @Data -public class CanalConnectRecord { +public class CanalConnectRecord implements Serializable { + + private static final long serialVersionUID = 1L; private String schemaName; diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java index 2ecb2384ac..49fb10dd35 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java @@ -44,6 +44,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.lang3.SerializationUtils; import java.sql.PreparedStatement; import java.sql.SQLException; @@ -163,7 +164,11 @@ public void stop() { public void put(List sinkRecords) { DbLoadContext context = new DbLoadContext(); for (ConnectRecord connectRecord : sinkRecords) { - List 
canalConnectRecordList = (List) connectRecord.getData(); + List canalConnectRecordList = new ArrayList<>(); + // deep copy connectRecord data + for (CanalConnectRecord record : (List) connectRecord.getData()) { + canalConnectRecordList.add(SerializationUtils.clone(record)); + } canalConnectRecordList = filterRecord(canalConnectRecordList); if (isDdlDatas(canalConnectRecordList)) { doDdl(context, canalConnectRecordList, connectRecord); @@ -175,7 +180,7 @@ public void put(List sinkRecords) { DbLoadData loadData = new DbLoadData(); doBefore(canalConnectRecordList, loadData); - doLoad(context, sinkConfig, loadData); + doLoad(context, sinkConfig, loadData, connectRecord); } @@ -259,7 +264,7 @@ private void doBefore(List canalConnectRecordList, final DbL } } - private void doLoad(DbLoadContext context, CanalSinkConfig sinkConfig, DbLoadData loadData) { + private void doLoad(DbLoadContext context, CanalSinkConfig sinkConfig, DbLoadData loadData, ConnectRecord connectRecord) { List> batchDatas = new ArrayList<>(); for (TableLoadData tableData : loadData.getTables()) { if (useBatch) { @@ -271,7 +276,7 @@ private void doLoad(DbLoadContext context, CanalSinkConfig sinkConfig, DbLoadDat } } - doTwoPhase(context, sinkConfig, batchDatas, true); + doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); batchDatas.clear(); @@ -289,7 +294,7 @@ private void doLoad(DbLoadContext context, CanalSinkConfig sinkConfig, DbLoadDat } } - doTwoPhase(context, sinkConfig, batchDatas, true); + doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); batchDatas.clear(); } @@ -390,7 +395,8 @@ private boolean canBatch(CanalConnectRecord source, CanalConnectRecord target) { && StringUtils.equals(source.getSql(), target.getSql()); } - private void doTwoPhase(DbLoadContext context, CanalSinkConfig sinkConfig, List> totalRows, boolean canBatch) { + private void doTwoPhase(DbLoadContext context, CanalSinkConfig sinkConfig, List> totalRows, boolean canBatch, + ConnectRecord 
connectRecord) { List> results = new ArrayList<>(); for (List rows : totalRows) { if (CollectionUtils.isEmpty(rows)) { @@ -404,6 +410,9 @@ private void doTwoPhase(DbLoadContext context, CanalSinkConfig sinkConfig, List< Exception ex = null; try { ex = result.get(); + if (ex == null) { + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } } catch (Exception e) { ex = e; } @@ -433,12 +442,14 @@ private void doTwoPhase(DbLoadContext context, CanalSinkConfig sinkConfig, List< log.warn("skip exception for data : {} , caused by {}", retryRecord, ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); } } catch (Exception ex) { // do skip log.warn("skip exception for data : {} , caused by {}", retryRecord, ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); } } } else { @@ -451,6 +462,7 @@ private void doTwoPhase(DbLoadContext context, CanalSinkConfig sinkConfig, List< } catch (Exception ex) { log.error("##load phase two failed!", ex); log.error("sink connector will shutdown by " + ex.getMessage(), ex); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); executor.shutdown(); System.exit(1); } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java index 5c4303588d..75572a5faf 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java @@ -67,7 +67,7 @@ public static Map> parse(CanalSourceConfig source } } else { // if not gtid mode, need check weather the entry is loopback by specified 
column value - needSync = checkNeedSync(sourceConfig, rowChange.getRowDatas(0)); + needSync = checkNeedSync(sourceConfig, rowChange); if (needSync) { transactionDataBuffer.add(entry); } @@ -115,9 +115,16 @@ private static void parseRecordListWithEntryBuffer(CanalSourceConfig sourceConfi } } - private static boolean checkNeedSync(CanalSourceConfig sourceConfig, RowData rowData) { - Column markedColumn = getColumnIgnoreCase(rowData.getAfterColumnsList(), - sourceConfig.getNeedSyncMarkTableColumnName()); + private static boolean checkNeedSync(CanalSourceConfig sourceConfig, RowChange rowChange) { + Column markedColumn = null; + CanalEntry.EventType eventType = rowChange.getEventType(); + if (eventType.equals(CanalEntry.EventType.DELETE) || eventType.equals(CanalEntry.EventType.UPDATE)) { + markedColumn = getColumnIgnoreCase(rowChange.getRowDatas(0).getBeforeColumnsList(), + sourceConfig.getNeedSyncMarkTableColumnName()); + } else if (eventType.equals(CanalEntry.EventType.INSERT)) { + markedColumn = getColumnIgnoreCase(rowChange.getRowDatas(0).getAfterColumnsList(), + sourceConfig.getNeedSyncMarkTableColumnName()); + } if (markedColumn != null) { return StringUtils.equalsIgnoreCase(markedColumn.getValue(), sourceConfig.getNeedSyncMarkTableColumnValue()); diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java index 08270fc024..977661b134 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java +++ 
b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Random; import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; @@ -236,7 +237,7 @@ public void initialize(OffsetStorageConfig offsetStorageConfig) { this.dataSourceType = offsetStorageConfig.getDataSourceType(); this.dataSinkType = offsetStorageConfig.getDataSinkType(); - this.adminServerAddr = offsetStorageConfig.getOffsetStorageAddr(); + this.adminServerAddr = getRandomAdminServerAddr(offsetStorageConfig.getOffsetStorageAddr()); this.channel = ManagedChannelBuilder.forTarget(adminServerAddr) .usePlaintext() .build(); @@ -274,4 +275,14 @@ public void onCompleted() { this.jobState = TaskState.RUNNING; this.jobId = offsetStorageConfig.getExtensions().get("jobId"); } + + private String getRandomAdminServerAddr(String adminServerAddrList) { + String[] addresses = adminServerAddrList.split(";"); + if (addresses.length == 0) { + throw new IllegalArgumentException("Admin server address list is empty"); + } + Random random = new Random(); + int randomIndex = random.nextInt(addresses.length); + return addresses[randomIndex]; + } } diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java index b3fc4346c4..0a41e18f7c 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java +++ 
b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java @@ -26,21 +26,30 @@ import java.util.Set; import java.util.UUID; +import lombok.Getter; +import lombok.Setter; + /** * SourceDataEntries are generated by SourceTasks and passed to specific message queue to store. */ +@Getter public class ConnectRecord { private final String recordId = UUID.randomUUID().toString(); + @Setter private Long timestamp; + @Setter private Object data; + @Setter private RecordPosition position; + @Setter private KeyValue extensions; + @Setter private SendMessageCallback callback; public ConnectRecord() { @@ -63,42 +72,6 @@ public ConnectRecord(RecordPartition recordPartition, RecordOffset recordOffset, this.data = data; } - public String getRecordId() { - return recordId; - } - - public Long getTimestamp() { - return timestamp; - } - - public void setTimestamp(Long timestamp) { - this.timestamp = timestamp; - } - - public Object getData() { - return data; - } - - public void setData(Object data) { - this.data = data; - } - - public KeyValue getExtensions() { - return extensions; - } - - public void setExtensions(KeyValue extensions) { - this.extensions = extensions; - } - - public RecordPosition getPosition() { - return position; - } - - public void setPosition(RecordPosition position) { - this.position = position; - } - public void addExtension(KeyValue extensions) { if (this.extensions == null) { this.extensions = new DefaultKeyValue(); @@ -137,14 +110,6 @@ public Object getExtensionObj(String key) { return this.extensions.getObject(key); } - public SendMessageCallback getCallback() { - return callback; - } - - public void setCallback(SendMessageCallback callback) { - this.callback = callback; - } - @Override public boolean equals(Object o) { if (this == o) { diff --git 
a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java index acea321e95..beb1d1eedc 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java @@ -76,6 +76,8 @@ public void init() throws Exception { } // use registry adminServiceAddr value replace config runtimeInstanceConfig.setAdminServiceAddr(adminServiceAddr); + } else { + adminServiceAddr = runtimeInstanceConfig.getAdminServiceAddr(); } runtimeFactory = initRuntimeFactory(runtimeInstanceConfig); @@ -84,23 +86,25 @@ public void init() throws Exception { } public void start() throws Exception { - if (!StringUtils.isBlank(adminServiceAddr) && registryService != null) { - registryService.subscribe((event) -> { - log.info("runtime receive registry event: {}", event); - List registerServerInfoList = event.getInstances(); - Map registerServerInfoMap = new HashMap<>(); - for (RegisterServerInfo registerServerInfo : registerServerInfoList) { - registerServerInfoMap.put(registerServerInfo.getAddress(), registerServerInfo); - } - if (!registerServerInfoMap.isEmpty()) { - adminServerInfoMap = registerServerInfoMap; - updateAdminServerAddr(); - } - }, runtimeInstanceConfig.getAdminServiceName()); + if (StringUtils.isBlank(adminServiceAddr)) { + throw new RuntimeException("admin server address is empty, please check"); + } else { + if (registryService != null) { + registryService.subscribe((event) -> { + log.info("runtime receive registry event: {}", event); + List registerServerInfoList = event.getInstances(); + Map registerServerInfoMap = new HashMap<>(); + for (RegisterServerInfo registerServerInfo : registerServerInfoList) { + registerServerInfoMap.put(registerServerInfo.getAddress(), registerServerInfo); + } + if (!registerServerInfoMap.isEmpty()) { + 
adminServerInfoMap = registerServerInfoMap; + updateAdminServerAddr(); + } + }, runtimeInstanceConfig.getAdminServiceName()); + } runtime.start(); isStarted = true; - } else { - throw new RuntimeException("admin server address is empty, please check"); } } diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java index 1e589ebd97..501f222fd3 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java @@ -63,9 +63,12 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Random; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -132,6 +135,8 @@ public class ConnectorRuntime implements Runtime { public static final String CALLBACK_EXTENSION = "callBackExtension"; + private String adminServerAddr; + public ConnectorRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { this.runtimeInstanceConfig = runtimeInstanceConfig; @@ -149,8 +154,9 @@ public void init() throws Exception { } private void initAdminService() { + adminServerAddr = getRandomAdminServerAddr(runtimeInstanceConfig.getAdminServiceAddr()); // create gRPC channel - channel = ManagedChannelBuilder.forTarget(runtimeInstanceConfig.getAdminServiceAddr()).usePlaintext().build(); + channel = ManagedChannelBuilder.forTarget(adminServerAddr).usePlaintext().build(); adminServiceStub = AdminServiceGrpc.newStub(channel).withWaitForReady(); @@ -176,6 +182,16 @@ public void onCompleted() { requestObserver = 
adminServiceStub.invokeBiStream(responseObserver); } + private String getRandomAdminServerAddr(String adminServerAddrList) { + String[] addresses = adminServerAddrList.split(";"); + if (addresses.length == 0) { + throw new IllegalArgumentException("Admin server address list is empty"); + } + Random random = new Random(); + int randomIndex = random.nextInt(addresses.length); + return addresses[randomIndex]; + } + private void initStorageService() { // TODO: init producer & consumer producer = StoragePluginFactory.getMeshMQProducer(runtimeInstanceConfig.getStoragePluginType()); @@ -202,25 +218,18 @@ private void initConnectorService() throws Exception { connectorRuntimeConfig.setSinkConnectorDesc(jobResponse.getConnectorConfig().getSinkConnectorDesc()); connectorRuntimeConfig.setSinkConnectorConfig(jobResponse.getConnectorConfig().getSinkConnectorConfig()); - ConnectorCreateService sourceConnectorCreateService = - ConnectorPluginFactory.createConnector(connectorRuntimeConfig.getSourceConnectorType() + "-Source"); - sourceConnector = (Source) sourceConnectorCreateService.create(); - - SourceConfig sourceConfig = (SourceConfig) ConfigUtil.parse(connectorRuntimeConfig.getSourceConnectorConfig(), sourceConnector.configClass()); - SourceConnectorContext sourceConnectorContext = new SourceConnectorContext(); - sourceConnectorContext.setSourceConfig(sourceConfig); - sourceConnectorContext.setRuntimeConfig(connectorRuntimeConfig.getRuntimeConfig()); - sourceConnectorContext.setOffsetStorageReader(offsetStorageReader); - if (CollectionUtils.isNotEmpty(jobResponse.getPosition())) { - sourceConnectorContext.setRecordPositionList(jobResponse.getPosition()); - } - // spi load offsetMgmtService this.offsetManagement = new RecordOffsetManagement(); this.committableOffsets = RecordOffsetManagement.CommittableOffsets.EMPTY; - OffsetStorageConfig offsetStorageConfig = sourceConfig.getOffsetStorageConfig(); + OffsetStorageConfig offsetStorageConfig = new OffsetStorageConfig(); + 
offsetStorageConfig.setOffsetStorageAddr(connectorRuntimeConfig.getRuntimeConfig().get("offsetStorageAddr").toString()); + offsetStorageConfig.setOffsetStorageType(connectorRuntimeConfig.getRuntimeConfig().get("offsetStoragePluginType").toString()); offsetStorageConfig.setDataSourceType(jobResponse.getTransportType().getSrc()); offsetStorageConfig.setDataSinkType(jobResponse.getTransportType().getDst()); + Map offsetStorageExtensions = new HashMap<>(); + offsetStorageExtensions.put("jobId", connectorRuntimeConfig.getJobID()); + offsetStorageConfig.setExtensions(offsetStorageExtensions); + this.offsetManagementService = Optional.ofNullable(offsetStorageConfig).map(OffsetStorageConfig::getOffsetStorageType) .map(storageType -> EventMeshExtensionFactory.getExtension(OffsetManagementService.class, storageType)) .orElse(new DefaultOffsetManagementServiceImpl()); @@ -228,6 +237,18 @@ private void initConnectorService() throws Exception { this.offsetStorageWriter = new OffsetStorageWriterImpl(offsetManagementService); this.offsetStorageReader = new OffsetStorageReaderImpl(offsetManagementService); + ConnectorCreateService sourceConnectorCreateService = + ConnectorPluginFactory.createConnector(connectorRuntimeConfig.getSourceConnectorType() + "-Source"); + sourceConnector = (Source) sourceConnectorCreateService.create(); + + SourceConfig sourceConfig = (SourceConfig) ConfigUtil.parse(connectorRuntimeConfig.getSourceConnectorConfig(), sourceConnector.configClass()); + SourceConnectorContext sourceConnectorContext = new SourceConnectorContext(); + sourceConnectorContext.setSourceConfig(sourceConfig); + sourceConnectorContext.setRuntimeConfig(connectorRuntimeConfig.getRuntimeConfig()); + sourceConnectorContext.setOffsetStorageReader(offsetStorageReader); + if (CollectionUtils.isNotEmpty(jobResponse.getPosition())) { + sourceConnectorContext.setRecordPositionList(jobResponse.getPosition()); + } sourceConnector.init(sourceConnectorContext); ConnectorCreateService 
sinkConnectorCreateService = @@ -330,6 +351,9 @@ private void startSourceConnector() throws Exception { // TODO: use producer pub record to storage replace below if (connectorRecordList != null && !connectorRecordList.isEmpty()) { for (ConnectRecord record : connectorRecordList) { + + queue.put(record); + // if enabled incremental data reporting consistency check if (connectorRuntimeConfig.enableIncrementalDataConsistencyCheck) { reportVerifyRequest(record, connectorRuntimeConfig, ConnectorStage.SOURCE); @@ -363,8 +387,6 @@ public void onException(SendExceptionContext sendExceptionContext) { } }); - queue.put(record); - offsetManagement.awaitAllMessages(5000, TimeUnit.MILLISECONDS); // update & commit offset updateCommittableOffsets(); diff --git a/eventmesh-runtime-v2/src/main/resources/connector.yaml b/eventmesh-runtime-v2/src/main/resources/connector.yaml index 2e79e5cedc..3e407fa3e9 100644 --- a/eventmesh-runtime-v2/src/main/resources/connector.yaml +++ b/eventmesh-runtime-v2/src/main/resources/connector.yaml @@ -15,7 +15,9 @@ # limitations under the License. 
# -taskID: 1 -jobID: 1 +taskID: 9c18a0d2-7a61-482c-8275-34f8c2786cea +jobID: a01fd5e1-d295-4b89-99bc-0ae23eb85acf region: region1 runtimeConfig: # this used for connector runtime config + offsetStoragePluginType: admin + offsetStorageAddr: "127.0.0.1:8081;127.0.0.1:8081" \ No newline at end of file diff --git a/eventmesh-runtime-v2/src/main/resources/runtime.yaml b/eventmesh-runtime-v2/src/main/resources/runtime.yaml index c5ffac9d92..9ac36f27b0 100644 --- a/eventmesh-runtime-v2/src/main/resources/runtime.yaml +++ b/eventmesh-runtime-v2/src/main/resources/runtime.yaml @@ -21,4 +21,4 @@ registryServerAddr: 127.0.0.1:8085 registryPluginType: nacos storagePluginType: memory adminServiceName: eventmesh-admin -adminServiceAddr: "127.0.0.1:8085;127.0.0.1:8086" +adminServiceAddr: "127.0.0.1:8081;127.0.0.1:8081" From a941d0f3a24f13034d573191a2cf76e3b39687f2 Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Thu, 8 Aug 2024 11:23:21 +0800 Subject: [PATCH 04/51] [ISSUE #5073] Fix eventmesh-admin-server createTask response error (#5074) * [ISSUE #5073] Fix eventmesh-admin-server createTask response error * update eventmesh.sql --- eventmesh-admin-server/conf/eventmesh.sql | 112 +++++++++--------- .../admin/server/web/HttpServer.java | 6 +- .../eventmesh/admin/server/web/Response.java | 7 ++ 3 files changed, 66 insertions(+), 59 deletions(-) diff --git a/eventmesh-admin-server/conf/eventmesh.sql b/eventmesh-admin-server/conf/eventmesh.sql index bdad02a8d5..986320570a 100644 --- a/eventmesh-admin-server/conf/eventmesh.sql +++ b/eventmesh-admin-server/conf/eventmesh.sql @@ -17,134 +17,134 @@ /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET NAMES utf8 */; -/*!50503 SET NAMES utf8mb4 */; +/*!50503 SET NAMES utf8 */; /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- export eventmesh
database -CREATE DATABASE IF NOT EXISTS `eventmesh` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci */ /*!80016 DEFAULT ENCRYPTION='N' */; +CREATE DATABASE IF NOT EXISTS `eventmesh` /*!40100 DEFAULT CHARACTER SET utf8 COLLATE utf8_bin */ /*!80016 DEFAULT ENCRYPTION='N' */; USE `eventmesh`; -- export table eventmesh.event_mesh_data_source structure CREATE TABLE IF NOT EXISTS `event_mesh_data_source` ( `id` int unsigned NOT NULL AUTO_INCREMENT, - `dataType` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `description` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL, - `configuration` text CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `configurationClass` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `region` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `createUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `updateUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `dataType` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `description` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `configuration` text CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `configurationClass` varchar(200) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `region` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `updateUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`) USING BTREE -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- export table eventmesh.event_mesh_job_info structure CREATE TABLE IF NOT EXISTS `event_mesh_job_info` ( `id` int unsigned NOT NULL AUTO_INCREMENT, - `jobID` varchar(50) CHARACTER 
SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `jobDesc` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, - `taskID` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `transportType` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `jobID` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `jobDesc` varchar(50) COLLATE utf8_bin NOT NULL, + `taskID` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `transportType` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', `sourceData` int NOT NULL DEFAULT '0', `targetData` int NOT NULL DEFAULT '0', - `jobState` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `jobType` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `fromRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `runningRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `createUid` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `updateUid` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, + `jobState` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `jobType` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `fromRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `runningRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createUid` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `updateUid` varchar(50) COLLATE utf8_bin DEFAULT NULL, `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`) USING BTREE, UNIQUE KEY `jobID` (`jobID`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- export table eventmesh.event_mesh_mysql_position structure CREATE TABLE IF NOT EXISTS `event_mesh_mysql_position` ( `id` int unsigned NOT NULL AUTO_INCREMENT, - `jobID` varchar(50) COLLATE utf8mb4_general_ci NOT NULL 
DEFAULT '', - `serverUUID` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL, - `address` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, + `jobID` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `serverUUID` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `address` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, `position` bigint DEFAULT NULL, - `gtid` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL, - `currentGtid` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL, + `gtid` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `currentGtid` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, `timestamp` bigint DEFAULT NULL, - `journalName` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL, + `journalName` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `jobID` (`jobID`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci ROW_FORMAT=DYNAMIC; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=DYNAMIC; -- export table eventmesh.event_mesh_position_reporter_history structure CREATE TABLE IF NOT EXISTS `event_mesh_position_reporter_history` ( `id` bigint NOT NULL AUTO_INCREMENT, - `job` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `record` text CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `address` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `job` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `record` text CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `address` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY 
(`id`), KEY `job` (`job`), KEY `address` (`address`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci COMMENT='record position reporter changes'; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='record position reporter changes'; -- export table eventmesh.event_mesh_runtime_heartbeat structure CREATE TABLE IF NOT EXISTS `event_mesh_runtime_heartbeat` ( `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `adminAddr` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `runtimeAddr` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `jobID` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `reportTime` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'runtime local report time', + `adminAddr` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `runtimeAddr` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `jobID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `reportTime` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL COMMENT 'runtime local report time', `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `runtimeAddr` (`runtimeAddr`), KEY `jobID` (`jobID`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- export table eventmesh.event_mesh_runtime_history structure CREATE TABLE IF NOT EXISTS `event_mesh_runtime_history` ( `id` bigint NOT NULL AUTO_INCREMENT, - `job` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `address` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `job` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `address` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), KEY 
`address` (`address`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci ROW_FORMAT=DYNAMIC COMMENT='record runtime task change history'; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=DYNAMIC COMMENT='record runtime task change history'; -- export table eventmesh.event_mesh_task_info structure CREATE TABLE IF NOT EXISTS `event_mesh_task_info` ( `id` int unsigned NOT NULL AUTO_INCREMENT, - `taskID` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, - `taskName` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, - `taskDesc` varchar(50) COLLATE utf8mb4_general_ci NOT NULL, - `taskState` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT 'taskstate', - `sourceRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `targetRegion` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `createUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', - `updateUid` varchar(50) COLLATE utf8mb4_general_ci NOT NULL DEFAULT '', + `taskID` varchar(50) COLLATE utf8_bin NOT NULL, + `taskName` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `taskDesc` varchar(50) COLLATE utf8_bin NOT NULL, + `taskState` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '' COMMENT 'taskstate', + `sourceRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `targetRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `updateUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`) USING BTREE, UNIQUE KEY `taskID` (`taskID`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- export table eventmesh.event_mesh_verify structure CREATE TABLE IF NOT EXISTS `event_mesh_verify` ( `id` int unsigned NOT NULL AUTO_INCREMENT, 
- `taskID` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `recordID` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `recordSig` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `connectorName` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, - `connectorStage` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, - `position` text COLLATE utf8mb4_general_ci DEFAULT NULL, + `taskID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `recordID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `recordSig` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `connectorName` varchar(200) COLLATE utf8_bin DEFAULT NULL, + `connectorStage` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `position` text COLLATE utf8_bin DEFAULT NULL, `createTime` timestamp NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java index a5daac881e..b79ac5ae82 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java @@ -19,6 +19,7 @@ import org.apache.eventmesh.admin.server.web.service.task.TaskBizService; import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.eventmesh.common.utils.JsonUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.ResponseEntity; @@ -27,18 +28,17 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; -import 
com.alibaba.druid.support.json.JSONUtils; - @RestController @RequestMapping("/eventmesh/admin") public class HttpServer { + @Autowired private TaskBizService taskService; @RequestMapping(value = "/createTask", method = RequestMethod.POST) public ResponseEntity createOrUpdateTask(@RequestBody CreateTaskRequest task) { String uuid = taskService.createTask(task); - return ResponseEntity.ok(JSONUtils.toJSONString(Response.success(uuid))); + return ResponseEntity.ok(JsonUtils.toJSONString(Response.success(uuid))); } public boolean deleteTask(Long id) { diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java index 329a00baae..d573c3bac4 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java @@ -19,8 +19,15 @@ import org.apache.eventmesh.common.remote.exception.ErrorCode; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter public class Response { + private int code; + private boolean success; private String desc; From 0ba3c5e5d363620b0625d146e4cb73a67fcd1473 Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Thu, 8 Aug 2024 19:18:02 +0800 Subject: [PATCH 05/51] [ISSUE #5075] update eventmesh-admin-server create task response (#5076) * [ISSUE #5073] Fix eventmesh-admin-server createTask response error * udpate eventmesh.sql * [ISSUE #5075] update eventmesh-admin-server create task response #5075 --- .../admin/server/web/HttpServer.java | 5 +- .../web/service/task/TaskBizService.java | 80 ++++++++++++++----- .../remote/request/CreateTaskRequest.java | 2 + .../remote/response/CreateTaskResponse.java | 12 +++ .../eventmesh/common/utils/JsonUtils.java | 4 + 5 files changed, 79 insertions(+), 24 deletions(-) diff --git 
a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java index b79ac5ae82..12afb3a3d4 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java @@ -19,6 +19,7 @@ import org.apache.eventmesh.admin.server.web.service.task.TaskBizService; import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.eventmesh.common.remote.response.CreateTaskResponse; import org.apache.eventmesh.common.utils.JsonUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -37,8 +38,8 @@ public class HttpServer { @RequestMapping(value = "/createTask", method = RequestMethod.POST) public ResponseEntity createOrUpdateTask(@RequestBody CreateTaskRequest task) { - String uuid = taskService.createTask(task); - return ResponseEntity.ok(JsonUtils.toJSONString(Response.success(uuid))); + CreateTaskResponse createTaskResponse = taskService.createTask(task); + return ResponseEntity.ok(JsonUtils.toJSONString(Response.success(createTaskResponse))); } public boolean deleteTask(Long id) { diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java index 7089f9cf76..7bc16ba4ac 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java @@ -18,6 +18,8 @@ package org.apache.eventmesh.admin.server.web.service.task; import org.apache.eventmesh.admin.server.AdminServerProperties; +import org.apache.eventmesh.admin.server.web.Response; +import 
org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; import org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo; import org.apache.eventmesh.admin.server.web.db.service.EventMeshTaskInfoService; import org.apache.eventmesh.admin.server.web.pojo.JobDetail; @@ -27,10 +29,12 @@ import org.apache.eventmesh.common.remote.datasource.DataSource; import org.apache.eventmesh.common.remote.datasource.DataSourceType; import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.eventmesh.common.remote.response.CreateTaskResponse; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.commons.lang3.StringUtils; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Random; @@ -55,8 +59,18 @@ public class TaskBizService { @Autowired private AdminServerProperties properties; + private static final String TYPE = "type"; + + private static final String DESC = "desc"; + + private static final String CONF_CLAZZ = "confClazz"; + + private static final String CONF = "conf"; + + private static final String REGION = "region"; + @Transactional - public String createTask(CreateTaskRequest req) { + public CreateTaskResponse createTask(CreateTaskRequest req) { String taskID = req.getTaskId(); if (StringUtils.isEmpty(taskID)) { taskID = UUID.randomUUID().toString(); @@ -64,8 +78,9 @@ public String createTask(CreateTaskRequest req) { } String targetRegion = req.getTargetRegion(); + String remoteResponse = ""; // not from other admin && target not equals with self region - if (!req.isFlag() && !StringUtils.equals(properties.getRegion(), targetRegion)) { + if (!req.isFlag() && !properties.getRegion().equals(targetRegion)) { List adminServerList = properties.getAdminServerList().get(targetRegion); if (adminServerList == null || adminServerList.isEmpty()) { throw new RuntimeException("No admin server available for region: " + targetRegion); @@ -78,6 +93,7 @@ public String 
createTask(CreateTaskRequest req) { if (!response.getStatusCode().is2xxSuccessful()) { throw new RuntimeException("Failed to create task on admin server: " + targetUrl); } + remoteResponse = response.getBody(); } String finalTaskID = taskID; @@ -93,7 +109,7 @@ public String createTask(CreateTaskRequest req) { job.setUpdateUid(req.getUid()); return job; }).collect(Collectors.toList()); - jobInfoService.createJobs(jobs); + EventMeshTaskInfo taskInfo = new EventMeshTaskInfo(); taskInfo.setTaskID(finalTaskID); taskInfo.setTaskName(req.getTaskName()); @@ -102,8 +118,9 @@ public String createTask(CreateTaskRequest req) { taskInfo.setCreateUid(req.getUid()); taskInfo.setSourceRegion(req.getSourceRegion()); taskInfo.setTargetRegion(req.getTargetRegion()); + List eventMeshJobInfoList = jobInfoService.createJobs(jobs); taskInfoService.save(taskInfo); - return finalTaskID; + return buildCreateTaskResponse(finalTaskID, eventMeshJobInfoList, remoteResponse); } private JobDetail parse(CreateTaskRequest.JobDetail src) throws ClassNotFoundException { @@ -111,29 +128,48 @@ private JobDetail parse(CreateTaskRequest.JobDetail src) throws ClassNotFoundExc dst.setJobDesc(src.getJobDesc()); dst.setTransportType(src.getTransportType()); dst.setSourceConnectorDesc(src.getSourceConnectorDesc()); - Map sourceDataMap = src.getSourceDataSource(); - DataSource sourceDataSource = new DataSource(); - sourceDataSource.setType(DataSourceType.fromString(sourceDataMap.get("type").toString())); - sourceDataSource.setDesc((String) sourceDataMap.get("desc")); - sourceDataSource.setConfClazz((Class) Class.forName(sourceDataMap.get("confClazz").toString())); - sourceDataSource.setConf(JsonUtils.parseObject(JsonUtils.toJSONString(sourceDataMap.get("conf")), sourceDataSource.getConfClazz())); - sourceDataSource.setRegion((String) sourceDataMap.get("region")); - dst.setSourceDataSource(sourceDataSource); - + try { + dst.setSourceDataSource(mapToDataSource(src.getSourceDataSource())); + 
dst.setSinkDataSource(mapToDataSource(src.getSinkDataSource())); + } catch (ClassNotFoundException e) { + throw new RuntimeException("Failed to map data source", e); + } dst.setSinkConnectorDesc(src.getSinkConnectorDesc()); - Map sinkDataMap = src.getSinkDataSource(); - DataSource sinkDataSource = new DataSource(); - sinkDataSource.setType(DataSourceType.fromString(sinkDataMap.get("type").toString())); - sinkDataSource.setDesc((String) sinkDataMap.get("desc")); - sinkDataSource.setConfClazz((Class) Class.forName(sinkDataMap.get("confClazz").toString())); - sinkDataSource.setConf(JsonUtils.parseObject(JsonUtils.toJSONString(sinkDataMap.get("conf")), sinkDataSource.getConfClazz())); - sinkDataSource.setRegion((String) sinkDataMap.get("region")); - dst.setSinkDataSource(sinkDataSource); - // full/increase/check dst.setJobType(src.getJobType()); dst.setFromRegion(src.getFromRegion()); dst.setRunningRegion(src.getRunningRegion()); return dst; } + + private DataSource mapToDataSource(Map dataMap) throws ClassNotFoundException { + DataSource dataSource = new DataSource(); + dataSource.setType(DataSourceType.fromString(dataMap.get(TYPE).toString())); + dataSource.setDesc((String) dataMap.get(DESC)); + dataSource.setConfClazz((Class) Class.forName(dataMap.get(CONF_CLAZZ).toString())); + dataSource.setConf(JsonUtils.parseObject(JsonUtils.toJSONString(dataMap.get(CONF)), dataSource.getConfClazz())); + dataSource.setRegion((String) dataMap.get(REGION)); + return dataSource; + } + + private CreateTaskResponse buildCreateTaskResponse(String taskId, List eventMeshJobInfoList, String remoteResponse) { + CreateTaskResponse createTaskResponse = new CreateTaskResponse(); + createTaskResponse.setTaskId(taskId); + List jobDetailList = new ArrayList<>(); + if (!eventMeshJobInfoList.isEmpty()) { + for (EventMeshJobInfo eventMeshJobInfo : eventMeshJobInfoList) { + CreateTaskRequest.JobDetail jobDetail = new CreateTaskRequest.JobDetail(); + jobDetail.setJobId(eventMeshJobInfo.getJobID()); 
+ jobDetail.setRunningRegion(eventMeshJobInfo.getRunningRegion()); + jobDetailList.add(jobDetail); + } + } + if (!StringUtils.isEmpty(remoteResponse)) { + Response response = JsonUtils.parseObject(remoteResponse, Response.class); + CreateTaskResponse remoteCreateTaskResponse = JsonUtils.convertValue(response.getData(), CreateTaskResponse.class); + jobDetailList.addAll(remoteCreateTaskResponse.getJobIdList()); + } + createTaskResponse.setJobIdList(jobDetailList); + return createTaskResponse; + } } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java index c895b5c440..b09a3e10ed 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java @@ -56,6 +56,8 @@ public class CreateTaskRequest { @Data public static class JobDetail { + private String jobId; + private String jobDesc; // full/increase/check diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java index a6f5628d6f..11678dfcf0 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java @@ -17,5 +17,17 @@ package org.apache.eventmesh.common.remote.response; +import org.apache.eventmesh.common.remote.request.CreateTaskRequest; + +import java.util.List; + +import lombok.Data; + +@Data public class CreateTaskResponse extends BaseRemoteResponse { + + private String taskId; + + private List jobIdList; + } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java 
b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java index bf91957032..9e9cea304d 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java @@ -54,6 +54,10 @@ public class JsonUtils { OBJECT_MAPPER.registerModule(new JavaTimeModule()); } + public static T convertValue(Object fromValue, Class toValueType) { + return OBJECT_MAPPER.convertValue(fromValue, toValueType); + } + public static T mapToObject(Map map, Class beanClass) { if (map == null) { return null; From 8cb8df531774fb017851bd5d2bae908a5098bdb2 Mon Sep 17 00:00:00 2001 From: Zaki <91261012+cnzakii@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:49:10 +0800 Subject: [PATCH 06/51] [ISSUE #5077] HTTP Sink Connector supports result callback (#5078) * feat: Support CallBack for ConnectRecord * doc: Improve some documentation * feat: Support for multi-server data callbacks * perf: Optimize some logic --- .../common/SynchronizedCircularFifoQueue.java | 3 +- .../http/sink/HttpSinkConnector.java | 10 +- .../http/sink/config/HttpRetryConfig.java | 4 +- .../http/sink/data/HttpConnectRecord.java | 44 +++- .../http/sink/data/HttpExportMetadata.java | 10 +- .../http/sink/data/HttpExportRecord.java | 6 +- .../http/sink/data/HttpExportRecordPage.java | 5 +- .../http/sink/data/HttpRetryEvent.java | 80 +++++++ .../sink/data/MultiHttpRequestContext.java | 64 ++++++ .../sink/handle/RetryHttpSinkHandler.java | 206 ------------------ .../sink/handler/AbstractHttpSinkHandler.java | 88 ++++++++ .../{handle => handler}/HttpSinkHandler.java | 14 +- .../impl}/CommonHttpSinkHandler.java | 200 +++++++++++------ .../impl/HttpSinkHandlerRetryWrapper.java | 120 ++++++++++ .../impl}/WebhookHttpSinkHandler.java | 91 ++++---- .../http/sink/HttpSinkConnectorTest.java | 2 +- 16 files changed, 603 insertions(+), 344 deletions(-) create mode 100644 
eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java create mode 100644 eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java delete mode 100644 eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java create mode 100644 eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java rename eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/{handle => handler}/HttpSinkHandler.java (83%) rename eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/{handle => handler/impl}/CommonHttpSinkHandler.java (57%) create mode 100644 eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java rename eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/{handle => handler/impl}/WebhookHttpSinkHandler.java (82%) diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java index 439a9f3d78..0564e58734 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java @@ -120,9 +120,10 @@ public synchronized int size() { */ public synchronized List fetchRange(int start, int end, boolean removed) { - if 
(start < 0 || end > this.size() || start > end) { + if (start < 0 || start > end) { throw new IllegalArgumentException("Invalid range"); } + end = Math.min(end, this.size()); Iterator iterator = this.iterator(); List items = new ArrayList<>(end - start); diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java index 8a14756372..9b6038bdea 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java @@ -20,10 +20,10 @@ import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.connector.http.sink.config.HttpSinkConfig; import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; -import org.apache.eventmesh.connector.http.sink.handle.CommonHttpSinkHandler; -import org.apache.eventmesh.connector.http.sink.handle.HttpSinkHandler; -import org.apache.eventmesh.connector.http.sink.handle.RetryHttpSinkHandler; -import org.apache.eventmesh.connector.http.sink.handle.WebhookHttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.impl.CommonHttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.impl.HttpSinkHandlerRetryWrapper; +import org.apache.eventmesh.connector.http.sink.handler.impl.WebhookHttpSinkHandler; import org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; @@ -86,7 +86,7 @@ private void doInit() { this.sinkHandler = nonRetryHandler; } else if 
(maxRetries > 0) { // Wrap the sink handler with a retry handler - this.sinkHandler = new RetryHttpSinkHandler(this.httpSinkConfig.connectorConfig, nonRetryHandler); + this.sinkHandler = new HttpSinkHandlerRetryWrapper(this.httpSinkConfig.connectorConfig, nonRetryHandler); } else { throw new IllegalArgumentException("Max retries must be greater than or equal to 0."); } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java index 0bceac7d47..08c3a323e7 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java @@ -24,8 +24,8 @@ public class HttpRetryConfig { // maximum number of retries, default 2, minimum 0 private int maxRetries = 2; - // retry interval, default 2000ms - private int interval = 2000; + // retry interval, default 1000ms + private int interval = 1000; // Default value is false, indicating that only requests with network-level errors will be retried. // If set to true, all failed requests will be retried, including network-level errors and non-2xx responses. 
diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java index a258c6ab53..95b40afe9e 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java @@ -20,31 +20,60 @@ import org.apache.eventmesh.common.remote.offset.http.HttpRecordOffset; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import java.io.Serializable; import java.time.LocalDateTime; import java.util.HashMap; import java.util.Map; import java.util.UUID; import lombok.Builder; -import lombok.Data; +import lombok.Getter; /** * a special ConnectRecord for HttpSinkConnector */ -@Data +@Getter @Builder -public class HttpConnectRecord { +public class HttpConnectRecord implements Serializable { - private String type; + private static final long serialVersionUID = 5271462532332251473L; + + /** + * The unique identifier for the HttpConnectRecord + */ + private final String httpRecordId = UUID.randomUUID().toString(); - private String time; + /** + * The time when the HttpConnectRecord was created + */ + private LocalDateTime createTime; - private String uuid; + /** + * The type of the HttpConnectRecord + */ + private String type; + /** + * The event id of the HttpConnectRecord + */ private String eventId; + /** + * The ConnectRecord to be sent + */ private ConnectRecord data; + @Override + public String toString() { + return "HttpConnectRecord{" + + "createTime=" + createTime + + ", httpRecordId='" + httpRecordId + + ", type='" + type + + ", eventId='" + eventId + + ", data=" + data + + '}'; + } + /** * Convert ConnectRecord to HttpConnectRecord * @@ -62,11 
+91,8 @@ public static HttpConnectRecord convertConnectRecord(ConnectRecord record, Strin } return HttpConnectRecord.builder() .type(type) - .time(LocalDateTime.now().toString()) - .uuid(UUID.randomUUID().toString()) .eventId(type + "-" + offset) .data(record) .build(); } - } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java index 848012f152..41a5087870 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.http.sink.data; +import java.io.Serializable; import java.time.LocalDateTime; import lombok.Builder; @@ -27,7 +28,10 @@ */ @Data @Builder -public class HttpExportMetadata { +public class HttpExportMetadata implements Serializable { + + private static final long serialVersionUID = 1121010466793041920L; + private String url; private int code; @@ -36,7 +40,9 @@ public class HttpExportMetadata { private LocalDateTime receivedTime; - private String uuid; + private String httpRecordId; + + private String recordId; private String retriedBy; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java index b6382aee7a..c6bdb02884 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java +++ 
b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java @@ -17,6 +17,8 @@ package org.apache.eventmesh.connector.http.sink.data; +import java.io.Serializable; + import lombok.AllArgsConstructor; import lombok.Data; @@ -25,7 +27,9 @@ */ @Data @AllArgsConstructor -public class HttpExportRecord { +public class HttpExportRecord implements Serializable { + + private static final long serialVersionUID = 6010283911452947157L; private HttpExportMetadata metadata; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java index 5c44eb3b7f..81e582c33a 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.http.sink.data; +import java.io.Serializable; import java.util.List; import lombok.AllArgsConstructor; @@ -27,7 +28,9 @@ */ @Data @AllArgsConstructor -public class HttpExportRecordPage { +public class HttpExportRecordPage implements Serializable { + + private static final long serialVersionUID = 1143791658357035990L; private int pageNum; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java new file mode 100644 index 0000000000..4b229f9839 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java @@ -0,0 +1,80 
@@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.sink.data; + +import lombok.Data; + +/** + * Single HTTP retry event + */ +@Data +public class HttpRetryEvent { + + public static final String PREFIX = "http-retry-event-"; + + private String parentId; + + private int maxRetries; + + private int currentRetries; + + private Throwable lastException; + + /** + * Increase the current retries by 1 + */ + public void increaseCurrentRetries() { + this.currentRetries++; + } + + /** + * Check if the current retries is greater than or equal to the max retries + * @return true if the current retries is greater than or equal to the max retries + */ + public boolean isMaxRetriesReached() { + return this.currentRetries >= this.maxRetries; + } + + /** + * Get the limited exception message with the default limit of 256 + * @return the limited exception message + */ + public String getLimitedExceptionMessage() { + return getLimitedExceptionMessage(256); + } + + /** + * Get the limited exception message with the specified limit + * @param maxLimit the maximum limit of the exception message + * @return the limited exception message + */ + public String getLimitedExceptionMessage(int 
maxLimit) { + if (lastException == null) { + return ""; + } + String message = lastException.getMessage(); + if (message == null) { + return ""; + } + if (message.length() > maxLimit) { + return message.substring(0, maxLimit); + } + return message; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java new file mode 100644 index 0000000000..67ab943818 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.sink.data; + +import java.util.concurrent.atomic.AtomicInteger; + + +/** + * Multi HTTP request context + */ +public class MultiHttpRequestContext { + + public static final String NAME = "multi-http-request-context"; + + /** + * The remaining requests to be processed. + */ + private final AtomicInteger remainingRequests; + + /** + * The last failed event. 
+ * If there are no retries or retries are not enabled, it will be null. + * If retries occur but still fail, it will be logged, and only the last one will be retained. + */ + private HttpRetryEvent lastFailedEvent; + + public MultiHttpRequestContext(int remainingEvents) { + this.remainingRequests = new AtomicInteger(remainingEvents); + } + + /** + * Decrement the remaining requests by 1. + */ + public void decrementRemainingRequests() { + remainingRequests.decrementAndGet(); + } + + public int getRemainingRequests() { + return remainingRequests.get(); + } + + public HttpRetryEvent getLastFailedEvent() { + return lastFailedEvent; + } + + public void setLastFailedEvent(HttpRetryEvent lastFailedEvent) { + this.lastFailedEvent = lastFailedEvent; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java deleted file mode 100644 index bc2a536107..0000000000 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.eventmesh.connector.http.sink.handle; - -import org.apache.eventmesh.connector.http.sink.config.HttpRetryConfig; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; -import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; -import org.apache.eventmesh.connector.http.sink.data.HttpExportMetadata; -import org.apache.eventmesh.connector.http.sink.data.HttpExportRecord; -import org.apache.eventmesh.connector.http.util.HttpUtils; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; - -import java.net.ConnectException; -import java.net.URI; -import java.time.Duration; -import java.time.LocalDateTime; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; - -import io.vertx.core.Future; -import io.vertx.core.buffer.Buffer; -import io.vertx.ext.web.client.HttpResponse; - -import lombok.extern.slf4j.Slf4j; - -import dev.failsafe.Failsafe; -import dev.failsafe.RetryPolicy; -import dev.failsafe.RetryPolicyBuilder; -import dev.failsafe.event.ExecutionEvent; - - -@Slf4j -public class RetryHttpSinkHandler implements HttpSinkHandler { - - private final SinkConnectorConfig connectorConfig; - - // Retry policy builder - private RetryPolicyBuilder> retryPolicyBuilder; - - private final List urls; - - private final HttpSinkHandler sinkHandler; - - - public RetryHttpSinkHandler(SinkConnectorConfig connectorConfig, HttpSinkHandler sinkHandler) { - this.connectorConfig = connectorConfig; - this.sinkHandler = sinkHandler; - - // Initialize retry - initRetry(); - - // Initialize URLs - String[] urlStrings = connectorConfig.getUrls(); - this.urls = Arrays.stream(urlStrings) - .map(URI::create) - .collect(Collectors.toList()); - } - - private void initRetry() { - HttpRetryConfig httpRetryConfig = this.connectorConfig.getRetryConfig(); - - 
this.retryPolicyBuilder = RetryPolicy.>builder() - .handleIf(e -> e instanceof ConnectException) - .handleResultIf(response -> httpRetryConfig.isRetryOnNonSuccess() && !HttpUtils.is2xxSuccessful(response.statusCode())) - .withMaxRetries(httpRetryConfig.getMaxRetries()) - .withDelay(Duration.ofMillis(httpRetryConfig.getInterval())); - } - - - /** - * Initializes the WebClient for making HTTP requests based on the provided SinkConnectorConfig. - */ - @Override - public void start() { - sinkHandler.start(); - } - - - /** - * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. - * - * @param record the ConnectRecord to process - */ - @Override - public void handle(ConnectRecord record) { - for (URI url : this.urls) { - // convert ConnectRecord to HttpConnectRecord - String type = String.format("%s.%s.%s", - this.connectorConfig.getConnectorName(), url.getScheme(), - this.connectorConfig.getWebhookConfig().isActivate() ? 
"webhook" : "common"); - HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - // handle the HttpConnectRecord - deliver(url, httpConnectRecord); - } - } - - - /** - * Processes HttpConnectRecord on specified URL while returning its own processing logic This method provides the retry power to process the - * HttpConnectRecord - * - * @param url URI to which the HttpConnectRecord should be sent - * @param httpConnectRecord HttpConnectRecord to process - * @return processing chain - */ - @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord) { - // Only webhook mode needs to use the UUID to identify the request - String id = httpConnectRecord.getUuid(); - - // Build the retry policy - RetryPolicy> retryPolicy = retryPolicyBuilder - .onSuccess(event -> { - if (connectorConfig.getWebhookConfig().isActivate()) { - // convert the result to an HttpExportRecord - HttpExportRecord exportRecord = covertToExportRecord(httpConnectRecord, event, event.getResult(), event.getException(), url, id); - // add the data to the queue - ((WebhookHttpSinkHandler) sinkHandler).getReceivedDataQueue().offer(exportRecord); - } - }) - .onRetry(event -> { - if (log.isDebugEnabled()) { - log.warn("Retrying the request to {} for the {} time. 
HttpConnectRecord= {}", url, event.getAttemptCount(), httpConnectRecord); - } else { - log.warn("Retrying the request to {} for the {} time.", url, event.getAttemptCount()); - } - if (connectorConfig.getWebhookConfig().isActivate()) { - HttpExportRecord exportRecord = - covertToExportRecord(httpConnectRecord, event, event.getLastResult(), event.getLastException(), url, id); - ((WebhookHttpSinkHandler) sinkHandler).getReceivedDataQueue().offer(exportRecord); - } - // update the HttpConnectRecord - httpConnectRecord.setTime(LocalDateTime.now().toString()); - httpConnectRecord.setUuid(UUID.randomUUID().toString()); - }) - .onFailure(event -> { - if (log.isDebugEnabled()) { - log.error("Failed to send the request to {} after {} attempts. HttpConnectRecord= {}", url, event.getAttemptCount(), - httpConnectRecord, event.getException()); - } else { - log.error("Failed to send the request to {} after {} attempts.", url, event.getAttemptCount(), event.getException()); - } - if (connectorConfig.getWebhookConfig().isActivate()) { - HttpExportRecord exportRecord = covertToExportRecord(httpConnectRecord, event, event.getResult(), event.getException(), url, id); - ((WebhookHttpSinkHandler) sinkHandler).getReceivedDataQueue().offer(exportRecord); - } - }).build(); - - // Handle the HttpConnectRecord with retry - Failsafe.with(retryPolicy) - .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord).toCompletionStage()); - - return null; - } - - /** - * Converts the ExecutionCompletedEvent to an HttpExportRecord. 
- * - * @param httpConnectRecord HttpConnectRecord - * @param event ExecutionEvent - * @param response the response of the request, may be null - * @param e the exception thrown during the request, may be null - * @param url the URL the request was sent to - * @param id UUID - * @return the converted HttpExportRecord - */ - private HttpExportRecord covertToExportRecord(HttpConnectRecord httpConnectRecord, ExecutionEvent event, HttpResponse response, - Throwable e, URI url, String id) { - - HttpExportMetadata httpExportMetadata = HttpExportMetadata.builder() - .url(url.toString()) - .code(response != null ? response.statusCode() : -1) - .message(response != null ? response.statusMessage() : e.getMessage()) - .receivedTime(LocalDateTime.now()) - .uuid(httpConnectRecord.getUuid()) - .retriedBy(event.getAttemptCount() > 1 ? id : null) - .retryNum(event.getAttemptCount() - 1).build(); - - return new HttpExportRecord(httpExportMetadata, response == null ? null : response.bodyAsString()); - } - - /** - * Cleans up and releases resources used by the HTTP/HTTPS handler. - */ - @Override - public void stop() { - sinkHandler.stop(); - } -} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java new file mode 100644 index 0000000000..36d01115bb --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.sink.handler; + +import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; +import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; +import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.net.URI; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +/** + * AbstractHttpSinkHandler is an abstract class that provides a base implementation for HttpSinkHandler. 
+ */ +public abstract class AbstractHttpSinkHandler implements HttpSinkHandler { + + private final SinkConnectorConfig sinkConnectorConfig; + + private final List urls; + + protected AbstractHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { + this.sinkConnectorConfig = sinkConnectorConfig; + // Initialize URLs + String[] urlStrings = sinkConnectorConfig.getUrls(); + this.urls = Arrays.stream(urlStrings) + .map(URI::create) + .collect(Collectors.toList()); + } + + public SinkConnectorConfig getSinkConnectorConfig() { + return sinkConnectorConfig; + } + + public List getUrls() { + return urls; + } + + /** + * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. + * + * @param record the ConnectRecord to process + */ + @Override + public void handle(ConnectRecord record) { + // build attributes + Map attributes = new ConcurrentHashMap<>(); + attributes.put(MultiHttpRequestContext.NAME, new MultiHttpRequestContext(urls.size())); + + // send the record to all URLs + for (URI url : urls) { + // convert ConnectRecord to HttpConnectRecord + String type = String.format("%s.%s.%s", + this.sinkConnectorConfig.getConnectorName(), url.getScheme(), + this.sinkConnectorConfig.getWebhookConfig().isActivate() ? 
"webhook" : "common"); + HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); + + // add retry event to attributes + HttpRetryEvent retryEvent = new HttpRetryEvent(); + retryEvent.setMaxRetries(sinkConnectorConfig.getRetryConfig().getMaxRetries()); + attributes.put(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId(), retryEvent); + + // deliver the record + deliver(url, httpConnectRecord, attributes); + } + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/HttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java similarity index 83% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/HttpSinkHandler.java rename to eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java index 09fd66a762..1731809ab9 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/HttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java @@ -15,12 +15,13 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.handle; +package org.apache.eventmesh.connector.http.sink.handler; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.net.URI; +import java.util.Map; import io.vertx.core.Future; import io.vertx.core.buffer.Buffer; @@ -32,14 +33,14 @@ * *

Any class that needs to process ConnectRecords via HTTP or HTTPS should implement this interface. * Implementing classes must provide implementations for the {@link #start()}, {@link #handle(ConnectRecord)}, - * {@link #deliver(URI, HttpConnectRecord)}, and {@link #stop()} methods.

+ * {@link #deliver(URI, HttpConnectRecord, Map)}, and {@link #stop()} methods.

* *

Implementing classes should ensure thread safety and handle HTTP/HTTPS communication efficiently. * The {@link #start()} method initializes any necessary resources for HTTP/HTTPS communication. The {@link #handle(ConnectRecord)} method processes a - * ConnectRecord by sending it over HTTP or HTTPS. The {@link #deliver(URI, HttpConnectRecord)} method processes HttpConnectRecord on specified URL - * while returning its own processing logic {@link #stop()} method releases any resources used for HTTP/HTTPS communication.

+ * ConnectRecord by sending it over HTTP or HTTPS. The {@link #deliver(URI, HttpConnectRecord, Map)} method processes HttpConnectRecord on specified + * URL while returning its own processing logic {@link #stop()} method releases any resources used for HTTP/HTTPS communication.

* - *

It's recommended to handle exceptions gracefully within the {@link #deliver(URI, HttpConnectRecord)} method + *

It's recommended to handle exceptions gracefully within the {@link #deliver(URI, HttpConnectRecord, Map)} method * to prevent message loss or processing interruptions.

*/ public interface HttpSinkHandler { @@ -62,9 +63,10 @@ public interface HttpSinkHandler { * * @param url URI to which the HttpConnectRecord should be sent * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to be used in processing * @return processing chain */ - Future> deliver(URI url, HttpConnectRecord httpConnectRecord); + Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes); /** * Cleans up and releases resources used by the HTTP/HTTPS handler. This method should be called when the handler is no longer needed. diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java similarity index 57% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java rename to eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java index 4bc365a139..0907847455 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java @@ -15,23 +15,23 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.http.sink.handle; +package org.apache.eventmesh.connector.http.sink.handler.impl; import org.apache.eventmesh.common.remote.offset.http.HttpRecordOffset; import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; +import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; +import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; +import org.apache.eventmesh.connector.http.sink.handler.AbstractHttpSinkHandler; import org.apache.eventmesh.connector.http.util.HttpUtils; import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.net.URI; -import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import io.netty.handler.codec.http.HttpHeaderNames; import io.vertx.core.Future; @@ -60,22 +60,13 @@ */ @Slf4j @Getter -public class CommonHttpSinkHandler implements HttpSinkHandler { - - private final SinkConnectorConfig connectorConfig; - - private final List urls; +public class CommonHttpSinkHandler extends AbstractHttpSinkHandler { private WebClient webClient; public CommonHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { - this.connectorConfig = sinkConnectorConfig; - // Initialize URLs - String[] urlStrings = sinkConnectorConfig.getUrls(); - this.urls = Arrays.stream(urlStrings) - .map(URI::create) - .collect(Collectors.toList()); + super(sinkConnectorConfig); } /** @@ -91,41 +82,57 @@ public void start() { * Initializes the WebClient with the provided configuration options. 
*/ private void doInitWebClient() { + SinkConnectorConfig sinkConnectorConfig = getSinkConnectorConfig(); final Vertx vertx = Vertx.vertx(); WebClientOptions options = new WebClientOptions() - .setKeepAlive(this.connectorConfig.isKeepAlive()) - .setKeepAliveTimeout(this.connectorConfig.getKeepAliveTimeout() / 1000) - .setIdleTimeout(this.connectorConfig.getIdleTimeout()) + .setKeepAlive(sinkConnectorConfig.isKeepAlive()) + .setKeepAliveTimeout(sinkConnectorConfig.getKeepAliveTimeout() / 1000) + .setIdleTimeout(sinkConnectorConfig.getIdleTimeout()) .setIdleTimeoutUnit(TimeUnit.MILLISECONDS) - .setConnectTimeout(this.connectorConfig.getConnectionTimeout()) - .setMaxPoolSize(this.connectorConfig.getMaxConnectionPoolSize()); + .setConnectTimeout(sinkConnectorConfig.getConnectionTimeout()) + .setMaxPoolSize(sinkConnectorConfig.getMaxConnectionPoolSize()); this.webClient = WebClient.create(vertx, options); } /** - * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. + * Processes HttpConnectRecord on specified URL while returning its own processing logic. This method sends the HttpConnectRecord to the specified + * URL using the WebClient. 
* - * @param record the ConnectRecord to process + * @param url URI to which the HttpConnectRecord should be sent + * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to be used in processing + * @return processing chain */ @Override - public void handle(ConnectRecord record) { - for (URI url : this.urls) { - // convert ConnectRecord to HttpConnectRecord - String type = String.format("%s.%s.%s", connectorConfig.getConnectorName(), url.getScheme(), "common"); - HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - // get timestamp and offset - Long timestamp = httpConnectRecord.getData().getTimestamp(); - Map offset = null; - try { - // May throw NullPointerException. - offset = ((HttpRecordOffset) httpConnectRecord.getData().getPosition().getRecordOffset()).getOffsetMap(); - } catch (NullPointerException e) { - // ignore null pointer exception - } - final Map finalOffset = offset; - Future> responseFuture = deliver(url, httpConnectRecord); - responseFuture.onSuccess(res -> { + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes) { + // create headers + MultiMap headers = HttpHeaders.headers() + .set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=utf-8") + .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8"); + + // get timestamp and offset + Long timestamp = httpConnectRecord.getData().getTimestamp(); + Map offset = null; + try { + // May throw NullPointerException. + offset = ((HttpRecordOffset) httpConnectRecord.getData().getPosition().getRecordOffset()).getOffsetMap(); + } catch (NullPointerException e) { + // ignore null pointer exception + } + final Map finalOffset = offset; + + // send the request + return this.webClient.post(url.getPath()) + .host(url.getHost()) + .port(url.getPort() == -1 ? (Objects.equals(url.getScheme(), "https") ? 
443 : 80) : url.getPort()) + .putHeaders(headers) + .ssl(Objects.equals(url.getScheme(), "https")) + .sendJson(httpConnectRecord) + .onSuccess(res -> { log.info("Request sent successfully. Record: timestamp={}, offset={}", timestamp, finalOffset); + + Exception e = null; + // log the response if (HttpUtils.is2xxSuccessful(res.statusCode())) { if (log.isDebugEnabled()) { @@ -135,7 +142,6 @@ public void handle(ConnectRecord record) { log.info("Received successful response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, finalOffset); } - record.getCallback().onSuccess(convertToSendResult(record)); } else { if (log.isDebugEnabled()) { log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", @@ -144,14 +150,96 @@ public void handle(ConnectRecord record) { log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, finalOffset); } - record.getCallback() - .onException(buildSendExceptionContext(record, new RuntimeException("HTTP response code: " + res.statusCode()))); + + e = new RuntimeException("Unexpected HTTP response code: " + res.statusCode()); } + + // try callback + tryCallback(httpConnectRecord, e, attributes); }).onFailure(err -> { log.error("Request failed to send. Record: timestamp={}, offset={}", timestamp, finalOffset, err); - record.getCallback().onException(buildSendExceptionContext(record, err)); + + // try callback + tryCallback(httpConnectRecord, err, attributes); }); + } + + /** + * Tries to call the callback based on the result of the request. 
+ * + * @param httpConnectRecord the HttpConnectRecord to use + * @param e the exception thrown during the request, may be null + * @param attributes additional attributes to be used in processing + */ + private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map attributes) { + // get the retry event + HttpRetryEvent retryEvent = getAndUpdateRetryEvent(attributes, httpConnectRecord, e); + + // get the multi http request context + MultiHttpRequestContext multiHttpRequestContext = getAndUpdateMultiHttpRequestContext(attributes, retryEvent); + + if (multiHttpRequestContext.getRemainingRequests() == 0) { + // do callback + ConnectRecord record = httpConnectRecord.getData(); + if (record.getCallback() == null) { + if (log.isDebugEnabled()) { + log.warn("ConnectRecord callback is null. Ignoring callback. {}", record); + } else { + log.warn("ConnectRecord callback is null. Ignoring callback."); + } + return; + } + + HttpRetryEvent lastFailedEvent = multiHttpRequestContext.getLastFailedEvent(); + if (lastFailedEvent == null) { + // success + record.getCallback().onSuccess(convertToSendResult(record)); + } else { + // failure + record.getCallback().onException(buildSendExceptionContext(record, lastFailedEvent.getLastException())); + } + } + } + + /** + * Gets and updates the retry event based on the provided attributes and HttpConnectRecord. 
+ * + * @param attributes the attributes to use + * @param httpConnectRecord the HttpConnectRecord to use + * @param e the exception thrown during the request, may be null + * @return the updated retry event + */ + private HttpRetryEvent getAndUpdateRetryEvent(Map attributes, HttpConnectRecord httpConnectRecord, Throwable e) { + // get the retry event + HttpRetryEvent retryEvent = (HttpRetryEvent) attributes.get(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + // update the retry event + retryEvent.setLastException(e); + return retryEvent; + } + + + /** + * Gets and updates the multi http request context based on the provided attributes and HttpConnectRecord. + * + * @param attributes the attributes to use + * @param retryEvent the retry event to use + * @return the updated multi http request context + */ + private MultiHttpRequestContext getAndUpdateMultiHttpRequestContext(Map attributes, HttpRetryEvent retryEvent) { + // get the multi http request context + MultiHttpRequestContext multiHttpRequestContext = (MultiHttpRequestContext) attributes.get(MultiHttpRequestContext.NAME); + + if (retryEvent.getLastException() == null || retryEvent.isMaxRetriesReached()) { + // decrement the counter + multiHttpRequestContext.decrementRemainingRequests(); + + // try set failed event + if (retryEvent.getLastException() != null) { + multiHttpRequestContext.setLastFailedEvent(retryEvent); + } } + + return multiHttpRequestContext; } private SendResult convertToSendResult(ConnectRecord record) { @@ -174,30 +262,6 @@ private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Thr } - /** - * Processes HttpConnectRecord on specified URL while returning its own processing logic. This method sends the HttpConnectRecord to the specified - * URL using the WebClient. 
- * - * @param url URI to which the HttpConnectRecord should be sent - * @param httpConnectRecord HttpConnectRecord to process - * @return processing chain - */ - @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord) { - // create headers - MultiMap headers = HttpHeaders.headers() - .set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=utf-8") - .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8"); - // send the request - return this.webClient.post(url.getPath()) - .host(url.getHost()) - .port(url.getPort() == -1 ? (Objects.equals(url.getScheme(), "https") ? 443 : 80) : url.getPort()) - .putHeaders(headers) - .ssl(Objects.equals(url.getScheme(), "https")) - .sendJson(httpConnectRecord); - } - - /** * Cleans up and releases resources used by the HTTP/HTTPS handler. */ @@ -209,6 +273,4 @@ public void stop() { log.warn("WebClient is null, ignore."); } } - - } \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java new file mode 100644 index 0000000000..268d0a0d6d --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.sink.handler.impl; + +import org.apache.eventmesh.connector.http.sink.config.HttpRetryConfig; +import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; +import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; +import org.apache.eventmesh.connector.http.sink.handler.AbstractHttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler; +import org.apache.eventmesh.connector.http.util.HttpUtils; + +import java.net.ConnectException; +import java.net.URI; +import java.time.Duration; +import java.util.Map; + +import io.vertx.core.Future; +import io.vertx.core.buffer.Buffer; +import io.vertx.ext.web.client.HttpResponse; + +import lombok.extern.slf4j.Slf4j; + +import dev.failsafe.Failsafe; +import dev.failsafe.RetryPolicy; + + +/** + * HttpSinkHandlerRetryWrapper is a wrapper class for the HttpSinkHandler that provides retry functionality for failed HTTP requests. 
+ */ +@Slf4j +public class HttpSinkHandlerRetryWrapper extends AbstractHttpSinkHandler { + + private final HttpRetryConfig httpRetryConfig; + + private final HttpSinkHandler sinkHandler; + + public HttpSinkHandlerRetryWrapper(SinkConnectorConfig sinkConnectorConfig, HttpSinkHandler sinkHandler) { + super(sinkConnectorConfig); + this.sinkHandler = sinkHandler; + this.httpRetryConfig = getSinkConnectorConfig().getRetryConfig(); + } + + /** + * Initializes the WebClient for making HTTP requests based on the provided SinkConnectorConfig. + */ + @Override + public void start() { + sinkHandler.start(); + } + + + /** + * Processes HttpConnectRecord on specified URL while returning its own processing logic This method provides the retry power to process the + * HttpConnectRecord + * + * @param url URI to which the HttpConnectRecord should be sent + * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to pass to the processing chain + * @return processing chain + */ + @Override + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes) { + + // Build the retry policy + RetryPolicy> retryPolicy = RetryPolicy.>builder() + .handleIf(e -> e instanceof ConnectException) + .handleResultIf(response -> httpRetryConfig.isRetryOnNonSuccess() && !HttpUtils.is2xxSuccessful(response.statusCode())) + .withMaxRetries(httpRetryConfig.getMaxRetries()) + .withDelay(Duration.ofMillis(httpRetryConfig.getInterval())) + .onRetry(event -> { + if (log.isDebugEnabled()) { + log.warn("Retrying the request to {} for the {} time. 
{}", url, event.getAttemptCount(), httpConnectRecord); + } else { + log.warn("Retrying the request to {} for the {} time.", url, event.getAttemptCount()); + } + // update the retry event + HttpRetryEvent retryEvent = (HttpRetryEvent) attributes.get(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + retryEvent.increaseCurrentRetries(); + }) + .onFailure(event -> { + if (log.isDebugEnabled()) { + log.error("Failed to send the request to {} after {} attempts. {}", url, event.getAttemptCount(), + httpConnectRecord, event.getException()); + } else { + log.error("Failed to send the request to {} after {} attempts.", url, event.getAttemptCount(), event.getException()); + } + }).build(); + + // Handle the ConnectRecord with retry policy + Failsafe.with(retryPolicy) + .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord, attributes).toCompletionStage()); + + return null; + } + + + /** + * Cleans up and releases resources used by the HTTP/HTTPS handler. + */ + @Override + public void stop() { + sinkHandler.stop(); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/WebhookHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/WebhookHttpSinkHandler.java rename to eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java index 4e64126a9d..ff8f69d45a 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/WebhookHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java @@ -15,7 
+15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.handle; +package org.apache.eventmesh.connector.http.sink.handler.impl; import org.apache.eventmesh.common.exception.EventMeshException; import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; @@ -25,13 +25,14 @@ import org.apache.eventmesh.connector.http.sink.data.HttpExportMetadata; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecord; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecordPage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.commons.lang3.StringUtils; import java.net.URI; import java.time.LocalDateTime; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -61,8 +62,6 @@ @Slf4j public class WebhookHttpSinkHandler extends CommonHttpSinkHandler { - private final SinkConnectorConfig sinkConnectorConfig; - // the configuration for webhook private final HttpWebhookConfig webhookConfig; @@ -86,7 +85,7 @@ public boolean isExportDestroyed() { public WebhookHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { super(sinkConnectorConfig); - this.sinkConnectorConfig = sinkConnectorConfig; + this.webhookConfig = sinkConnectorConfig.getWebhookConfig(); int maxQueueSize = this.webhookConfig.getMaxStorageSize(); this.receivedDataQueue = new SynchronizedCircularFifoQueue<>(maxQueueSize); @@ -94,9 +93,6 @@ public WebhookHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { doInitExportServer(); } - public SynchronizedCircularFifoQueue getReceivedDataQueue() { - return receivedDataQueue; - } /** * Initialize the server for exporting the received data @@ -202,22 +198,6 @@ public void start() { }); } - /** - * Processes a ConnectRecord by sending it over HTTP or HTTPS. 
This method should be called for each ConnectRecord that needs to be processed. - * - * @param record the ConnectRecord to process - */ - @Override - public void handle(ConnectRecord record) { - for (URI url : super.getUrls()) { - // convert ConnectRecord to HttpConnectRecord - String type = String.format("%s.%s.%s", this.getConnectorConfig().getConnectorName(), url.getScheme(), "webhook"); - HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - // handle the HttpConnectRecord - deliver(url, httpConnectRecord); - } - } - /** * Processes HttpConnectRecord on specified URL while returning its own processing logic This method sends the HttpConnectRecord to the specified @@ -225,30 +205,27 @@ public void handle(ConnectRecord record) { * * @param url URI to which the HttpConnectRecord should be sent * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to be used in processing * @return processing chain */ @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord) { + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes) { // send the request - Future> responseFuture = super.deliver(url, httpConnectRecord); + Future> responseFuture = super.deliver(url, httpConnectRecord, attributes); // store the received data return responseFuture.onComplete(arr -> { - // If open retry, return directly and handled by RetryHttpSinkHandler - if (sinkConnectorConfig.getRetryConfig().getMaxRetries() > 0) { - return; + // get tryEvent from attributes + HttpRetryEvent retryEvent = (HttpRetryEvent) attributes.get(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + + HttpResponse response = null; + if (arr.succeeded()) { + response = arr.result(); + } else { + retryEvent.setLastException(arr.cause()); } - // create ExportMetadataBuilder - HttpResponse response = arr.succeeded() ? 
arr.result() : null; - - HttpExportMetadata httpExportMetadata = HttpExportMetadata.builder() - .url(url.toString()) - .code(response != null ? response.statusCode() : -1) - .message(response != null ? response.statusMessage() : arr.cause().getMessage()) - .receivedTime(LocalDateTime.now()) - .retriedBy(null) - .uuid(httpConnectRecord.getUuid()) - .retryNum(0) - .build(); + + // create ExportMetadata + HttpExportMetadata httpExportMetadata = buildHttpExportMetadata(url, response, httpConnectRecord, retryEvent); // create ExportRecord HttpExportRecord exportRecord = new HttpExportRecord(httpExportMetadata, arr.succeeded() ? arr.result().bodyAsString() : null); @@ -257,6 +234,38 @@ public Future> deliver(URI url, HttpConnectRecord httpConne }); } + /** + * Builds the HttpExportMetadata object based on the response, HttpConnectRecord, and HttpRetryEvent. + * + * @param url the URI to which the HttpConnectRecord was sent + * @param response the response received from the URI + * @param httpConnectRecord the HttpConnectRecord that was sent + * @param retryEvent the SingleHttpRetryEvent that was used for retries + * @return the HttpExportMetadata object + */ + private HttpExportMetadata buildHttpExportMetadata(URI url, HttpResponse response, HttpConnectRecord httpConnectRecord, + HttpRetryEvent retryEvent) { + + String msg = null; + // order of precedence: lastException > response > null + if (retryEvent.getLastException() != null) { + msg = retryEvent.getLimitedExceptionMessage(); + retryEvent.setLastException(null); + } else if (response != null) { + msg = response.statusMessage(); + } + + return HttpExportMetadata.builder() + .url(url.toString()) + .code(response != null ? 
response.statusCode() : -1) + .message(msg) + .receivedTime(LocalDateTime.now()) + .httpRecordId(httpConnectRecord.getHttpRecordId()) + .recordId(httpConnectRecord.getData().getRecordId()) + .retryNum(retryEvent.getCurrentRetries()) + .build(); + } + /** * Cleans up and releases resources used by the HTTP/HTTPS handler. diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java index 3e724627c0..7ddba511c4 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java @@ -86,7 +86,7 @@ void before() throws Exception { JSONObject requestBody = JSON.parseObject(httpRequest.getBodyAsString()); return HttpResponse.response() .withContentType(MediaType.APPLICATION_JSON) - .withStatusCode(200) + .withStatusCode(HttpStatus.SC_OK) .withBody(new JSONObject() .fluentPut("code", 0) .fluentPut("message", "success") From 9701f02660ba04ff37bd0b5787b614a3c91d8bfd Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Wed, 21 Aug 2024 11:26:23 +0800 Subject: [PATCH 07/51] [ISSUE #5079] Enhancement update for admin-server (#5080) * [ISSUE #5079] Enhancement update for admin-server * fix check style error * fix check style error --- eventmesh-admin-server/bin/start-admin.sh | 51 +++++++------ eventmesh-admin-server/build.gradle | 2 + eventmesh-admin-server/conf/application.yaml | 8 +- eventmesh-admin-server/conf/eventmesh.sql | 2 +- .../conf/mapper/EventMeshVerifyMapper.xml | 5 +- .../admin/server/web/HttpServer.java | 23 ++++++ .../admin/server/web/db/DBThreadPool.java | 26 ++++++- .../server/web/db/entity/EventMeshVerify.java | 3 + 
.../handler/impl/FetchJobRequestHandler.java | 2 +- .../handler/impl/ReportJobRequestHandler.java | 59 +++++++++++++++ .../handler/impl/ReportPositionHandler.java | 2 + .../web/handler/impl/ReportVerifyHandler.java | 49 +++++++++++- .../web/service/job/JobInfoBizService.java | 74 ++++++++++++++++++- .../position/impl/HttpPositionHandler.java | 61 +++++++++++++++ .../web/service/verify/VerifyBizService.java | 2 + .../eventmesh/common/remote/JobState.java | 52 +++++++++++++ .../common/remote/TransportType.java | 1 + .../remote/request/ReportJobRequest.java | 37 ++++++++++ .../remote/request/ReportVerifyRequest.java | 2 + ...e.eventmesh.common.remote.payload.IPayload | 1 + .../offsetmgmt/admin/AdminOffsetService.java | 3 + 21 files changed, 423 insertions(+), 42 deletions(-) create mode 100644 eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java create mode 100644 eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java create mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java create mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java diff --git a/eventmesh-admin-server/bin/start-admin.sh b/eventmesh-admin-server/bin/start-admin.sh index 93c3644397..1633036617 100644 --- a/eventmesh-admin-server/bin/start-admin.sh +++ b/eventmesh-admin-server/bin/start-admin.sh @@ -56,34 +56,34 @@ function extract_java_version { #} function get_pid { - local ppid="" - if [ -f ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file ]; then - ppid=$(cat ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file) - # If the process does not exist, it indicates that the previous process terminated abnormally. 
+ local ppid="" + if [ -f ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file ]; then + ppid=$(cat ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file) + # If the process does not exist, it indicates that the previous process terminated abnormally. if [ ! -d /proc/$ppid ]; then # Remove the residual file. rm ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file echo -e "ERROR\t EventMesh process had already terminated unexpectedly before, please check log output." ppid="" fi - else - if [[ $OS =~ Msys ]]; then - # There is a Bug on Msys that may not be able to kill the identified process - ppid=`jps -v | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep java | grep -v grep | awk -F ' ' {'print $1'}` - elif [[ $OS =~ Darwin ]]; then - # Known problem: grep Java may not be able to accurately identify Java processes - ppid=$(/bin/ps -o user,pid,command | grep "java" | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep -Ev "^root" |awk -F ' ' {'print $2'}) - else - if [ $DOCKER ]; then - # No need to exclude root user in Docker containers. - ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_ADMIN_HOME | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | awk -F ' ' {'print $2'}) - else + else + if [[ $OS =~ Msys ]]; then + # There is a Bug on Msys that may not be able to kill the identified process + ppid=`jps -v | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep java | grep -v grep | awk -F ' ' {'print $1'}` + elif [[ $OS =~ Darwin ]]; then + # Known problem: grep Java may not be able to accurately identify Java processes + ppid=$(/bin/ps -o user,pid,command | grep "java" | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep -Ev "^root" |awk -F ' ' {'print $2'}) + else + if [ $DOCKER ]; then + # No need to exclude root user in Docker containers. 
+ ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_ADMIN_HOME | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | awk -F ' ' {'print $2'}) + else # It is required to identify the process as accurately as possible on Linux. ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_ADMIN_HOME | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep -Ev "^root" | awk -F ' ' {'print $2'}) fi - fi - fi - echo "$ppid"; + fi + fi + echo "$ppid"; } #=========================================================================================== @@ -136,8 +136,7 @@ export JAVA_HOME GC_LOG_FILE="${EVENTMESH_ADMIN_LOG_HOME}/eventmesh_admin_gc_%p.log" -#JAVA_OPT="${JAVA_OPT} -server -Xms2048M -Xmx4096M -Xmn2048m -XX:SurvivorRatio=4" -JAVA_OPT=`cat ${EVENTMESH_ADMIN_HOME}/conf/server.env | grep APP_START_JVM_OPTION::: | awk -F ':::' {'print $2'}` +JAVA_OPT="${JAVA_OPT} -server -Xms1g -Xmx1g" JAVA_OPT="${JAVA_OPT} -XX:+UseG1GC -XX:G1HeapRegionSize=16m -XX:G1ReservePercent=25 -XX:InitiatingHeapOccupancyPercent=30 -XX:SoftRefLRUPolicyMSPerMB=0 -XX:SurvivorRatio=8 -XX:MaxGCPauseMillis=50" JAVA_OPT="${JAVA_OPT} -verbose:gc" if [[ "$JAVA_VERSION" == "8" ]]; then @@ -172,7 +171,7 @@ JAVA_OPT="${JAVA_OPT} -DeventMeshPluginDir=${EVENTMESH_ADMIN_HOME}/plugin" # echo "proxy is running already" # exit 9; # else -# echo "err pid$pid, rm pid.file" +# echo "err pid$pid, rm pid.file" # rm pid.file # fi #fi @@ -183,8 +182,8 @@ if [[ $pid == "ERROR"* ]]; then exit 9 fi if [ -n "$pid" ]; then - echo -e "ERROR\t The server is already running (pid=$pid), there is no need to execute start.sh again." - exit 9 + echo -e "ERROR\t The server is already running (pid=$pid), there is no need to execute start.sh again." 
+ exit 9 fi make_logs_dir @@ -193,9 +192,9 @@ echo "Using Java version: $JAVA_VERSION, path: $JAVA" >> ${EVENTMESH_ADMIN_LOG_H EVENTMESH_ADMIN_MAIN=org.apache.eventmesh.admin.server.ExampleAdminServer if [ $DOCKER ]; then - $JAVA $JAVA_OPT -classpath ${EVENTMESH_ADMIN_HOME}/conf:${EVENTMESH_ADMIN_HOME}/apps/*:${EVENTMESH_ADMIN_HOME}/lib/* $EVENTMESH_ADMIN_MAIN >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out + $JAVA $JAVA_OPT -classpath ${EVENTMESH_ADMIN_HOME}/conf:${EVENTMESH_ADMIN_HOME}/apps/*:${EVENTMESH_ADMIN_HOME}/lib/* $EVENTMESH_ADMIN_MAIN >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out else - $JAVA $JAVA_OPT -classpath ${EVENTMESH_ADMIN_HOME}/conf:${EVENTMESH_ADMIN_HOME}/apps/*:${EVENTMESH_ADMIN_HOME}/lib/* $EVENTMESH_ADMIN_MAIN >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out 2>&1 & + $JAVA $JAVA_OPT -classpath ${EVENTMESH_ADMIN_HOME}/conf:${EVENTMESH_ADMIN_HOME}/apps/*:${EVENTMESH_ADMIN_HOME}/lib/* $EVENTMESH_ADMIN_MAIN >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out 2>&1 & echo $!>${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file fi exit 0 diff --git a/eventmesh-admin-server/build.gradle b/eventmesh-admin-server/build.gradle index 1fec2c7c52..95c8fa1372 100644 --- a/eventmesh-admin-server/build.gradle +++ b/eventmesh-admin-server/build.gradle @@ -38,6 +38,8 @@ dependencies { implementation "com.alibaba:druid-spring-boot-starter" compileOnly 'com.mysql:mysql-connector-j' compileOnly 'org.projectlombok:lombok' + testImplementation 'junit:junit:4.12' + testImplementation 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' } diff --git a/eventmesh-admin-server/conf/application.yaml b/eventmesh-admin-server/conf/application.yaml index 274196db60..3d702e579e 100644 --- a/eventmesh-admin-server/conf/application.yaml +++ b/eventmesh-admin-server/conf/application.yaml @@ -35,8 +35,8 @@ event-mesh: # grpc server port port: 8081 adminServerList: - region1: + R1: - http://localhost:8082 - region2: - - http://localhost:8083 - region: region1 
\ No newline at end of file + R2: + - http://localhost:8082 + region: R1 \ No newline at end of file diff --git a/eventmesh-admin-server/conf/eventmesh.sql b/eventmesh-admin-server/conf/eventmesh.sql index 986320570a..6e28daca8a 100644 --- a/eventmesh-admin-server/conf/eventmesh.sql +++ b/eventmesh-admin-server/conf/eventmesh.sql @@ -102,7 +102,6 @@ CREATE TABLE IF NOT EXISTS `event_mesh_runtime_heartbeat` ( `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - UNIQUE KEY `runtimeAddr` (`runtimeAddr`), KEY `jobID` (`jobID`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -137,6 +136,7 @@ CREATE TABLE IF NOT EXISTS `event_mesh_task_info` ( CREATE TABLE IF NOT EXISTS `event_mesh_verify` ( `id` int unsigned NOT NULL AUTO_INCREMENT, `taskID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `jobID` varchar(50) COLLATE utf8_bin DEFAULT NULL, `recordID` varchar(50) COLLATE utf8_bin DEFAULT NULL, `recordSig` varchar(50) COLLATE utf8_bin DEFAULT NULL, `connectorName` varchar(200) COLLATE utf8_bin DEFAULT NULL, diff --git a/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml index b7b042145a..45727498cc 100644 --- a/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml +++ b/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml @@ -26,6 +26,7 @@ + @@ -35,8 +36,8 @@ - id,taskID,recordID, - recordSig,connectorName,connectorStage, + id,taskID,jobID,recordID, + recordSig,connectorName,connectorStage, position,createTime diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java index 12afb3a3d4..2454e9f02c 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java +++ 
b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java @@ -18,7 +18,9 @@ package org.apache.eventmesh.admin.server.web; import org.apache.eventmesh.admin.server.web.service.task.TaskBizService; +import org.apache.eventmesh.admin.server.web.service.verify.VerifyBizService; import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.eventmesh.common.remote.request.ReportVerifyRequest; import org.apache.eventmesh.common.remote.response.CreateTaskResponse; import org.apache.eventmesh.common.utils.JsonUtils; @@ -29,19 +31,40 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; +import lombok.extern.slf4j.Slf4j; + @RestController @RequestMapping("/eventmesh/admin") +@Slf4j public class HttpServer { @Autowired private TaskBizService taskService; + @Autowired + private VerifyBizService verifyService; + @RequestMapping(value = "/createTask", method = RequestMethod.POST) public ResponseEntity createOrUpdateTask(@RequestBody CreateTaskRequest task) { + log.info("receive http proto create task:{}", task); CreateTaskResponse createTaskResponse = taskService.createTask(task); + log.info("receive http proto create task result:{}", createTaskResponse); return ResponseEntity.ok(JsonUtils.toJSONString(Response.success(createTaskResponse))); } + + @RequestMapping(value = "/reportVerify", method = RequestMethod.POST) + public ResponseEntity reportVerify(@RequestBody ReportVerifyRequest request) { + log.info("receive http proto report verify request:{}", request); + boolean result = verifyService.reportVerifyRecord(request); + log.info("receive http proto report verify result:{}", result); + if (result) { + return ResponseEntity.ok("report verify success.request:" + JsonUtils.toJSONString(request)); + } else { + return ResponseEntity.internalServerError().body("report verify success.request:" + JsonUtils.toJSONString(request)); + } + } + public 
boolean deleteTask(Long id) { return false; } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java index f1de764967..277ea66656 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java @@ -20,6 +20,7 @@ import org.apache.eventmesh.common.EventMeshThreadFactory; import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; @@ -39,17 +40,34 @@ public class DBThreadPool { new LinkedBlockingQueue<>(1000), new EventMeshThreadFactory("admin-server-db"), new ThreadPoolExecutor.DiscardOldestPolicy()); + + private final ScheduledThreadPoolExecutor checkScheduledExecutor = + new ScheduledThreadPoolExecutor(Runtime.getRuntime().availableProcessors(), new EventMeshThreadFactory("admin-server-check-scheduled"), + new ThreadPoolExecutor.DiscardOldestPolicy()); + @PreDestroy private void destroy() { if (!executor.isShutdown()) { try { executor.shutdown(); if (!executor.awaitTermination(30, TimeUnit.SECONDS)) { - log.info("wait heart beat handler thread pool shutdown timeout, it will shutdown immediately"); + log.info("wait handler thread pool shutdown timeout, it will shutdown immediately"); executor.shutdownNow(); } } catch (InterruptedException e) { - log.warn("wait heart beat handler thread pool shutdown fail"); + log.warn("wait handler thread pool shutdown fail"); + } + } + + if (!checkScheduledExecutor.isShutdown()) { + try { + checkScheduledExecutor.shutdown(); + if (!checkScheduledExecutor.awaitTermination(30, TimeUnit.SECONDS)) { + log.info("wait scheduled thread pool shutdown timeout, it will shutdown immediately"); + 
checkScheduledExecutor.shutdownNow(); + } + } catch (InterruptedException e) { + log.warn("wait scheduled thread pool shutdown fail"); } } } @@ -57,4 +75,8 @@ private void destroy() { public ThreadPoolExecutor getExecutors() { return executor; } + + public ScheduledThreadPoolExecutor getCheckExecutor() { + return checkScheduledExecutor; + } } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java index 5425c5c57b..c5a6c35f8d 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java @@ -32,11 +32,14 @@ @TableName(value = "event_mesh_verify") @Data public class EventMeshVerify implements Serializable { + @TableId(type = IdType.AUTO) private Integer id; private String taskID; + private String jobID; + private String recordID; private String recordSig; diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java index b377bcddd8..3392084c28 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java @@ -56,7 +56,7 @@ public FetchJobResponse handler(FetchJobRequest request, Metadata metadata) { config.setSourceConnectorConfig(JsonUtils.objectToMap(detail.getSourceDataSource().getConf())); config.setSourceConnectorDesc(detail.getSourceConnectorDesc()); config.setSinkConnectorConfig(JsonUtils.objectToMap(detail.getSinkDataSource().getConf())); - 
config.setSourceConnectorDesc(detail.getSinkConnectorDesc()); + config.setSinkConnectorDesc(detail.getSinkConnectorDesc()); response.setConnectorConfig(config); response.setTransportType(detail.getTransportType()); response.setState(detail.getState()); diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java new file mode 100644 index 0000000000..ea836ce7aa --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.ReportJobRequest; +import org.apache.eventmesh.common.remote.response.SimpleResponse; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class ReportJobRequestHandler extends BaseRequestHandler { + + @Autowired + JobInfoBizService jobInfoBizService; + + @Override + public SimpleResponse handler(ReportJobRequest request, Metadata metadata) { + log.info("receive report job request:{}", request); + if (StringUtils.isBlank(request.getJobID())) { + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal job id, it's empty"); + } + EventMeshJobInfo jobInfo = jobInfoBizService.getJobInfo(request.getJobID()); + if (jobInfo == null) { + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal job id, not exist target job,jobID:" + request.getJobID()); + } + boolean result = jobInfoBizService.updateJobState(jobInfo.getJobID(), request.getState()); + if (result) { + return SimpleResponse.success(); + } else { + return SimpleResponse.fail(ErrorCode.INTERNAL_ERR, "update job failed."); + } + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java index 5e2a968262..7a30bef80a 100644 --- 
a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java @@ -37,6 +37,7 @@ @Component @Slf4j public class ReportPositionHandler extends BaseRequestHandler { + @Autowired private JobInfoBizService jobInfoBizService; @@ -48,6 +49,7 @@ public class ReportPositionHandler extends BaseRequestHandler { + @Autowired private VerifyBizService verifyService; + @Autowired + JobInfoBizService jobInfoBizService; + + @Autowired + private AdminServerProperties properties; + @Override protected SimpleResponse handler(ReportVerifyRequest request, Metadata metadata) { - if (StringUtils.isAnyBlank(request.getTaskID(), request.getRecordSig(), request.getRecordID(), request.getConnectorStage())) { + if (StringUtils.isAnyBlank(request.getTaskID(), request.getJobID(), request.getRecordSig(), request.getRecordID(), + request.getConnectorStage())) { log.info("report verify request [{}] illegal", request); - return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "request task id, sign, record id or stage is none"); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "request task id,job id, sign, record id or stage is none"); + } + + String jobID = request.getJobID(); + EventMeshJobInfo jobInfo = jobInfoBizService.getJobInfo(jobID); + if (jobInfo == null || StringUtils.isBlank(jobInfo.getFromRegion())) { + log.info("report verify job info [{}] illegal", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "job info is null or fromRegion is blank,job id:" + jobID); } - return verifyService.reportVerifyRecord(request) ? 
SimpleResponse.success() : SimpleResponse.fail(ErrorCode.INTERNAL_ERR, "save verify " + + String fromRegion = jobInfo.getFromRegion(); + String localRegion = properties.getRegion(); + log.info("report verify request from region:{},localRegion:{},request:{}", fromRegion, localRegion, request); + if (fromRegion.equalsIgnoreCase(localRegion)) { + return verifyService.reportVerifyRecord(request) ? SimpleResponse.success() : SimpleResponse.fail(ErrorCode.INTERNAL_ERR, "save verify " + "request fail"); + } else { + log.info("start transfer report verify to from region admin server. from region:{}", fromRegion); + List adminServerList = properties.getAdminServerList().get(fromRegion); + if (adminServerList == null || adminServerList.isEmpty()) { + throw new RuntimeException("No admin server available for region: " + fromRegion); + } + String targetUrl = adminServerList.get(new Random().nextInt(adminServerList.size())) + "/eventmesh/admin/reportVerify"; + RestTemplate restTemplate = new RestTemplate(); + ResponseEntity response = restTemplate.postForEntity(targetUrl, request, String.class); + if (!response.getStatusCode().is2xxSuccessful()) { + return SimpleResponse.fail(ErrorCode.INTERNAL_ERR, + "save verify request fail,code:" + response.getStatusCode() + ",msg:" + response.getBody()); + } + return SimpleResponse.success(); + } } } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java index 0657383e23..a8b469d8b7 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java @@ -19,15 +19,19 @@ import org.apache.eventmesh.admin.server.AdminServerProperties; import 
org.apache.eventmesh.admin.server.AdminServerRuntimeException; +import org.apache.eventmesh.admin.server.web.db.DBThreadPool; import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; import org.apache.eventmesh.admin.server.web.db.service.EventMeshDataSourceService; import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoExtService; import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoService; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshRuntimeHeartbeatService; import org.apache.eventmesh.admin.server.web.pojo.JobDetail; import org.apache.eventmesh.admin.server.web.service.datasource.DataSourceBizService; import org.apache.eventmesh.admin.server.web.service.position.PositionBizService; import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.remote.JobState; import org.apache.eventmesh.common.remote.TaskState; import org.apache.eventmesh.common.remote.TransportType; import org.apache.eventmesh.common.remote.datasource.DataSource; @@ -38,9 +42,13 @@ import org.apache.commons.lang3.StringUtils; +import java.time.Duration; import java.util.LinkedList; import java.util.List; import java.util.UUID; +import java.util.concurrent.TimeUnit; + +import javax.annotation.PostConstruct; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -75,13 +83,41 @@ public class JobInfoBizService { @Autowired private AdminServerProperties properties; + @Autowired + EventMeshRuntimeHeartbeatService heartbeatService; + + private final long heatBeatPeriod = Duration.ofMillis(5000).toMillis(); + + @Autowired + DBThreadPool executor; + + @PostConstruct + public void init() { + log.info("init check job info scheduled task."); + 
executor.getCheckExecutor().scheduleAtFixedRate(new Runnable() { + @Override + public void run() { + checkJobInfo(); + } + }, 10, 10, TimeUnit.SECONDS); + } + public boolean updateJobState(String jobID, TaskState state) { if (jobID == null || state == null) { return false; } EventMeshJobInfo jobInfo = new EventMeshJobInfo(); jobInfo.setJobState(state.name()); - return jobInfoService.update(jobInfo, Wrappers.update().eq("jobID", jobID).ne("state", TaskState.DELETE.name())); + return jobInfoService.update(jobInfo, Wrappers.update().eq("jobID", jobID).ne("jobState", JobState.DELETE.name())); + } + + public boolean updateJobState(String jobID, JobState state) { + if (jobID == null || state == null) { + return false; + } + EventMeshJobInfo jobInfo = new EventMeshJobInfo(); + jobInfo.setJobState(state.name()); + return jobInfoService.update(jobInfo, Wrappers.update().eq("jobID", jobID).ne("jobState", JobState.DELETE.name())); } @Transactional @@ -114,7 +150,8 @@ public List createJobs(List jobs) { source.setOperator(job.getCreateUid()); source.setRegion(job.getSourceDataSource().getRegion()); source.setDesc(job.getSourceConnectorDesc()); - source.setConfig(job.getSourceDataSource().getConf()); + Config sourceConfig = job.getSourceDataSource().getConf(); + source.setConfig(sourceConfig); source.setConfigClass(job.getSourceDataSource().getConfClazz().getName()); EventMeshDataSource createdSource = dataSourceBizService.createDataSource(source); entity.setSourceData(createdSource.getId()); @@ -124,7 +161,8 @@ public List createJobs(List jobs) { sink.setOperator(job.getCreateUid()); sink.setRegion(job.getSinkDataSource().getRegion()); sink.setDesc(job.getSinkConnectorDesc()); - sink.setConfig(job.getSinkDataSource().getConf()); + Config sinkConfig = job.getSinkDataSource().getConf(); + sink.setConfig(sinkConfig); sink.setConfigClass(job.getSinkDataSource().getConfClazz().getName()); EventMeshDataSource createdSink = dataSourceBizService.createDataSource(sink); 
entity.setTargetData(createdSink.getId()); @@ -195,6 +233,36 @@ public JobDetail getJobDetail(String jobID) { detail.setTransportType(TransportType.getTransportType(job.getTransportType())); return detail; } + + public EventMeshJobInfo getJobInfo(String jobID) { + if (jobID == null) { + return null; + } + EventMeshJobInfo job = jobInfoService.getOne(Wrappers.query().eq("jobID", jobID)); + return job; + } + + public void checkJobInfo() { + List eventMeshJobInfoList = jobInfoService.list(Wrappers.query().eq("jobState", JobState.RUNNING.name())); + log.info("start check job info.to check job size:{}", eventMeshJobInfoList.size()); + for (EventMeshJobInfo jobInfo : eventMeshJobInfoList) { + String jobID = jobInfo.getJobID(); + if (StringUtils.isEmpty(jobID)) { + continue; + } + EventMeshRuntimeHeartbeat heartbeat = heartbeatService.getOne(Wrappers.query().eq("jobID", jobID)); + if (heartbeat == null) { + continue; + } + // if last heart beat update time have delay three period.print job heart beat delay warn + long currentTimeStamp = System.currentTimeMillis(); + if (currentTimeStamp - heartbeat.getUpdateTime().getTime() > 3 * heatBeatPeriod) { + log.warn("current job heart heart has delay.jobID:{},currentTimeStamp:{},last update time:{}", jobID, currentTimeStamp, + heartbeat.getUpdateTime()); + } + } + } + } diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java new file mode 100644 index 0000000000..b8d536f388 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.position.impl; + +import org.apache.eventmesh.admin.server.web.db.service.EventMeshPositionReporterHistoryService; +import org.apache.eventmesh.admin.server.web.service.position.PositionHandler; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class HttpPositionHandler extends PositionHandler { + + @Autowired + EventMeshPositionReporterHistoryService historyService; + + @Override + protected DataSourceType getSourceType() { + return DataSourceType.HTTP; + } + + @Override + public boolean handler(ReportPositionRequest request, Metadata metadata) { + log.info("receive http position report request:{}", request); + // mock wemq postion report store + return true; + } + + @Override + public 
List handler(FetchPositionRequest request, Metadata metadata) { + // mock http position fetch request + List recordPositionList = new ArrayList<>(); + return recordPositionList; + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java index 74f208b199..e4f08b30cc 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java @@ -26,6 +26,7 @@ @Service public class VerifyBizService { + @Autowired private EventMeshVerifyService verifyService; @@ -35,6 +36,7 @@ public boolean reportVerifyRecord(ReportVerifyRequest request) { verify.setRecordSig(request.getRecordSig()); verify.setPosition(request.getPosition()); verify.setTaskID(request.getTaskID()); + verify.setJobID(request.getJobID()); verify.setConnectorName(request.getConnectorName()); verify.setConnectorStage(request.getConnectorStage()); return verifyService.save(verify); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java new file mode 100644 index 0000000000..da9daffe9c --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote; + +import java.util.HashMap; +import java.util.Map; + +import lombok.ToString; + +@ToString +public enum JobState { + INIT, RUNNING, COMPLETE, DELETE, FAIL; + private static final JobState[] STATES_NUM_INDEX = JobState.values(); + private static final Map STATES_NAME_INDEX = new HashMap<>(); + + static { + for (JobState jobState : STATES_NUM_INDEX) { + STATES_NAME_INDEX.put(jobState.name(), jobState); + } + } + + public static JobState fromIndex(Integer index) { + if (index == null || index < 0 || index >= STATES_NUM_INDEX.length) { + return null; + } + + return STATES_NUM_INDEX[index]; + } + + public static JobState fromIndex(String index) { + if (index == null || index.isEmpty()) { + return null; + } + + return STATES_NAME_INDEX.get(index); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java index 82e7bc021d..6b43598398 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java @@ -35,6 +35,7 @@ public enum TransportType { HTTP_REDIS(DataSourceType.HTTP, DataSourceType.REDIS), HTTP_ROCKETMQ(DataSourceType.HTTP, DataSourceType.ROCKETMQ), REDIS_MQ(DataSourceType.REDIS, DataSourceType.ROCKETMQ), + HTTP_HTTP(DataSourceType.HTTP, DataSourceType.HTTP), ; private static final Map INDEX_TYPES = new HashMap<>(); private static final 
TransportType[] TYPES = TransportType.values(); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java new file mode 100644 index 0000000000..aec33e4616 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.remote.JobState; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@Data +@EqualsAndHashCode(callSuper = true) +@ToString +public class ReportJobRequest extends BaseRemoteRequest { + + private String jobID; + + private JobState state; + + private String address; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java index cd541949f4..bd38881c3d 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java @@ -28,6 +28,8 @@ public class ReportVerifyRequest extends BaseRemoteRequest { private String taskID; + private String jobID; + private String recordID; private String recordSig; diff --git a/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload b/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload index 82d5c94dd3..433cf57ed1 100644 --- a/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload +++ b/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload @@ -16,6 +16,7 @@ org.apache.eventmesh.common.remote.request.FetchJobRequest org.apache.eventmesh.common.remote.response.FetchJobResponse org.apache.eventmesh.common.remote.request.ReportPositionRequest +org.apache.eventmesh.common.remote.request.ReportJobRequest org.apache.eventmesh.common.remote.request.ReportVerifyRequest org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest org.apache.eventmesh.common.remote.request.FetchPositionRequest diff --git 
a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java index 977661b134..993352a979 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java @@ -112,6 +112,8 @@ public void persist() { reportPositionRequest.setRecordPositionList(recordToSyncList); + log.debug("start report position request: {}", JsonUtils.toJSONString(reportPositionRequest)); + Metadata metadata = Metadata.newBuilder() .setType(ReportPositionRequest.class.getSimpleName()) .build(); @@ -121,6 +123,7 @@ public void persist() { .build()) .build(); requestObserver.onNext(payload); + log.debug("end report position request: {}", JsonUtils.toJSONString(reportPositionRequest)); for (Map.Entry entry : recordMap.entrySet()) { positionStore.remove(entry.getKey()); From 60fb430ad7e2418ce8fd2d0e7b55bb507ea4ff7d Mon Sep 17 00:00:00 2001 From: mike_xwm Date: Wed, 21 Aug 2024 20:26:35 +0800 Subject: [PATCH 08/51] [ISSUE #5081] Enhancement update for connectors & admin-server (#5082) * [ISSUE #5079] Enhancement update for admin-server * fix check style error * fix check style error * [ISSUE #5081] Enhancement update for connectors & admin-server * fix check style error * fix check style error --- .../web/service/job/JobInfoBizService.java | 3 + .../common/config/ConfigService.java | 11 +- .../connector/http}/HttpRetryConfig.java | 2 +- 
.../connector/http}/HttpSinkConfig.java | 2 +- .../connector/http}/HttpWebhookConfig.java | 2 +- .../connector/http}/SinkConnectorConfig.java | 11 +- .../connector/http/SourceConnectorConfig.java | 6 +- .../connector/rdb/canal/CanalSinkConfig.java | 25 +- .../rdb/canal/CanalSinkFullConfig.java | 2 +- .../rdb/canal/CanalSinkIncrementConfig.java | 50 + .../rdb/canal/CanalSourceConfig.java | 58 +- .../rdb/canal/CanalSourceFullConfig.java | 2 +- .../rdb/canal/CanalSourceIncrementConfig.java | 86 ++ .../eventmesh/common/utils/JsonUtils.java | 11 + .../SqlBuilderLoadInterceptor.java | 16 +- .../connector/CanalSinkCheckConnector.java | 406 ++++++++ .../sink/connector/CanalSinkConnector.java | 778 +--------------- .../connector/CanalSinkFullConnector.java | 43 +- .../CanalSinkIncrementConnector.java | 865 ++++++++++++++++++ .../connector/canal/source/EntryParser.java | 18 +- .../source/connector/CanalFullProducer.java | 8 +- .../connector/CanalSourceCheckConnector.java | 186 ++++ .../connector/CanalSourceConnector.java | 319 +------ .../connector/CanalSourceFullConnector.java | 26 +- .../CanalSourceIncrementConnector.java | 383 ++++++++ .../source/position/CanalFullPositionMgr.java | 4 +- .../http/sink/HttpSinkConnector.java | 4 +- .../http/sink/data/HttpConnectRecord.java | 40 +- .../sink/handler/AbstractHttpSinkHandler.java | 4 +- .../http/sink/handler/HttpSinkHandler.java | 10 +- .../handler/impl/CommonHttpSinkHandler.java | 61 +- .../impl/HttpSinkHandlerRetryWrapper.java | 10 +- .../handler/impl/WebhookHttpSinkHandler.java | 13 +- .../http/source/data/WebhookRequest.java | 4 + .../source/protocol/impl/CommonProtocol.java | 31 +- .../source/protocol/impl/GitHubProtocol.java | 2 +- ...esh.openconnect.api.ConnectorCreateService | 20 + .../http/sink/HttpSinkConnectorTest.java | 4 +- .../api/connector/SinkConnectorContext.java | 7 + .../api/connector/SourceConnectorContext.java | 3 + .../offsetmgmt/api/data/DefaultKeyValue.java | 5 + eventmesh-runtime-v2/build.gradle | 1 + 
.../runtime/connector/ConnectorRuntime.java | 101 +- 43 files changed, 2366 insertions(+), 1277 deletions(-) rename {eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config => eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http}/HttpRetryConfig.java (95%) rename {eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config => eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http}/HttpSinkConfig.java (94%) rename {eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config => eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http}/HttpWebhookConfig.java (95%) rename {eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config => eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http}/SinkConnectorConfig.java (84%) create mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java create mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java create mode 100644 eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java create mode 100644 eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java create mode 100644 eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java create mode 100644 eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java create mode 100644 
eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java index a8b469d8b7..76df629e69 100644 --- a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java @@ -37,6 +37,7 @@ import org.apache.eventmesh.common.remote.datasource.DataSource; import org.apache.eventmesh.common.remote.datasource.DataSourceType; import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.job.JobType; import org.apache.eventmesh.common.remote.request.CreateOrUpdateDataSourceReq; import org.apache.eventmesh.common.utils.JsonUtils; @@ -231,6 +232,8 @@ public JobDetail getJobDetail(String jobID) { } detail.setState(state); detail.setTransportType(TransportType.getTransportType(job.getTransportType())); + detail.setJobType(JobType.fromIndex(job.getJobType())); + detail.setJobDesc(job.getJobDesc()); return detail; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java index 939c9d8d67..3f3f609a1f 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java @@ -131,7 +131,7 @@ public T getConfig(ConfigInfo configInfo) throws IOException { } else { filePath = path.startsWith(FILE_PATH_PREFIX) ? 
path.substring(FILE_PATH_PREFIX.length()) : this.configPath + path; } - + filePath = normalizeFilePath(filePath); if (filePath.contains(".jar")) { try (final InputStream inputStream = getClass().getResourceAsStream(Objects.requireNonNull(resourceUrl))) { if (inputStream == null) { @@ -152,6 +152,15 @@ public T getConfig(ConfigInfo configInfo) throws IOException { return (T) object; } + private String normalizeFilePath(String filePath) { + if (System.getProperty("os.name").toLowerCase().contains("win")) { + if (filePath.startsWith("/")) { + filePath = filePath.substring(1); + } + } + return filePath; + } + private void populateConfig(Object object, Class clazz, Config config) throws NoSuchFieldException, IOException, IllegalAccessException { ConfigInfo configInfo = new ConfigInfo(); diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpRetryConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpRetryConfig.java index 08c3a323e7..319732a875 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpRetryConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSinkConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSinkConfig.java index 5997b90b7d..3c429f3355 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSinkConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; import org.apache.eventmesh.common.config.connector.SinkConfig; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpWebhookConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpWebhookConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpWebhookConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpWebhookConfig.java index f15bac4568..96b9e09826 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpWebhookConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpWebhookConfig.java @@ -15,7 +15,7 @@ 
* limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SinkConnectorConfig.java similarity index 84% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SinkConnectorConfig.java index 9bb338cceb..ccebe5a998 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SinkConnectorConfig.java @@ -15,9 +15,8 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; -import io.vertx.core.http.HttpClientOptions; import lombok.Data; @@ -29,19 +28,19 @@ public class SinkConnectorConfig { private String[] urls; // keepAlive, default true - private boolean keepAlive = HttpClientOptions.DEFAULT_KEEP_ALIVE; + private boolean keepAlive = true; // timeunit: ms, default 60000ms - private int keepAliveTimeout = HttpClientOptions.DEFAULT_KEEP_ALIVE_TIMEOUT * 1000; // Keep units consistent + private int keepAliveTimeout = 60 * 1000; // Keep units consistent // timeunit: ms, default 5000ms, recommended scope: 5000ms - 10000ms private int connectionTimeout = 5000; // timeunit: ms, default 5000ms - private int idleTimeout; + private int idleTimeout = 5000; // maximum number of HTTP/1 connections a client will pool, default 5 - private int maxConnectionPoolSize = HttpClientOptions.DEFAULT_MAX_POOL_SIZE; + private int maxConnectionPoolSize = 5; // retry config private HttpRetryConfig retryConfig = new HttpRetryConfig(); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java index b7f075e6d3..58d910bf2d 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java @@ -27,7 +27,7 @@ public class SourceConnectorConfig { private String connectorName; - private String path; + private String path = "/"; private int port; @@ -51,11 +51,11 @@ public class SourceConnectorConfig { private int batchSize = 10; // protocol, default CloudEvent - private String protocol = "CloudEvent"; + private String protocol = "Common"; // extra config, e.g. 
GitHub secret private Map extraConfig = new HashMap<>(); // data consistency enabled, default true - private boolean dataConsistencyEnabled = true; + private boolean dataConsistencyEnabled = false; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java index 026f33f4fc..c535c7f52a 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java @@ -18,7 +18,8 @@ package org.apache.eventmesh.common.config.connector.rdb.canal; import org.apache.eventmesh.common.config.connector.SinkConfig; -import org.apache.eventmesh.common.remote.job.SyncMode; + +import java.util.Map; import lombok.Data; import lombok.EqualsAndHashCode; @@ -27,25 +28,7 @@ @EqualsAndHashCode(callSuper = true) public class CanalSinkConfig extends SinkConfig { - // batchSize - private Integer batchSize = 50; - - // enable batch - private Boolean useBatch = true; - - // sink thread size for single channel - private Integer poolSize = 5; - - // sync mode: field/row - private SyncMode syncMode; - - private boolean isGTIDMode = true; - - private boolean isMariaDB = true; - - // skip sink process exception - private Boolean skipException = false; - - public SinkConnectorConfig sinkConnectorConfig; + // used to convert canal full/increment/check connector config + private Map sinkConfig; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java index c2b881df6c..f1d78a65dc 100644 --- 
a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java @@ -26,6 +26,6 @@ @Data @EqualsAndHashCode(callSuper = true) public class CanalSinkFullConfig extends SinkConfig { - private SinkConnectorConfig sinkConfig; + private SinkConnectorConfig sinkConnectorConfig; private String zeroDate; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java new file mode 100644 index 0000000000..32112a769b --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.remote.job.SyncMode; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSinkIncrementConfig extends CanalSinkConfig { + + // batchSize + private Integer batchSize = 50; + + // enable batch + private Boolean useBatch = true; + + // sink thread size for single channel + private Integer poolSize = 5; + + // sync mode: field/row + private SyncMode syncMode; + + private boolean isGTIDMode = true; + + private boolean isMariaDB = true; + + // skip sink process exception + private Boolean skipException = false; + + public SinkConnectorConfig sinkConnectorConfig; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java index 8331d32cb7..db17fbe75d 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java @@ -18,11 +18,8 @@ package org.apache.eventmesh.common.config.connector.rdb.canal; import org.apache.eventmesh.common.config.connector.SourceConfig; -import org.apache.eventmesh.common.remote.job.SyncConsistency; -import org.apache.eventmesh.common.remote.job.SyncMode; -import org.apache.eventmesh.common.remote.offset.RecordPosition; -import java.util.List; +import java.util.Map; import lombok.Data; import lombok.EqualsAndHashCode; @@ -31,56 +28,7 @@ @EqualsAndHashCode(callSuper = true) public class CanalSourceConfig extends SourceConfig { - private String destination; + // used to convert canal full/increment/check connector config + private Map sourceConfig; - private Long canalInstanceId; - - private String desc; - - private boolean 
ddlSync = true; - - private boolean filterTableError = false; - - private Long slaveId; - - private Short clientId; - - private String serverUUID; - - private boolean isMariaDB = true; - - private boolean isGTIDMode = true; - - private Integer batchSize = 10000; - - private Long batchTimeout = -1L; - - private String tableFilter; - - private String fieldFilter; - - private List recordPositions; - - // ================================= channel parameter - // ================================ - - // enable remedy - private Boolean enableRemedy = false; - - // sync mode: field/row - private SyncMode syncMode; - - // sync consistency - private SyncConsistency syncConsistency; - - // ================================= system parameter - // ================================ - - // Column name of the bidirectional synchronization mark - private String needSyncMarkTableColumnName = "needSync"; - - // Column value of the bidirectional synchronization mark - private String needSyncMarkTableColumnValue = "needSync"; - - private SourceConnectorConfig sourceConnectorConfig; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java index a2ab8ba31d..15398b303a 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java @@ -28,7 +28,7 @@ @Data @EqualsAndHashCode(callSuper = true) public class CanalSourceFullConfig extends SourceConfig { - private SourceConnectorConfig connectorConfig; + private SourceConnectorConfig sourceConnectorConfig; private List startPosition; private int parallel; private int flushSize; diff --git 
a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java new file mode 100644 index 0000000000..94fe007b5f --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.remote.job.SyncConsistency; +import org.apache.eventmesh.common.remote.job.SyncMode; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSourceIncrementConfig extends CanalSourceConfig { + + private String destination; + + private Long canalInstanceId; + + private String desc; + + private boolean ddlSync = true; + + private boolean filterTableError = false; + + private Long slaveId; + + private Short clientId; + + private String serverUUID; + + private boolean isMariaDB = true; + + private boolean isGTIDMode = true; + + private Integer batchSize = 10000; + + private Long batchTimeout = -1L; + + private String tableFilter; + + private String fieldFilter; + + private List recordPositions; + + // ================================= channel parameter + // ================================ + + // enable remedy + private Boolean enableRemedy = false; + + // sync mode: field/row + private SyncMode syncMode; + + // sync consistency + private SyncConsistency syncConsistency; + + // ================================= system parameter + // ================================ + + // Column name of the bidirectional synchronization mark + private String needSyncMarkTableColumnName = "needSync"; + + // Column value of the bidirectional synchronization mark + private String needSyncMarkTableColumnValue = "needSync"; + + private SourceConnectorConfig sourceConnectorConfig; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java index 9e9cea304d..f2328541c4 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java +++ 
b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java @@ -58,6 +58,10 @@ public static T convertValue(Object fromValue, Class toValueType) { return OBJECT_MAPPER.convertValue(fromValue, toValueType); } + public static T convertValue(Object fromValue, TypeReference toValueTypeRef) { + return OBJECT_MAPPER.convertValue(fromValue, toValueTypeRef); + } + public static T mapToObject(Map map, Class beanClass) { if (map == null) { return null; @@ -177,6 +181,13 @@ public static T parseTypeReferenceObject(String text, TypeReference typeR } } + public static T parseTypeReferenceObject(Object object, TypeReference typeReference) { + if (object == null) { + return null; + } + return convertValue(object, typeReference); + } + public static T parseTypeReferenceObject(byte[] text, TypeReference typeReference) { try { return OBJECT_MAPPER.readValue(text, typeReference); diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java index 0ad07577f9..7d83bd4f3f 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.canal.interceptor; -import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkIncrementConfig; import org.apache.eventmesh.connector.canal.CanalConnectRecord; import org.apache.eventmesh.connector.canal.dialect.DbDialect; import org.apache.eventmesh.connector.canal.model.EventColumn; @@ -40,7 +40,7 @@ public class 
SqlBuilderLoadInterceptor { @Setter private DbDialect dbDialect; - public boolean before(CanalSinkConfig sinkConfig, CanalConnectRecord record) { + public boolean before(CanalSinkIncrementConfig sinkConfig, CanalConnectRecord record) { // build sql SqlTemplate sqlTemplate = dbDialect.getSqlTemplate(); EventType type = record.getEventType(); @@ -52,12 +52,12 @@ public boolean before(CanalSinkConfig sinkConfig, CanalConnectRecord record) { if (type.isInsert()) { sql = sqlTemplate.getMergeSql(schemaName, - record.getTableName(), - buildColumnNames(record.getKeys()), - buildColumnNames(record.getColumns()), - new String[] {}, - true, - shardColumns); + record.getTableName(), + buildColumnNames(record.getKeys()), + buildColumnNames(record.getColumns()), + new String[] {}, + true, + shardColumns); } else if (type.isUpdate()) { boolean existOldKeys = !CollectionUtils.isEmpty(record.getOldKeys()); boolean rowMode = sinkConfig.getSyncMode().isRow(); diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java new file mode 100644 index 0000000000..84e01ca85c --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java @@ -0,0 +1,406 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.SqlUtils; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import org.apache.commons.lang3.StringUtils; + +import java.math.BigDecimal; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Types; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.LockSupport; + +import 
com.alibaba.druid.pool.DruidPooledConnection; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSinkCheckConnector implements Sink, ConnectorCreateService { + private CanalSinkFullConfig config; + private RdbTableMgr tableMgr; + private final DateTimeFormatter dataTimePattern = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSSSSS"); + + @Override + public void start() throws Exception { + tableMgr.start(); + } + + @Override + public void stop() throws Exception { + + } + + @Override + public Sink create() { + return new CanalSinkCheckConnector(); + } + + @Override + public Class configClass() { + return CanalSinkFullConfig.class; + } + + @Override + public void init(Config config) throws Exception { + this.config = (CanalSinkFullConfig) config; + init(); + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + this.config = (CanalSinkFullConfig) ((SinkConnectorContext) connectorContext).getSinkConfig(); + init(); + } + + private void init() { + if (config.getSinkConnectorConfig() == null) { + throw new EventMeshException(String.format("[%s] sink config is null", this.getClass())); + } + DatabaseConnection.sinkConfig = this.config.getSinkConnectorConfig(); + DatabaseConnection.initSinkConnection(); + DatabaseConnection.sinkDataSource.setDefaultAutoCommit(false); + + tableMgr = new RdbTableMgr(this.config.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); + } + + @Override + public void commit(ConnectRecord record) { + + } + + @Override + public String name() { + return null; + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void put(List sinkRecords) { + if (sinkRecords == null || sinkRecords.isEmpty() || sinkRecords.get(0) == null) { + if (log.isDebugEnabled()) { + log.debug("[{}] got sink records are none", this.getClass()); + } + return; + } + ConnectRecord record = sinkRecords.get(0); + List> data = (List>) record.getData(); + if (data == null || 
data.isEmpty()) { + if (log.isDebugEnabled()) { + log.debug("[{}] got rows data is none", this.getClass()); + } + return; + } + CanalFullRecordOffset offset = (CanalFullRecordOffset) record.getPosition().getRecordOffset(); + if (offset == null || offset.getPosition() == null) { + if (log.isDebugEnabled()) { + log.debug("[{}] got canal full offset is none", this.getClass()); + } + return; + } + + MySQLTableDef tableDefinition = (MySQLTableDef) tableMgr.getTable(offset.getPosition().getSchema(), offset.getPosition().getTableName()); + if (tableDefinition == null) { + log.warn("target schema [{}] table [{}] is not exists", offset.getPosition().getSchema(), offset.getPosition().getTableName()); + return; + } + List cols = new ArrayList<>(tableDefinition.getColumnDefinitions().values()); + String sql = generateInsertPrepareSql(offset.getPosition().getSchema(), offset.getPosition().getTableName(), + cols); + DruidPooledConnection connection = null; + PreparedStatement statement = null; + try { + connection = DatabaseConnection.sinkDataSource.getConnection(); + statement = + connection.prepareStatement(sql); + for (Map col : data) { + setPrepareParams(statement, col, cols); + log.info("insert sql {}", statement.toString()); + statement.addBatch(); + } + statement.executeBatch(); + connection.commit(); + } catch (SQLException e) { + log.warn("full sink process schema [{}] table [{}] connector write fail", tableDefinition.getSchemaName(), tableDefinition.getTableName(), + e); + LockSupport.parkNanos(3000 * 1000L); + } catch (Exception e) { + log.error("full sink process schema [{}] table [{}] catch unknown exception", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + try { + if (connection != null && !connection.isClosed()) { + connection.rollback(); + } + } catch (SQLException rollback) { + log.warn("full sink process schema [{}] table [{}] rollback fail", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + } + } finally { + if 
(statement != null) { + try { + statement.close(); + } catch (SQLException e) { + log.info("close prepare statement fail", e); + } + } + + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + log.info("close db connection fail", e); + } + } + } + } + + private void setPrepareParams(PreparedStatement preparedStatement, Map col, List columnDefs) throws Exception { + for (int i = 0; i < columnDefs.size(); i++) { + writeColumn(preparedStatement, i + 1, columnDefs.get(i), col.get(columnDefs.get(i).getName())); + } + } + + public void writeColumn(PreparedStatement ps, int index, MySQLColumnDef colType, Object value) throws Exception { + if (colType == null) { + String colVal = null; + if (value != null) { + colVal = value.toString(); + } + if (colVal == null) { + ps.setNull(index, Types.VARCHAR); + } else { + ps.setString(index, colVal); + } + } else if (value == null) { + ps.setNull(index, colType.getJdbcType().getVendorTypeNumber()); + } else { + switch (colType.getType()) { + case TINYINT: + case SMALLINT: + case MEDIUMINT: + case INT: + Long longValue = SqlUtils.toLong(value); + if (longValue == null) { + ps.setNull(index, 4); + return; + } else { + ps.setLong(index, longValue); + return; + } + case BIGINT: + case DECIMAL: + BigDecimal bigDecimalValue = SqlUtils.toBigDecimal(value); + if (bigDecimalValue == null) { + ps.setNull(index, 3); + return; + } else { + ps.setBigDecimal(index, bigDecimalValue); + return; + } + case FLOAT: + case DOUBLE: + Double doubleValue = SqlUtils.toDouble(value); + if (doubleValue == null) { + ps.setNull(index, 8); + } else { + ps.setDouble(index, doubleValue); + } + return; + case DATE: + case DATETIME: + case TIMESTAMP: + LocalDateTime dateValue = null; + if (!SqlUtils.isZeroTime(value)) { + try { + dateValue = SqlUtils.toLocalDateTime(value); + } catch (Exception e) { + ps.setString(index, SqlUtils.convertToString(value)); + return; + } + } else if (StringUtils.isNotBlank(config.getZeroDate())) { + 
dateValue = SqlUtils.toLocalDateTime(config.getZeroDate()); + } else { + ps.setObject(index, value); + return; + } + if (dateValue == null) { + ps.setNull(index, Types.TIMESTAMP); + } else { + ps.setString(index, dataTimePattern.format(dateValue)); + } + return; + case TIME: + String timeValue = SqlUtils.toMySqlTime(value); + if (StringUtils.isBlank(timeValue)) { + ps.setNull(index, 12); + return; + } else { + ps.setString(index, timeValue); + return; + } + case YEAR: + LocalDateTime yearValue = null; + if (!SqlUtils.isZeroTime(value)) { + yearValue = SqlUtils.toLocalDateTime(value); + } else if (StringUtils.isNotBlank(config.getZeroDate())) { + yearValue = SqlUtils.toLocalDateTime(config.getZeroDate()); + } else { + ps.setInt(index, 0); + return; + } + if (yearValue == null) { + ps.setNull(index, 4); + } else { + ps.setInt(index, yearValue.getYear()); + } + return; + case CHAR: + case VARCHAR: + case TINYTEXT: + case TEXT: + case MEDIUMTEXT: + case LONGTEXT: + case ENUM: + case SET: + String strValue = value.toString(); + if (strValue == null) { + ps.setNull(index, Types.VARCHAR); + return; + } else { + ps.setString(index, strValue); + return; + } + case JSON: + String jsonValue = value.toString(); + if (jsonValue == null) { + ps.setNull(index, Types.VARCHAR); + } else { + ps.setString(index, jsonValue); + } + return; + case BIT: + if (value instanceof Boolean) { + byte[] arrayBoolean = new byte[1]; + arrayBoolean[0] = (byte) (Boolean.TRUE.equals(value) ? 
1 : 0); + ps.setBytes(index, arrayBoolean); + return; + } else if (value instanceof Number) { + ps.setBytes(index, SqlUtils.numberToBinaryArray((Number) value)); + return; + } else if ((value instanceof byte[]) || value.toString().startsWith("0x") || value.toString().startsWith("0X")) { + byte[] arrayBoolean = SqlUtils.toBytes(value); + if (arrayBoolean == null || arrayBoolean.length == 0) { + ps.setNull(index, Types.BIT); + return; + } else { + ps.setBytes(index, arrayBoolean); + return; + } + } else { + ps.setBytes(index, SqlUtils.numberToBinaryArray(SqlUtils.toInt(value))); + return; + } + case BINARY: + case VARBINARY: + case TINYBLOB: + case BLOB: + case MEDIUMBLOB: + case LONGBLOB: + byte[] binaryValue = SqlUtils.toBytes(value); + if (binaryValue == null) { + ps.setNull(index, Types.BINARY); + return; + } else { + ps.setBytes(index, binaryValue); + return; + } + case GEOMETRY: + case GEOMETRY_COLLECTION: + case GEOM_COLLECTION: + case POINT: + case LINESTRING: + case POLYGON: + case MULTIPOINT: + case MULTILINESTRING: + case MULTIPOLYGON: + String geoValue = SqlUtils.toGeometry(value); + if (geoValue == null) { + ps.setNull(index, Types.VARCHAR); + return; + } + ps.setString(index, geoValue); + return; + default: + throw new UnsupportedOperationException("columnType '" + colType + "' Unsupported."); + } + } + } + + private String generateInsertPrepareSql(String schema, String table, List cols) { + StringBuilder builder = new StringBuilder(); + builder.append("INSERT IGNORE INTO "); + builder.append(Constants.MySQLQuot); + builder.append(schema); + builder.append(Constants.MySQLQuot); + builder.append("."); + builder.append(Constants.MySQLQuot); + builder.append(table); + builder.append(Constants.MySQLQuot); + StringBuilder columns = new StringBuilder(); + StringBuilder values = new StringBuilder(); + for (MySQLColumnDef colInfo : cols) { + if (columns.length() > 0) { + columns.append(", "); + values.append(", "); + } + String wrapName = Constants.MySQLQuot + 
colInfo.getName() + Constants.MySQLQuot; + columns.append(wrapName); + values.append(colInfo.getType() == null ? "?" : colInfo.getType().genPrepareStatement4Insert()); + } + builder.append("(").append(columns).append(")"); + builder.append(" VALUES "); + builder.append("(").append(values).append(")"); + return builder.toString(); + } + + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java index 49fb10dd35..b03df2dfff 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java @@ -19,62 +19,14 @@ import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; -import org.apache.eventmesh.connector.canal.CanalConnectRecord; -import org.apache.eventmesh.connector.canal.DatabaseConnection; -import org.apache.eventmesh.connector.canal.SqlUtils; -import org.apache.eventmesh.connector.canal.dialect.DbDialect; -import org.apache.eventmesh.connector.canal.dialect.MysqlDialect; -import org.apache.eventmesh.connector.canal.interceptor.SqlBuilderLoadInterceptor; -import org.apache.eventmesh.connector.canal.model.EventColumn; -import org.apache.eventmesh.connector.canal.model.EventType; -import org.apache.eventmesh.connector.canal.sink.DbLoadContext; -import org.apache.eventmesh.connector.canal.sink.DbLoadData; -import org.apache.eventmesh.connector.canal.sink.DbLoadData.TableLoadData; -import org.apache.eventmesh.connector.canal.sink.DbLoadMerger; -import org.apache.eventmesh.connector.canal.sink.GtidBatch; -import 
org.apache.eventmesh.connector.canal.sink.GtidBatchManager; -import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.common.remote.job.JobType; import org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; -import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; -import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.commons.lang3.SerializationUtils; - -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Types; -import java.util.ArrayList; -import java.util.Arrays; import java.util.List; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.springframework.dao.DataAccessException; -import org.springframework.dao.DeadlockLoserDataAccessException; -import org.springframework.jdbc.core.BatchPreparedStatementSetter; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.PreparedStatementSetter; -import org.springframework.jdbc.core.StatementCallback; -import org.springframework.jdbc.core.StatementCreatorUtils; -import org.springframework.jdbc.support.lob.DefaultLobHandler; -import org.springframework.jdbc.support.lob.LobCreator; -import org.springframework.transaction.support.TransactionCallback; 
-import org.springframework.util.CollectionUtils; - -import com.alibaba.otter.canal.common.utils.NamedThreadFactory; import lombok.extern.slf4j.Slf4j; @@ -83,21 +35,7 @@ public class CanalSinkConnector implements Sink, ConnectorCreateService { private CanalSinkConfig sinkConfig; - private JdbcTemplate jdbcTemplate; - - private SqlBuilderLoadInterceptor interceptor; - - private DbDialect dbDialect; - - private ExecutorService executor; - - private ExecutorService gtidSingleExecutor; - - private int batchSize = 50; - - private boolean useBatch = true; - - private RdbTableMgr tableMgr; + private Sink sink; @Override public Class configClass() { @@ -114,77 +52,46 @@ public void init(Config config) throws Exception { public void init(ConnectorContext connectorContext) throws Exception { // init config for canal source connector SinkConnectorContext sinkConnectorContext = (SinkConnectorContext) connectorContext; - this.sinkConfig = (CanalSinkConfig) sinkConnectorContext.getSinkConfig(); - this.batchSize = sinkConfig.getBatchSize(); - this.useBatch = sinkConfig.getUseBatch(); - DatabaseConnection.sinkConfig = this.sinkConfig.getSinkConnectorConfig(); - DatabaseConnection.initSinkConnection(); - jdbcTemplate = new JdbcTemplate(DatabaseConnection.sinkDataSource); - dbDialect = new MysqlDialect(jdbcTemplate, new DefaultLobHandler()); - interceptor = new SqlBuilderLoadInterceptor(); - interceptor.setDbDialect(dbDialect); - tableMgr = new RdbTableMgr(sinkConfig.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); - executor = new ThreadPoolExecutor(sinkConfig.getPoolSize(), - sinkConfig.getPoolSize(), - 0L, - TimeUnit.MILLISECONDS, - new ArrayBlockingQueue<>(sinkConfig.getPoolSize() * 4), - new NamedThreadFactory("canalSink"), - new ThreadPoolExecutor.CallerRunsPolicy()); - gtidSingleExecutor = Executors.newSingleThreadExecutor(r -> new Thread(r, "gtidSingleExecutor")); + if (sinkConnectorContext.getJobType().equals(JobType.FULL)) { + this.sink = new 
CanalSinkFullConnector(); + } else if (sinkConnectorContext.getJobType().equals(JobType.INCREASE)) { + this.sink = new CanalSinkIncrementConnector(); + } else if (sinkConnectorContext.getJobType().equals(JobType.CHECK)) { + this.sink = new CanalSinkCheckConnector(); + } else { + throw new RuntimeException("unsupported job type " + sinkConnectorContext.getJobType()); + } + this.sink.init(sinkConnectorContext); } @Override public void start() throws Exception { - tableMgr.start(); + this.sink.start(); } @Override public void commit(ConnectRecord record) { - + this.sink.commit(record); } @Override public String name() { - return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); + return this.sink.name(); } @Override public void onException(ConnectRecord record) { - + this.sink.onException(record); } @Override - public void stop() { - executor.shutdown(); - gtidSingleExecutor.shutdown(); + public void stop() throws Exception { + this.sink.stop(); } @Override public void put(List sinkRecords) { - DbLoadContext context = new DbLoadContext(); - for (ConnectRecord connectRecord : sinkRecords) { - List canalConnectRecordList = new ArrayList<>(); - // deep copy connectRecord data - for (CanalConnectRecord record : (List) connectRecord.getData()) { - canalConnectRecordList.add(SerializationUtils.clone(record)); - } - canalConnectRecordList = filterRecord(canalConnectRecordList); - if (isDdlDatas(canalConnectRecordList)) { - doDdl(context, canalConnectRecordList, connectRecord); - } else if (sinkConfig.isGTIDMode()) { - doLoadWithGtid(context, sinkConfig, connectRecord); - } else { - canalConnectRecordList = DbLoadMerger.merge(canalConnectRecordList); - - DbLoadData loadData = new DbLoadData(); - doBefore(canalConnectRecordList, loadData); - - doLoad(context, sinkConfig, loadData, connectRecord); - - } - - } + this.sink.put(sinkRecords); } @Override @@ -192,651 +99,4 @@ public Sink create() { return new CanalSinkConnector(); } - private boolean isDdlDatas(List 
canalConnectRecordList) { - boolean result = false; - for (CanalConnectRecord canalConnectRecord : canalConnectRecordList) { - result |= canalConnectRecord.getEventType().isDdl(); - if (result && !canalConnectRecord.getEventType().isDdl()) { - throw new RuntimeException("ddl/dml can't be in one batch, it's may be a bug , pls submit issues."); - } - } - return result; - } - - private List filterRecord(List canalConnectRecordList) { - return canalConnectRecordList.stream() - .filter(record -> tableMgr.getTable(record.getSchemaName(), record.getTableName()) != null) - .collect(Collectors.toList()); - } - - private void doDdl(DbLoadContext context, List canalConnectRecordList, ConnectRecord connectRecord) { - for (final CanalConnectRecord record : canalConnectRecordList) { - try { - Boolean result = jdbcTemplate.execute(new StatementCallback() { - - public Boolean doInStatement(Statement stmt) throws SQLException, DataAccessException { - boolean result = true; - if (StringUtils.isNotEmpty(record.getDdlSchemaName())) { - result &= stmt.execute("use `" + record.getDdlSchemaName() + "`"); - } - result &= stmt.execute(record.getSql()); - return result; - } - }); - if (Boolean.TRUE.equals(result)) { - context.getProcessedRecords().add(record); - } else { - context.getFailedRecords().add(record); - } - } catch (Throwable e) { - connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, e)); - throw new RuntimeException(e); - } - } - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - - private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { - SendExceptionContext sendExceptionContext = new SendExceptionContext(); - sendExceptionContext.setMessageId(record.getRecordId()); - sendExceptionContext.setCause(e); - if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { - sendExceptionContext.setTopic(record.getExtension("topic")); - } - return sendExceptionContext; - 
} - - private SendResult convertToSendResult(ConnectRecord record) { - SendResult result = new SendResult(); - result.setMessageId(record.getRecordId()); - if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { - result.setTopic(record.getExtension("topic")); - } - return result; - } - - private void doBefore(List canalConnectRecordList, final DbLoadData loadData) { - for (final CanalConnectRecord record : canalConnectRecordList) { - boolean filter = interceptor.before(sinkConfig, record); - if (!filter) { - loadData.merge(record); - } - } - } - - private void doLoad(DbLoadContext context, CanalSinkConfig sinkConfig, DbLoadData loadData, ConnectRecord connectRecord) { - List> batchDatas = new ArrayList<>(); - for (TableLoadData tableData : loadData.getTables()) { - if (useBatch) { - batchDatas.addAll(split(tableData.getDeleteDatas())); - } else { - for (CanalConnectRecord data : tableData.getDeleteDatas()) { - batchDatas.add(Arrays.asList(data)); - } - } - } - - doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); - - batchDatas.clear(); - - for (TableLoadData tableData : loadData.getTables()) { - if (useBatch) { - batchDatas.addAll(split(tableData.getInsertDatas())); - batchDatas.addAll(split(tableData.getUpdateDatas())); - } else { - for (CanalConnectRecord data : tableData.getInsertDatas()) { - batchDatas.add(Arrays.asList(data)); - } - for (CanalConnectRecord data : tableData.getUpdateDatas()) { - batchDatas.add(Arrays.asList(data)); - } - } - } - - doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); - - batchDatas.clear(); - } - - private void doLoadWithGtid(DbLoadContext context, CanalSinkConfig sinkConfig, ConnectRecord connectRecord) { - int batchIndex = connectRecord.getExtension("batchIndex", Integer.class); - int totalBatches = connectRecord.getExtension("totalBatches", Integer.class); - List canalConnectRecordList = (List) connectRecord.getData(); - String gtid = 
canalConnectRecordList.get(0).getCurrentGtid(); - GtidBatchManager.addBatch(gtid, batchIndex, totalBatches, canalConnectRecordList); - // check whether the batch is complete - if (GtidBatchManager.isComplete(gtid)) { - GtidBatch batch = GtidBatchManager.getGtidBatch(gtid); - List> totalRows = batch.getBatches(); - List filteredRows = new ArrayList<>(); - for (List canalConnectRecords : totalRows) { - canalConnectRecords = filterRecord(canalConnectRecords); - if (!CollectionUtils.isEmpty(canalConnectRecords)) { - for (final CanalConnectRecord record : canalConnectRecords) { - boolean filter = interceptor.before(sinkConfig, record); - filteredRows.add(record); - } - } - } - context.setGtid(gtid); - Future result = gtidSingleExecutor.submit(new DbLoadWorker(context, filteredRows, dbDialect, false, sinkConfig)); - Exception ex = null; - try { - ex = result.get(); - if (ex == null) { - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - } catch (Exception e) { - ex = e; - } - Boolean skipException = sinkConfig.getSkipException(); - if (skipException != null && skipException) { - if (ex != null) { - // do skip - log.warn("skip exception will ack data : {} , caused by {}", - filteredRows, - ExceptionUtils.getFullStackTrace(ex)); - GtidBatchManager.removeGtidBatch(gtid); - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - } else { - if (ex != null) { - log.error("sink connector will shutdown by " + ex.getMessage(), ExceptionUtils.getFullStackTrace(ex)); - connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); - gtidSingleExecutor.shutdown(); - System.exit(1); - } else { - GtidBatchManager.removeGtidBatch(gtid); - } - } - } else { - log.info("Batch received, waiting for other batches."); - // ack this record - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - } - - private List> split(List records) { - List> result = new ArrayList<>(); - if (records == 
null || records.isEmpty()) { - return result; - } else { - int[] bits = new int[records.size()]; - for (int i = 0; i < bits.length; i++) { - while (i < bits.length && bits[i] == 1) { - i++; - } - - if (i >= bits.length) { - break; - } - - List batch = new ArrayList<>(); - bits[i] = 1; - batch.add(records.get(i)); - for (int j = i + 1; j < bits.length && batch.size() < batchSize; j++) { - if (bits[j] == 0 && canBatch(records.get(i), records.get(j))) { - batch.add(records.get(j)); - bits[j] = 1; - } - } - result.add(batch); - } - - return result; - } - } - - private boolean canBatch(CanalConnectRecord source, CanalConnectRecord target) { - return StringUtils.equals(source.getSchemaName(), - target.getSchemaName()) - && StringUtils.equals(source.getTableName(), target.getTableName()) - && StringUtils.equals(source.getSql(), target.getSql()); - } - - private void doTwoPhase(DbLoadContext context, CanalSinkConfig sinkConfig, List> totalRows, boolean canBatch, - ConnectRecord connectRecord) { - List> results = new ArrayList<>(); - for (List rows : totalRows) { - if (CollectionUtils.isEmpty(rows)) { - continue; - } - results.add(executor.submit(new DbLoadWorker(context, rows, dbDialect, canBatch, sinkConfig))); - } - - boolean partFailed = false; - for (Future result : results) { - Exception ex = null; - try { - ex = result.get(); - if (ex == null) { - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - } catch (Exception e) { - ex = e; - } - - if (ex != null) { - log.warn("##load phase one failed!", ex); - partFailed = true; - } - } - - if (partFailed) { - List retryRecords = new ArrayList<>(); - for (List rows : totalRows) { - retryRecords.addAll(rows); - } - - context.getFailedRecords().clear(); - - Boolean skipException = sinkConfig.getSkipException(); - if (skipException != null && skipException) { - for (CanalConnectRecord retryRecord : retryRecords) { - DbLoadWorker worker = new DbLoadWorker(context, Arrays.asList(retryRecord), 
dbDialect, false, sinkConfig); - try { - Exception ex = worker.call(); - if (ex != null) { - // do skip - log.warn("skip exception for data : {} , caused by {}", - retryRecord, - ExceptionUtils.getFullStackTrace(ex)); - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - } catch (Exception ex) { - // do skip - log.warn("skip exception for data : {} , caused by {}", - retryRecord, - ExceptionUtils.getFullStackTrace(ex)); - connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); - } - } - } else { - DbLoadWorker worker = new DbLoadWorker(context, retryRecords, dbDialect, false, sinkConfig); - try { - Exception ex = worker.call(); - if (ex != null) { - throw ex; - } - } catch (Exception ex) { - log.error("##load phase two failed!", ex); - log.error("sink connector will shutdown by " + ex.getMessage(), ex); - connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); - executor.shutdown(); - System.exit(1); - } - } - } - } - - enum ExecuteResult { - SUCCESS, ERROR, RETRY - } - - class DbLoadWorker implements Callable { - - private final DbLoadContext context; - private final DbDialect dbDialect; - private final List records; - private final boolean canBatch; - - private final CanalSinkConfig sinkConfig; - - private final List allFailedRecords = new ArrayList<>(); - private final List allProcessedRecords = new ArrayList<>(); - private final List processedRecords = new ArrayList<>(); - private final List failedRecords = new ArrayList<>(); - - public DbLoadWorker(DbLoadContext context, List records, DbDialect dbDialect, boolean canBatch, - CanalSinkConfig sinkConfig) { - this.context = context; - this.records = records; - this.canBatch = canBatch; - this.dbDialect = dbDialect; - this.sinkConfig = sinkConfig; - } - - public Exception call() throws Exception { - try { - return doCall(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private Exception doCall() { - 
RuntimeException error = null; - ExecuteResult exeResult = null; - - if (sinkConfig.isGTIDMode()) { - int retryCount = 0; - final List toExecuteRecords = new ArrayList<>(); - try { - if (!CollectionUtils.isEmpty(failedRecords)) { - // if failedRecords not empty, make it retry - toExecuteRecords.addAll(failedRecords); - } else { - toExecuteRecords.addAll(records); - // add to failed record first, maybe get lob or datasource error - failedRecords.addAll(toExecuteRecords); - } - JdbcTemplate template = dbDialect.getJdbcTemplate(); - String sourceGtid = context.getGtid(); - if (StringUtils.isNotEmpty(sourceGtid) && !sinkConfig.isMariaDB()) { - String setMySQLGtid = "SET @@session.gtid_next = '" + sourceGtid + "';"; - template.execute(setMySQLGtid); - } else if (StringUtils.isNotEmpty(sourceGtid) && sinkConfig.isMariaDB()) { - throw new RuntimeException("unsupport gtid mode for mariaDB"); - } else { - log.error("gtid is empty in gtid mode"); - throw new RuntimeException("gtid is empty in gtid mode"); - } - - final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator(); - int affect = (Integer) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { - try { - failedRecords.clear(); - processedRecords.clear(); - int affect1 = 0; - for (CanalConnectRecord record : toExecuteRecords) { - int affects = template.update(record.getSql(), new PreparedStatementSetter() { - public void setValues(PreparedStatement ps) throws SQLException { - doPreparedStatement(ps, dbDialect, lobCreator, record); - } - }); - affect1 = affect1 + affects; - processStat(record, affects, false); - } - return affect1; - } catch (Exception e) { - // rollback - status.setRollbackOnly(); - throw new RuntimeException("Failed to executed", e); - } finally { - lobCreator.close(); - } - }); - - // reset gtid - if (sinkConfig.isMariaDB()) { - throw new RuntimeException("unsupport gtid mode for mariaDB"); - } else { - String resetMySQLGtid = "SET @@session.gtid_next = 
'AUTOMATIC';"; - dbDialect.getJdbcTemplate().execute(resetMySQLGtid); - } - - error = null; - exeResult = ExecuteResult.SUCCESS; - } catch (DeadlockLoserDataAccessException ex) { - error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); - exeResult = ExecuteResult.RETRY; - } catch (Throwable ex) { - error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); - exeResult = ExecuteResult.ERROR; - } - - if (ExecuteResult.SUCCESS == exeResult) { - allFailedRecords.addAll(failedRecords); - allProcessedRecords.addAll(processedRecords); - failedRecords.clear(); - processedRecords.clear(); - } else if (ExecuteResult.RETRY == exeResult) { - retryCount = retryCount + 1; - processedRecords.clear(); - failedRecords.clear(); - failedRecords.addAll(toExecuteRecords); - int retry = 3; - if (retryCount >= retry) { - processFailedDatas(toExecuteRecords.size()); - throw new RuntimeException(String.format("execute retry %s times failed", retryCount), error); - } else { - try { - int retryWait = 3000; - int wait = retryCount * retryWait; - wait = Math.max(wait, retryWait); - Thread.sleep(wait); - } catch (InterruptedException ex) { - Thread.interrupted(); - processFailedDatas(toExecuteRecords.size()); - throw new RuntimeException(ex); - } - } - } else { - processedRecords.clear(); - failedRecords.clear(); - failedRecords.addAll(toExecuteRecords); - processFailedDatas(toExecuteRecords.size()); - throw error; - } - } else { - int index = 0; - while (index < records.size()) { - final List toExecuteRecords = new ArrayList<>(); - if (useBatch && canBatch) { - int end = Math.min(index + batchSize, records.size()); - toExecuteRecords.addAll(records.subList(index, end)); - index = end; - } else { - toExecuteRecords.add(records.get(index)); - index = index + 1; - } - - int retryCount = 0; - while (true) { - try { - if (!CollectionUtils.isEmpty(failedRecords)) { - toExecuteRecords.clear(); - toExecuteRecords.addAll(failedRecords); - } else { - 
failedRecords.addAll(toExecuteRecords); - } - - final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator(); - if (useBatch && canBatch) { - JdbcTemplate template = dbDialect.getJdbcTemplate(); - final String sql = toExecuteRecords.get(0).getSql(); - - int[] affects = new int[toExecuteRecords.size()]; - - affects = (int[]) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { - try { - failedRecords.clear(); - processedRecords.clear(); - int[] affects1 = template.batchUpdate(sql, new BatchPreparedStatementSetter() { - - public void setValues(PreparedStatement ps, int idx) throws SQLException { - doPreparedStatement(ps, dbDialect, lobCreator, toExecuteRecords.get(idx)); - } - - public int getBatchSize() { - return toExecuteRecords.size(); - } - }); - return affects1; - } catch (Exception e) { - // rollback - status.setRollbackOnly(); - throw new RuntimeException("Failed to execute batch with GTID", e); - } finally { - lobCreator.close(); - } - }); - - for (int i = 0; i < toExecuteRecords.size(); i++) { - assert affects != null; - processStat(toExecuteRecords.get(i), affects[i], true); - } - } else { - final CanalConnectRecord record = toExecuteRecords.get(0); - JdbcTemplate template = dbDialect.getJdbcTemplate(); - int affect = 0; - affect = (Integer) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { - try { - failedRecords.clear(); - processedRecords.clear(); - int affect1 = template.update(record.getSql(), new PreparedStatementSetter() { - - public void setValues(PreparedStatement ps) throws SQLException { - doPreparedStatement(ps, dbDialect, lobCreator, record); - } - }); - return affect1; - } catch (Exception e) { - // rollback - status.setRollbackOnly(); - throw new RuntimeException("Failed to executed", e); - } finally { - lobCreator.close(); - } - }); - processStat(record, affect, false); - } - - error = null; - exeResult = ExecuteResult.SUCCESS; - } catch (DeadlockLoserDataAccessException ex) { - 
error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); - exeResult = ExecuteResult.RETRY; - } catch (Throwable ex) { - error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); - exeResult = ExecuteResult.ERROR; - } - - if (ExecuteResult.SUCCESS == exeResult) { - allFailedRecords.addAll(failedRecords); - allProcessedRecords.addAll(processedRecords); - failedRecords.clear(); - processedRecords.clear(); - break; // do next eventData - } else if (ExecuteResult.RETRY == exeResult) { - retryCount = retryCount + 1; - processedRecords.clear(); - failedRecords.clear(); - failedRecords.addAll(toExecuteRecords); - int retry = 3; - if (retryCount >= retry) { - processFailedDatas(index); - throw new RuntimeException(String.format("execute retry %s times failed", retryCount), error); - } else { - try { - int retryWait = 3000; - int wait = retryCount * retryWait; - wait = Math.max(wait, retryWait); - Thread.sleep(wait); - } catch (InterruptedException ex) { - Thread.interrupted(); - processFailedDatas(index); - throw new RuntimeException(ex); - } - } - } else { - processedRecords.clear(); - failedRecords.clear(); - failedRecords.addAll(toExecuteRecords); - processFailedDatas(index); - throw error; - } - } - } - } - - context.getFailedRecords().addAll(allFailedRecords); - context.getProcessedRecords().addAll(allProcessedRecords); - return null; - } - - private void doPreparedStatement(PreparedStatement ps, DbDialect dbDialect, LobCreator lobCreator, - CanalConnectRecord record) throws SQLException { - EventType type = record.getEventType(); - List columns = new ArrayList(); - if (type.isInsert()) { - columns.addAll(record.getColumns()); - columns.addAll(record.getKeys()); - } else if (type.isDelete()) { - columns.addAll(record.getKeys()); - } else if (type.isUpdate()) { - boolean existOldKeys = !CollectionUtils.isEmpty(record.getOldKeys()); - columns.addAll(record.getUpdatedColumns()); - columns.addAll(record.getKeys()); - if (existOldKeys) { - 
columns.addAll(record.getOldKeys()); - } - } - - for (int i = 0; i < columns.size(); i++) { - int paramIndex = i + 1; - EventColumn column = columns.get(i); - int sqlType = column.getColumnType(); - - Object param = null; - if (dbDialect instanceof MysqlDialect - && (sqlType == Types.TIME || sqlType == Types.TIMESTAMP || sqlType == Types.DATE)) { - param = column.getColumnValue(); - } else { - param = SqlUtils.stringToSqlValue(column.getColumnValue(), - sqlType, - false, - dbDialect.isEmptyStringNulled()); - } - - try { - switch (sqlType) { - case Types.CLOB: - lobCreator.setClobAsString(ps, paramIndex, (String) param); - break; - - case Types.BLOB: - lobCreator.setBlobAsBytes(ps, paramIndex, (byte[]) param); - break; - case Types.TIME: - case Types.TIMESTAMP: - case Types.DATE: - if (dbDialect instanceof MysqlDialect) { - ps.setObject(paramIndex, param); - } else { - StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); - } - break; - case Types.BIT: - if (dbDialect instanceof MysqlDialect) { - StatementCreatorUtils.setParameterValue(ps, paramIndex, Types.DECIMAL, null, param); - } else { - StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); - } - break; - default: - StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); - break; - } - } catch (SQLException ex) { - log.error("## SetParam error , [pairId={}, sqltype={}, value={}]", - record.getPairId(), sqlType, param); - throw ex; - } - } - } - - private void processStat(CanalConnectRecord record, int affect, boolean batch) { - if (batch && (affect < 1 && affect != Statement.SUCCESS_NO_INFO)) { - failedRecords.add(record); - } else if (!batch && affect < 1) { - failedRecords.add(record); - } else { - processedRecords.add(record); - // this.processStat(record, context); - } - } - - private void processFailedDatas(int index) { - allFailedRecords.addAll(failedRecords); - context.getFailedRecords().addAll(allFailedRecords); - for (; index < 
records.size(); index++) { - context.getFailedRecords().add(records.get(index)); - } - allProcessedRecords.addAll(processedRecords); - context.getProcessedRecords().addAll(allProcessedRecords); - } - } - } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java index 2b4c9d7a94..4137123922 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java @@ -18,12 +18,14 @@ package org.apache.eventmesh.connector.canal.sink.connector; import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkFullConfig; import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants; import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef; import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; import org.apache.eventmesh.common.exception.EventMeshException; import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.canal.DatabaseConnection; import org.apache.eventmesh.connector.canal.SqlUtils; import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; @@ -31,7 +33,10 @@ import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; +import 
org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; import org.apache.commons.lang3.StringUtils; @@ -47,11 +52,13 @@ import java.util.concurrent.locks.LockSupport; import com.alibaba.druid.pool.DruidPooledConnection; +import com.fasterxml.jackson.core.type.TypeReference; import lombok.extern.slf4j.Slf4j; @Slf4j public class CanalSinkFullConnector implements Sink, ConnectorCreateService { + private CanalSinkFullConfig config; private RdbTableMgr tableMgr; private final DateTimeFormatter dataTimePattern = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSSSSS"); @@ -84,19 +91,21 @@ public void init(Config config) throws Exception { @Override public void init(ConnectorContext connectorContext) throws Exception { - this.config = (CanalSinkFullConfig) ((SinkConnectorContext) connectorContext).getSinkConfig(); + SinkConnectorContext sinkConnectorContext = (SinkConnectorContext) connectorContext; + CanalSinkConfig canalSinkConfig = (CanalSinkConfig) sinkConnectorContext.getSinkConfig(); + this.config = ConfigUtil.parse(canalSinkConfig.getSinkConfig(), CanalSinkFullConfig.class); init(); } private void init() { - if (config.getSinkConfig() == null) { + if (config.getSinkConnectorConfig() == null) { throw new EventMeshException(String.format("[%s] sink config is null", this.getClass())); } - DatabaseConnection.sinkConfig = this.config.getSinkConfig(); + DatabaseConnection.sinkConfig = this.config.getSinkConnectorConfig(); DatabaseConnection.initSinkConnection(); DatabaseConnection.sinkDataSource.setDefaultAutoCommit(false); - tableMgr = new RdbTableMgr(this.config.getSinkConfig(), DatabaseConnection.sinkDataSource); + tableMgr = new RdbTableMgr(this.config.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); } @Override @@ 
-123,7 +132,9 @@ public void put(List sinkRecords) { return; } ConnectRecord record = sinkRecords.get(0); - List> data = (List>) record.getData(); + List> data = + JsonUtils.parseTypeReferenceObject((byte[]) record.getData(), new TypeReference>>() { + }); if (data == null || data.isEmpty()) { if (log.isDebugEnabled()) { log.debug("[{}] got rows data is none", this.getClass()); @@ -159,13 +170,16 @@ public void put(List sinkRecords) { } statement.executeBatch(); connection.commit(); + record.getCallback().onSuccess(convertToSendResult(record)); } catch (SQLException e) { log.warn("full sink process schema [{}] table [{}] connector write fail", tableDefinition.getSchemaName(), tableDefinition.getTableName(), e); LockSupport.parkNanos(3000 * 1000L); + record.getCallback().onException(buildSendExceptionContext(record, e)); } catch (Exception e) { log.error("full sink process schema [{}] table [{}] catch unknown exception", tableDefinition.getSchemaName(), tableDefinition.getTableName(), e); + record.getCallback().onException(buildSendExceptionContext(record, e)); try { if (connection != null && !connection.isClosed()) { connection.rollback(); @@ -193,6 +207,25 @@ public void put(List sinkRecords) { } } + private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { + SendExceptionContext sendExceptionContext = new SendExceptionContext(); + sendExceptionContext.setMessageId(record.getRecordId()); + sendExceptionContext.setCause(e); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + sendExceptionContext.setTopic(record.getExtension("topic")); + } + return sendExceptionContext; + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + result.setMessageId(record.getRecordId()); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + private void 
setPrepareParams(PreparedStatement preparedStatement, Map col, List columnDefs) throws Exception { for (int i = 0; i < columnDefs.size(); i++) { writeColumn(preparedStatement, i + 1, columnDefs.get(i), col.get(columnDefs.get(i).getName())); diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java new file mode 100644 index 0000000000..e165a5ffe6 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java @@ -0,0 +1,865 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.sink.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkIncrementConfig; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.SqlUtils; +import org.apache.eventmesh.connector.canal.dialect.DbDialect; +import org.apache.eventmesh.connector.canal.dialect.MysqlDialect; +import org.apache.eventmesh.connector.canal.interceptor.SqlBuilderLoadInterceptor; +import org.apache.eventmesh.connector.canal.model.EventColumn; +import org.apache.eventmesh.connector.canal.model.EventType; +import org.apache.eventmesh.connector.canal.sink.DbLoadContext; +import org.apache.eventmesh.connector.canal.sink.DbLoadData; +import org.apache.eventmesh.connector.canal.sink.DbLoadData.TableLoadData; +import org.apache.eventmesh.connector.canal.sink.DbLoadMerger; +import org.apache.eventmesh.connector.canal.sink.GtidBatch; +import org.apache.eventmesh.connector.canal.sink.GtidBatchManager; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import org.apache.commons.lang.StringUtils; +import 
org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.lang3.SerializationUtils; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import org.springframework.dao.DataAccessException; +import org.springframework.dao.DeadlockLoserDataAccessException; +import org.springframework.jdbc.core.BatchPreparedStatementSetter; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.PreparedStatementSetter; +import org.springframework.jdbc.core.StatementCallback; +import org.springframework.jdbc.core.StatementCreatorUtils; +import org.springframework.jdbc.support.lob.DefaultLobHandler; +import org.springframework.jdbc.support.lob.LobCreator; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.util.CollectionUtils; + +import com.alibaba.otter.canal.common.utils.NamedThreadFactory; +import com.fasterxml.jackson.core.type.TypeReference; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSinkIncrementConnector implements Sink, ConnectorCreateService { + + private CanalSinkIncrementConfig sinkConfig; + + private JdbcTemplate jdbcTemplate; + + private SqlBuilderLoadInterceptor interceptor; + + private DbDialect dbDialect; + + private ExecutorService executor; + + private ExecutorService gtidSingleExecutor; + + private int batchSize = 50; + + private boolean useBatch = true; + + private RdbTableMgr tableMgr; + + @Override + public Class configClass() { + return 
CanalSinkIncrementConfig.class; + } + + @Override + public void init(Config config) throws Exception { + // init config for canal source connector + this.sinkConfig = (CanalSinkIncrementConfig) config; + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + // init config for canal source connector + SinkConnectorContext sinkConnectorContext = (SinkConnectorContext) connectorContext; + CanalSinkConfig canalSinkConfig = (CanalSinkConfig) sinkConnectorContext.getSinkConfig(); + this.sinkConfig = ConfigUtil.parse(canalSinkConfig.getSinkConfig(), CanalSinkIncrementConfig.class); + this.batchSize = sinkConfig.getBatchSize(); + this.useBatch = sinkConfig.getUseBatch(); + DatabaseConnection.sinkConfig = this.sinkConfig.getSinkConnectorConfig(); + DatabaseConnection.initSinkConnection(); + jdbcTemplate = new JdbcTemplate(DatabaseConnection.sinkDataSource); + dbDialect = new MysqlDialect(jdbcTemplate, new DefaultLobHandler()); + interceptor = new SqlBuilderLoadInterceptor(); + interceptor.setDbDialect(dbDialect); + tableMgr = new RdbTableMgr(sinkConfig.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); + executor = new ThreadPoolExecutor(sinkConfig.getPoolSize(), + sinkConfig.getPoolSize(), + 0L, + TimeUnit.MILLISECONDS, + new ArrayBlockingQueue<>(sinkConfig.getPoolSize() * 4), + new NamedThreadFactory("canalSink"), + new ThreadPoolExecutor.CallerRunsPolicy()); + gtidSingleExecutor = Executors.newSingleThreadExecutor(r -> new Thread(r, "gtidSingleExecutor")); + } + + @Override + public void start() throws Exception { + tableMgr.start(); + } + + @Override + public void commit(ConnectRecord record) { + + } + + @Override + public String name() { + return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void stop() { + executor.shutdown(); + gtidSingleExecutor.shutdown(); + } + + @Override + public void put(List 
sinkRecords) { + DbLoadContext context = new DbLoadContext(); + for (ConnectRecord connectRecord : sinkRecords) { + List canalConnectRecordList = new ArrayList<>(); + + List canalConnectRecords = convertToCanalConnectRecord(connectRecord); + + // deep copy connectRecord data + for (CanalConnectRecord record : canalConnectRecords) { + canalConnectRecordList.add(SerializationUtils.clone(record)); + } + canalConnectRecordList = filterRecord(canalConnectRecordList); + if (isDdlDatas(canalConnectRecordList)) { + doDdl(context, canalConnectRecordList, connectRecord); + } else if (sinkConfig.isGTIDMode()) { + doLoadWithGtid(context, sinkConfig, connectRecord); + } else { + canalConnectRecordList = DbLoadMerger.merge(canalConnectRecordList); + + DbLoadData loadData = new DbLoadData(); + doBefore(canalConnectRecordList, loadData); + + doLoad(context, sinkConfig, loadData, connectRecord); + + } + + } + } + + @Override + public Sink create() { + return new CanalSinkIncrementConnector(); + } + + private boolean isDdlDatas(List canalConnectRecordList) { + boolean result = false; + for (CanalConnectRecord canalConnectRecord : canalConnectRecordList) { + result |= canalConnectRecord.getEventType().isDdl(); + if (result && !canalConnectRecord.getEventType().isDdl()) { + throw new RuntimeException("ddl/dml can't be in one batch, it's may be a bug , pls submit issues."); + } + } + return result; + } + + private List filterRecord(List canalConnectRecordList) { + return canalConnectRecordList.stream() + .filter(record -> tableMgr.getTable(record.getSchemaName(), record.getTableName()) != null) + .collect(Collectors.toList()); + } + + private void doDdl(DbLoadContext context, List canalConnectRecordList, ConnectRecord connectRecord) { + for (final CanalConnectRecord record : canalConnectRecordList) { + try { + Boolean result = jdbcTemplate.execute(new StatementCallback() { + + public Boolean doInStatement(Statement stmt) throws SQLException, DataAccessException { + boolean result = 
true; + if (StringUtils.isNotEmpty(record.getDdlSchemaName())) { + result &= stmt.execute("use `" + record.getDdlSchemaName() + "`"); + } + result &= stmt.execute(record.getSql()); + return result; + } + }); + if (Boolean.TRUE.equals(result)) { + context.getProcessedRecords().add(record); + } else { + context.getFailedRecords().add(record); + } + } catch (Throwable e) { + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, e)); + throw new RuntimeException(e); + } + } + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + + private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { + SendExceptionContext sendExceptionContext = new SendExceptionContext(); + sendExceptionContext.setMessageId(record.getRecordId()); + sendExceptionContext.setCause(e); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + sendExceptionContext.setTopic(record.getExtension("topic")); + } + return sendExceptionContext; + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + result.setMessageId(record.getRecordId()); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + + private void doBefore(List canalConnectRecordList, final DbLoadData loadData) { + for (final CanalConnectRecord record : canalConnectRecordList) { + boolean filter = interceptor.before(sinkConfig, record); + if (!filter) { + loadData.merge(record); + } + } + } + + private void doLoad(DbLoadContext context, CanalSinkIncrementConfig sinkConfig, DbLoadData loadData, ConnectRecord connectRecord) { + List> batchDatas = new ArrayList<>(); + for (TableLoadData tableData : loadData.getTables()) { + if (useBatch) { + batchDatas.addAll(split(tableData.getDeleteDatas())); + } else { + for (CanalConnectRecord data : tableData.getDeleteDatas()) { + 
batchDatas.add(Arrays.asList(data)); + } + } + } + + doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); + + batchDatas.clear(); + + for (TableLoadData tableData : loadData.getTables()) { + if (useBatch) { + batchDatas.addAll(split(tableData.getInsertDatas())); + batchDatas.addAll(split(tableData.getUpdateDatas())); + } else { + for (CanalConnectRecord data : tableData.getInsertDatas()) { + batchDatas.add(Arrays.asList(data)); + } + for (CanalConnectRecord data : tableData.getUpdateDatas()) { + batchDatas.add(Arrays.asList(data)); + } + } + } + + doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); + + batchDatas.clear(); + } + + private void doLoadWithGtid(DbLoadContext context, CanalSinkIncrementConfig sinkConfig, ConnectRecord connectRecord) { + int batchIndex = connectRecord.getExtension("batchIndex", Integer.class); + int totalBatches = connectRecord.getExtension("totalBatches", Integer.class); + List canalConnectRecordList = convertToCanalConnectRecord(connectRecord); + + String gtid = canalConnectRecordList.get(0).getCurrentGtid(); + GtidBatchManager.addBatch(gtid, batchIndex, totalBatches, canalConnectRecordList); + // check whether the batch is complete + if (GtidBatchManager.isComplete(gtid)) { + GtidBatch batch = GtidBatchManager.getGtidBatch(gtid); + List> totalRows = batch.getBatches(); + List filteredRows = new ArrayList<>(); + for (List canalConnectRecords : totalRows) { + canalConnectRecords = filterRecord(canalConnectRecords); + if (!CollectionUtils.isEmpty(canalConnectRecords)) { + for (final CanalConnectRecord record : canalConnectRecords) { + boolean filter = interceptor.before(sinkConfig, record); + filteredRows.add(record); + } + } + } + context.setGtid(gtid); + Future result = gtidSingleExecutor.submit(new DbLoadWorker(context, filteredRows, dbDialect, false, sinkConfig)); + Exception ex = null; + try { + ex = result.get(); + if (ex == null) { + 
connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } catch (Exception e) { + ex = e; + } + Boolean skipException = sinkConfig.getSkipException(); + if (skipException != null && skipException) { + if (ex != null) { + // do skip + log.warn("skip exception will ack data : {} , caused by {}", + filteredRows, + ExceptionUtils.getFullStackTrace(ex)); + GtidBatchManager.removeGtidBatch(gtid); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } else { + if (ex != null) { + log.error("sink connector will shutdown by " + ex.getMessage(), ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); + gtidSingleExecutor.shutdown(); + System.exit(1); + } else { + GtidBatchManager.removeGtidBatch(gtid); + } + } + } else { + log.info("Batch received, waiting for other batches."); + // ack this record + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } + + private List convertToCanalConnectRecord(ConnectRecord connectRecord) { + List canalConnectRecordList; + try { + canalConnectRecordList = + JsonUtils.parseTypeReferenceObject((byte[]) connectRecord.getData(), new TypeReference>() { + }); + } catch (Exception e) { + log.error("Failed to parse the canalConnectRecords.", e); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, e)); + throw new RuntimeException("Failed to parse the canalConnectRecords.", e); + } + return canalConnectRecordList; + } + + private List> split(List records) { + List> result = new ArrayList<>(); + if (records == null || records.isEmpty()) { + return result; + } else { + int[] bits = new int[records.size()]; + for (int i = 0; i < bits.length; i++) { + while (i < bits.length && bits[i] == 1) { + i++; + } + + if (i >= bits.length) { + break; + } + + List batch = new ArrayList<>(); + bits[i] = 1; + batch.add(records.get(i)); + for (int j = i + 1; j < bits.length && 
batch.size() < batchSize; j++) { + if (bits[j] == 0 && canBatch(records.get(i), records.get(j))) { + batch.add(records.get(j)); + bits[j] = 1; + } + } + result.add(batch); + } + + return result; + } + } + + private boolean canBatch(CanalConnectRecord source, CanalConnectRecord target) { + return StringUtils.equals(source.getSchemaName(), + target.getSchemaName()) + && StringUtils.equals(source.getTableName(), target.getTableName()) + && StringUtils.equals(source.getSql(), target.getSql()); + } + + private void doTwoPhase(DbLoadContext context, CanalSinkIncrementConfig sinkConfig, List> totalRows, boolean canBatch, + ConnectRecord connectRecord) { + List> results = new ArrayList<>(); + for (List rows : totalRows) { + if (CollectionUtils.isEmpty(rows)) { + continue; + } + results.add(executor.submit(new DbLoadWorker(context, rows, dbDialect, canBatch, sinkConfig))); + } + + boolean partFailed = false; + for (Future result : results) { + Exception ex = null; + try { + ex = result.get(); + if (ex == null) { + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } catch (Exception e) { + ex = e; + } + + if (ex != null) { + log.warn("##load phase one failed!", ex); + partFailed = true; + } + } + + if (partFailed) { + List retryRecords = new ArrayList<>(); + for (List rows : totalRows) { + retryRecords.addAll(rows); + } + + context.getFailedRecords().clear(); + + Boolean skipException = sinkConfig.getSkipException(); + if (skipException != null && skipException) { + for (CanalConnectRecord retryRecord : retryRecords) { + DbLoadWorker worker = new DbLoadWorker(context, Arrays.asList(retryRecord), dbDialect, false, sinkConfig); + try { + Exception ex = worker.call(); + if (ex != null) { + // do skip + log.warn("skip exception for data : {} , caused by {}", + retryRecord, + ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } catch (Exception ex) { + // do skip + log.warn("skip 
exception for data : {} , caused by {}", + retryRecord, + ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } + } else { + DbLoadWorker worker = new DbLoadWorker(context, retryRecords, dbDialect, false, sinkConfig); + try { + Exception ex = worker.call(); + if (ex != null) { + throw ex; + } + } catch (Exception ex) { + log.error("##load phase two failed!", ex); + log.error("sink connector will shutdown by " + ex.getMessage(), ex); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); + executor.shutdown(); + System.exit(1); + } + } + } + } + + enum ExecuteResult { + SUCCESS, ERROR, RETRY + } + + class DbLoadWorker implements Callable { + + private final DbLoadContext context; + private final DbDialect dbDialect; + private final List records; + private final boolean canBatch; + + private final CanalSinkIncrementConfig sinkConfig; + + private final List allFailedRecords = new ArrayList<>(); + private final List allProcessedRecords = new ArrayList<>(); + private final List processedRecords = new ArrayList<>(); + private final List failedRecords = new ArrayList<>(); + + public DbLoadWorker(DbLoadContext context, List records, DbDialect dbDialect, boolean canBatch, + CanalSinkIncrementConfig sinkConfig) { + this.context = context; + this.records = records; + this.canBatch = canBatch; + this.dbDialect = dbDialect; + this.sinkConfig = sinkConfig; + } + + public Exception call() throws Exception { + try { + return doCall(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private Exception doCall() { + RuntimeException error = null; + ExecuteResult exeResult = null; + + if (sinkConfig.isGTIDMode()) { + int retryCount = 0; + final List toExecuteRecords = new ArrayList<>(); + try { + if (!CollectionUtils.isEmpty(failedRecords)) { + // if failedRecords not empty, make it retry + toExecuteRecords.addAll(failedRecords); + } else { + 
toExecuteRecords.addAll(records); + // add to failed record first, maybe get lob or datasource error + failedRecords.addAll(toExecuteRecords); + } + JdbcTemplate template = dbDialect.getJdbcTemplate(); + String sourceGtid = context.getGtid(); + if (StringUtils.isNotEmpty(sourceGtid) && !sinkConfig.isMariaDB()) { + String setMySQLGtid = "SET @@session.gtid_next = '" + sourceGtid + "';"; + template.execute(setMySQLGtid); + } else if (StringUtils.isNotEmpty(sourceGtid) && sinkConfig.isMariaDB()) { + throw new RuntimeException("unsupport gtid mode for mariaDB"); + } else { + log.error("gtid is empty in gtid mode"); + throw new RuntimeException("gtid is empty in gtid mode"); + } + + final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator(); + int affect = (Integer) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { + try { + failedRecords.clear(); + processedRecords.clear(); + int affect1 = 0; + for (CanalConnectRecord record : toExecuteRecords) { + int affects = template.update(record.getSql(), new PreparedStatementSetter() { + public void setValues(PreparedStatement ps) throws SQLException { + doPreparedStatement(ps, dbDialect, lobCreator, record); + } + }); + affect1 = affect1 + affects; + processStat(record, affects, false); + } + return affect1; + } catch (Exception e) { + // rollback + status.setRollbackOnly(); + throw new RuntimeException("Failed to executed", e); + } finally { + lobCreator.close(); + } + }); + + // reset gtid + if (sinkConfig.isMariaDB()) { + throw new RuntimeException("unsupport gtid mode for mariaDB"); + } else { + String resetMySQLGtid = "SET @@session.gtid_next = 'AUTOMATIC';"; + dbDialect.getJdbcTemplate().execute(resetMySQLGtid); + } + + error = null; + exeResult = ExecuteResult.SUCCESS; + } catch (DeadlockLoserDataAccessException ex) { + error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.RETRY; + } catch (Throwable ex) { + error = new 
RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.ERROR; + } + + if (ExecuteResult.SUCCESS == exeResult) { + allFailedRecords.addAll(failedRecords); + allProcessedRecords.addAll(processedRecords); + failedRecords.clear(); + processedRecords.clear(); + } else if (ExecuteResult.RETRY == exeResult) { + retryCount = retryCount + 1; + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + int retry = 3; + if (retryCount >= retry) { + processFailedDatas(toExecuteRecords.size()); + throw new RuntimeException(String.format("execute retry %s times failed", retryCount), error); + } else { + try { + int retryWait = 3000; + int wait = retryCount * retryWait; + wait = Math.max(wait, retryWait); + Thread.sleep(wait); + } catch (InterruptedException ex) { + Thread.interrupted(); + processFailedDatas(toExecuteRecords.size()); + throw new RuntimeException(ex); + } + } + } else { + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + processFailedDatas(toExecuteRecords.size()); + throw error; + } + } else { + int index = 0; + while (index < records.size()) { + final List toExecuteRecords = new ArrayList<>(); + if (useBatch && canBatch) { + int end = Math.min(index + batchSize, records.size()); + toExecuteRecords.addAll(records.subList(index, end)); + index = end; + } else { + toExecuteRecords.add(records.get(index)); + index = index + 1; + } + + int retryCount = 0; + while (true) { + try { + if (!CollectionUtils.isEmpty(failedRecords)) { + toExecuteRecords.clear(); + toExecuteRecords.addAll(failedRecords); + } else { + failedRecords.addAll(toExecuteRecords); + } + + final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator(); + if (useBatch && canBatch) { + JdbcTemplate template = dbDialect.getJdbcTemplate(); + final String sql = toExecuteRecords.get(0).getSql(); + + int[] affects = new int[toExecuteRecords.size()]; + + affects = (int[]) 
dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { + try { + failedRecords.clear(); + processedRecords.clear(); + int[] affects1 = template.batchUpdate(sql, new BatchPreparedStatementSetter() { + + public void setValues(PreparedStatement ps, int idx) throws SQLException { + doPreparedStatement(ps, dbDialect, lobCreator, toExecuteRecords.get(idx)); + } + + public int getBatchSize() { + return toExecuteRecords.size(); + } + }); + return affects1; + } catch (Exception e) { + // rollback + status.setRollbackOnly(); + throw new RuntimeException("Failed to execute batch with GTID", e); + } finally { + lobCreator.close(); + } + }); + + for (int i = 0; i < toExecuteRecords.size(); i++) { + assert affects != null; + processStat(toExecuteRecords.get(i), affects[i], true); + } + } else { + final CanalConnectRecord record = toExecuteRecords.get(0); + JdbcTemplate template = dbDialect.getJdbcTemplate(); + int affect = 0; + affect = (Integer) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { + try { + failedRecords.clear(); + processedRecords.clear(); + int affect1 = template.update(record.getSql(), new PreparedStatementSetter() { + + public void setValues(PreparedStatement ps) throws SQLException { + doPreparedStatement(ps, dbDialect, lobCreator, record); + } + }); + return affect1; + } catch (Exception e) { + // rollback + status.setRollbackOnly(); + throw new RuntimeException("Failed to executed", e); + } finally { + lobCreator.close(); + } + }); + processStat(record, affect, false); + } + + error = null; + exeResult = ExecuteResult.SUCCESS; + } catch (DeadlockLoserDataAccessException ex) { + error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.RETRY; + } catch (Throwable ex) { + error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.ERROR; + } + + if (ExecuteResult.SUCCESS == exeResult) { + allFailedRecords.addAll(failedRecords); + 
allProcessedRecords.addAll(processedRecords); + failedRecords.clear(); + processedRecords.clear(); + break; // do next eventData + } else if (ExecuteResult.RETRY == exeResult) { + retryCount = retryCount + 1; + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + int retry = 3; + if (retryCount >= retry) { + processFailedDatas(index); + throw new RuntimeException(String.format("execute retry %s times failed", retryCount), error); + } else { + try { + int retryWait = 3000; + int wait = retryCount * retryWait; + wait = Math.max(wait, retryWait); + Thread.sleep(wait); + } catch (InterruptedException ex) { + Thread.interrupted(); + processFailedDatas(index); + throw new RuntimeException(ex); + } + } + } else { + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + processFailedDatas(index); + throw error; + } + } + } + } + + context.getFailedRecords().addAll(allFailedRecords); + context.getProcessedRecords().addAll(allProcessedRecords); + return null; + } + + private void doPreparedStatement(PreparedStatement ps, DbDialect dbDialect, LobCreator lobCreator, + CanalConnectRecord record) throws SQLException { + EventType type = record.getEventType(); + List columns = new ArrayList(); + if (type.isInsert()) { + columns.addAll(record.getColumns()); + columns.addAll(record.getKeys()); + } else if (type.isDelete()) { + columns.addAll(record.getKeys()); + } else if (type.isUpdate()) { + boolean existOldKeys = !CollectionUtils.isEmpty(record.getOldKeys()); + columns.addAll(record.getUpdatedColumns()); + columns.addAll(record.getKeys()); + if (existOldKeys) { + columns.addAll(record.getOldKeys()); + } + } + + for (int i = 0; i < columns.size(); i++) { + int paramIndex = i + 1; + EventColumn column = columns.get(i); + int sqlType = column.getColumnType(); + + Object param = null; + if (dbDialect instanceof MysqlDialect + && (sqlType == Types.TIME || sqlType == Types.TIMESTAMP || sqlType == 
Types.DATE)) { + param = column.getColumnValue(); + } else { + param = SqlUtils.stringToSqlValue(column.getColumnValue(), + sqlType, + false, + dbDialect.isEmptyStringNulled()); + } + + try { + switch (sqlType) { + case Types.CLOB: + lobCreator.setClobAsString(ps, paramIndex, (String) param); + break; + + case Types.BLOB: + lobCreator.setBlobAsBytes(ps, paramIndex, (byte[]) param); + break; + case Types.TIME: + case Types.TIMESTAMP: + case Types.DATE: + if (dbDialect instanceof MysqlDialect) { + ps.setObject(paramIndex, param); + } else { + StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); + } + break; + case Types.BIT: + if (dbDialect instanceof MysqlDialect) { + StatementCreatorUtils.setParameterValue(ps, paramIndex, Types.DECIMAL, null, param); + } else { + StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); + } + break; + default: + StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); + break; + } + } catch (SQLException ex) { + log.error("## SetParam error , [pairId={}, sqltype={}, value={}]", + record.getPairId(), sqlType, param); + throw ex; + } + } + } + + private void processStat(CanalConnectRecord record, int affect, boolean batch) { + if (batch && (affect < 1 && affect != Statement.SUCCESS_NO_INFO)) { + failedRecords.add(record); + } else if (!batch && affect < 1) { + failedRecords.add(record); + } else { + processedRecords.add(record); + // this.processStat(record, context); + } + } + + private void processFailedDatas(int index) { + allFailedRecords.addAll(failedRecords); + context.getFailedRecords().addAll(allFailedRecords); + for (; index < records.size(); index++) { + context.getFailedRecords().add(records.get(index)); + } + allProcessedRecords.addAll(processedRecords); + context.getProcessedRecords().addAll(allProcessedRecords); + } + } + +} diff --git 
a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java index 75572a5faf..5a6ceb7c3f 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.canal.source; -import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceIncrementConfig; import org.apache.eventmesh.connector.canal.CanalConnectRecord; import org.apache.eventmesh.connector.canal.model.EventColumn; import org.apache.eventmesh.connector.canal.model.EventColumnIndexComparable; @@ -48,7 +48,7 @@ @Slf4j public class EntryParser { - public static Map> parse(CanalSourceConfig sourceConfig, List datas, + public static Map> parse(CanalSourceIncrementConfig sourceConfig, List datas, RdbTableMgr tables) { List recordList = new ArrayList<>(); List transactionDataBuffer = new ArrayList<>(); @@ -90,12 +90,12 @@ public static Map> parse(CanalSourceConfig source return recordMap; } - private static boolean checkGtidForEntry(Entry entry, CanalSourceConfig sourceConfig) { + private static boolean checkGtidForEntry(Entry entry, CanalSourceIncrementConfig sourceConfig) { String currentGtid = entry.getHeader().getPropsList().get(0).getValue(); return currentGtid.contains(sourceConfig.getServerUUID()); } - private static void parseRecordListWithEntryBuffer(CanalSourceConfig sourceConfig, + private static void parseRecordListWithEntryBuffer(CanalSourceIncrementConfig sourceConfig, List recordList, List transactionDataBuffer, RdbTableMgr tables) { for (Entry bufferEntry : 
transactionDataBuffer) { @@ -115,13 +115,13 @@ private static void parseRecordListWithEntryBuffer(CanalSourceConfig sourceConfi } } - private static boolean checkNeedSync(CanalSourceConfig sourceConfig, RowChange rowChange) { + private static boolean checkNeedSync(CanalSourceIncrementConfig sourceConfig, RowChange rowChange) { Column markedColumn = null; CanalEntry.EventType eventType = rowChange.getEventType(); - if (eventType.equals(CanalEntry.EventType.DELETE) || eventType.equals(CanalEntry.EventType.UPDATE)) { + if (eventType.equals(CanalEntry.EventType.DELETE)) { markedColumn = getColumnIgnoreCase(rowChange.getRowDatas(0).getBeforeColumnsList(), sourceConfig.getNeedSyncMarkTableColumnName()); - } else if (eventType.equals(CanalEntry.EventType.INSERT)) { + } else if (eventType.equals(CanalEntry.EventType.INSERT) || eventType.equals(CanalEntry.EventType.UPDATE)) { markedColumn = getColumnIgnoreCase(rowChange.getRowDatas(0).getAfterColumnsList(), sourceConfig.getNeedSyncMarkTableColumnName()); } @@ -141,7 +141,7 @@ private static Column getColumnIgnoreCase(List columns, String columName return null; } - private static List internParse(CanalSourceConfig sourceConfig, Entry entry, + private static List internParse(CanalSourceIncrementConfig sourceConfig, Entry entry, RdbTableMgr tableMgr) { String schemaName = entry.getHeader().getSchemaName(); String tableName = entry.getHeader().getTableName(); @@ -180,7 +180,7 @@ private static List internParse(CanalSourceConfig sourceConf return recordList; } - private static CanalConnectRecord internParse(CanalSourceConfig canalSourceConfig, Entry entry, + private static CanalConnectRecord internParse(CanalSourceIncrementConfig canalSourceConfig, Entry entry, RowChange rowChange, RowData rowData) { CanalConnectRecord canalConnectRecord = new CanalConnectRecord(); canalConnectRecord.setTableName(entry.getHeader().getTableName()); diff --git 
a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java index 062bbb93a8..c0b2063d28 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java @@ -32,6 +32,7 @@ import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -83,6 +84,7 @@ public CanalFullProducer(BlockingQueue> queue, DataSource da public void choosePrimaryKey() { for (RdbColumnDefinition col : tableDefinition.getColumnDefinitions().values()) { if (position.getCurPrimaryKeyCols().get(col.getName()) != null) { + // random choose the first primary key from the table choosePrimaryKey.set(col.getName()); log.info("schema [{}] table [{}] choose primary key [{}]", tableDefinition.getSchemaName(), tableDefinition.getTableName(), col.getName()); @@ -95,6 +97,7 @@ public void choosePrimaryKey() { public void start(AtomicBoolean flag) { choosePrimaryKey(); + // used to page query boolean isFirstSelect = true; List> rows = new LinkedList<>(); while (flag.get()) { @@ -120,6 +123,7 @@ public void start(AtomicBoolean flag) { continue; } refreshPosition(lastCol); + // may be not reach commitConnectRecord(rows); rows = new LinkedList<>(); } @@ -127,6 +131,7 @@ public void start(AtomicBoolean flag) { if (lastCol == null || checkIsScanFinish(lastCol)) { log.info("full scan db [{}] table [{}] finish", tableDefinition.getSchemaName(), tableDefinition.getTableName()); + // commit the last 
record if rows.size() < flushSize commitConnectRecord(rows); return; } @@ -164,7 +169,8 @@ private void commitConnectRecord(List> rows) throws Interrup offset.setPosition(jobRdbFullPosition); CanalFullRecordPartition partition = new CanalFullRecordPartition(); ArrayList records = new ArrayList<>(); - records.add(new ConnectRecord(partition, offset, System.currentTimeMillis(), rows)); + byte[] rowsData = JsonUtils.toJSONString(rows).getBytes(StandardCharsets.UTF_8); + records.add(new ConnectRecord(partition, offset, System.currentTimeMillis(), rowsData)); queue.put(records); } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java new file mode 100644 index 0000000000..841c9a4814 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.AbstractComponent; +import org.apache.eventmesh.common.EventMeshThreadFactory; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.source.position.CanalFullPositionMgr; +import org.apache.eventmesh.connector.canal.source.position.TableFullPosition; +import org.apache.eventmesh.connector.canal.source.table.RdbSimpleTable; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSourceCheckConnector extends AbstractComponent implements Source, ConnectorCreateService { + private CanalSourceFullConfig config; 
+ private CanalFullPositionMgr positionMgr; + private RdbTableMgr tableMgr; + private ThreadPoolExecutor executor; + private final BlockingQueue> queue = new LinkedBlockingQueue<>(); + private final AtomicBoolean flag = new AtomicBoolean(true); + + @Override + protected void run() throws Exception { + this.tableMgr.start(); + this.positionMgr.start(); + if (positionMgr.isFinished()) { + log.info("connector [{}] has finished the job", config.getSourceConnectorConfig().getConnectorName()); + return; + } + executor = new ThreadPoolExecutor(config.getParallel(), config.getParallel(), 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(), new EventMeshThreadFactory("canal-source-full")); + List producers = new LinkedList<>(); + if (config.getSourceConnectorConfig().getDatabases() != null) { + for (RdbDBDefinition db : config.getSourceConnectorConfig().getDatabases()) { + for (RdbTableDefinition table : db.getTables()) { + try { + log.info("it will create producer of db [{}] table [{}]", db.getSchemaName(), table.getTableName()); + RdbSimpleTable simpleTable = new RdbSimpleTable(db.getSchemaName(), table.getTableName()); + JobRdbFullPosition position = positionMgr.getPosition(simpleTable); + if (position == null) { + throw new EventMeshException(String.format("db [%s] table [%s] have none position info", + db.getSchemaName(), table.getTableName())); + } + RdbTableDefinition tableDefinition = tableMgr.getTable(simpleTable); + if (tableDefinition == null) { + throw new EventMeshException(String.format("db [%s] table [%s] have none table definition info", + db.getSchemaName(), table.getTableName())); + } + + producers.add(new CanalFullProducer(queue, DatabaseConnection.sourceDataSource, (MySQLTableDef) tableDefinition, + JsonUtils.parseObject(position.getPrimaryKeyRecords(), TableFullPosition.class), + config.getFlushSize())); + } catch (Exception e) { + log.error("create schema [{}] table [{}] producers fail", db.getSchemaName(), + table.getTableName(), e); + } + } + } 
+ } + producers.forEach(p -> executor.execute(() -> p.start(flag))); + } + + @Override + protected void shutdown() throws Exception { + flag.set(false); + if (!executor.isShutdown()) { + executor.shutdown(); + try { + if (!executor.awaitTermination(5, TimeUnit.SECONDS)) { + log.warn("wait thread pool shutdown timeout, it will shutdown now"); + executor.shutdownNow(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + log.info("shutdown thread pool fail"); + } + } + if (DatabaseConnection.sourceDataSource != null) { + DatabaseConnection.sourceDataSource.close(); + log.info("data source has been closed"); + } + } + + @Override + public Source create() { + return new CanalSourceCheckConnector(); + } + + @Override + public Class configClass() { + return CanalSourceFullConfig.class; + } + + @Override + public void init(Config config) throws Exception { + this.config = (CanalSourceFullConfig) config; + init(); + } + + private void init() { + DatabaseConnection.sourceConfig = this.config.getSourceConnectorConfig(); + DatabaseConnection.initSourceConnection(); + this.tableMgr = new RdbTableMgr(config.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); + this.positionMgr = new CanalFullPositionMgr(config, tableMgr); + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + this.config = (CanalSourceFullConfig) sourceConnectorContext.getSourceConfig(); + init(); + } + + @Override + public void commit(ConnectRecord record) { + // nothing + } + + @Override + public String name() { + return this.config.getSourceConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public List poll() { + while (flag.get()) { + try { + List records = queue.poll(5, TimeUnit.SECONDS); + if (records == null || records.isEmpty()) { + continue; + } + return 
records; + } catch (InterruptedException ignore) { + Thread.currentThread().interrupt(); + log.info("[{}] thread interrupted", this.getClass()); + return null; + } + } + log.info("[{}] life flag is stop, so return null", this.getClass()); + return null; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java index ea5ccdeed0..e24301ae07 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java @@ -19,53 +19,14 @@ import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; -import org.apache.eventmesh.common.remote.offset.RecordPosition; -import org.apache.eventmesh.common.remote.offset.canal.CanalRecordOffset; -import org.apache.eventmesh.common.remote.offset.canal.CanalRecordPartition; -import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.connector.canal.CanalConnectRecord; -import org.apache.eventmesh.connector.canal.DatabaseConnection; -import org.apache.eventmesh.connector.canal.source.EntryParser; -import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.common.remote.job.JobType; import org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; 
-import org.apache.commons.lang3.StringUtils; - -import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.LockSupport; - -import com.alibaba.otter.canal.instance.core.CanalInstance; -import com.alibaba.otter.canal.instance.core.CanalInstanceGenerator; -import com.alibaba.otter.canal.instance.manager.CanalInstanceWithManager; -import com.alibaba.otter.canal.instance.manager.model.Canal; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.ClusterMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.HAMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.IndexMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.MetaMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.RunMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.SourcingType; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.StorageMode; -import com.alibaba.otter.canal.parse.CanalEventParser; -import com.alibaba.otter.canal.parse.ha.CanalHAController; -import com.alibaba.otter.canal.parse.inbound.mysql.MysqlEventParser; -import com.alibaba.otter.canal.protocol.CanalEntry; -import com.alibaba.otter.canal.protocol.CanalEntry.Entry; -import com.alibaba.otter.canal.protocol.ClientIdentity; -import com.alibaba.otter.canal.server.embedded.CanalServerWithEmbedded; -import com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; import lombok.extern.slf4j.Slf4j; @@ -74,19 +35,7 @@ public class CanalSourceConnector implements Source, ConnectorCreateService configClass() { @@ -102,280 +51,48 @@ public void init(Config config) throws Exception { @Override public 
void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; - this.sourceConfig = (CanalSourceConfig) sourceConnectorContext.getSourceConfig(); - if (sourceConnectorContext.getRecordPositionList() != null) { - this.sourceConfig.setRecordPositions(sourceConnectorContext.getRecordPositionList()); - } - - if (StringUtils.isNotEmpty(sourceConfig.getTableFilter())) { - tableFilter = sourceConfig.getTableFilter(); - } - if (StringUtils.isNotEmpty(sourceConfig.getFieldFilter())) { - fieldFilter = sourceConfig.getFieldFilter(); - } - - canalServer = CanalServerWithEmbedded.instance(); - - canalServer.setCanalInstanceGenerator(new CanalInstanceGenerator() { - @Override - public CanalInstance generate(String destination) { - Canal canal = buildCanal(sourceConfig); - - CanalInstanceWithManager instance = new CanalInstanceWithManager(canal, tableFilter) { - - protected CanalHAController initHaController() { - return super.initHaController(); - } - - protected void startEventParserInternal(CanalEventParser parser, boolean isGroup) { - super.startEventParserInternal(parser, isGroup); - - if (eventParser instanceof MysqlEventParser) { - // set eventParser support type - ((MysqlEventParser) eventParser).setSupportBinlogFormats("ROW"); - ((MysqlEventParser) eventParser).setSupportBinlogImages("FULL"); - MysqlEventParser mysqlEventParser = (MysqlEventParser) eventParser; - mysqlEventParser.setParallel(false); - if (StringUtils.isNotEmpty(fieldFilter)) { - mysqlEventParser.setFieldFilter(fieldFilter); - } - - CanalHAController haController = mysqlEventParser.getHaController(); - if (!haController.isStart()) { - haController.start(); - } - } - } - }; - return instance; - } - }); - DatabaseConnection.sourceConfig = sourceConfig.getSourceConnectorConfig(); - DatabaseConnection.initSourceConnection(); - tableMgr = new RdbTableMgr(sourceConfig.getSourceConnectorConfig(), 
DatabaseConnection.sourceDataSource); - } - - private Canal buildCanal(CanalSourceConfig sourceConfig) { - long slaveId = 10000; - if (sourceConfig.getSlaveId() != null) { - slaveId = sourceConfig.getSlaveId(); - } - - Canal canal = new Canal(); - canal.setId(sourceConfig.getCanalInstanceId()); - canal.setName(sourceConfig.getDestination()); - canal.setDesc(sourceConfig.getDesc()); - - CanalParameter parameter = new CanalParameter(); - - parameter.setRunMode(RunMode.EMBEDDED); - parameter.setClusterMode(ClusterMode.STANDALONE); - parameter.setMetaMode(MetaMode.MEMORY); - parameter.setHaMode(HAMode.HEARTBEAT); - parameter.setIndexMode(IndexMode.MEMORY); - parameter.setStorageMode(StorageMode.MEMORY); - parameter.setMemoryStorageBufferSize(32 * 1024); - - parameter.setSourcingType(SourcingType.MYSQL); - parameter.setDbAddresses(Collections.singletonList(new InetSocketAddress(sourceConfig.getSourceConnectorConfig().getDbAddress(), - sourceConfig.getSourceConnectorConfig().getDbPort()))); - parameter.setDbUsername(sourceConfig.getSourceConnectorConfig().getUserName()); - parameter.setDbPassword(sourceConfig.getSourceConnectorConfig().getPassWord()); - - // set if enabled gtid mode - parameter.setGtidEnable(sourceConfig.isGTIDMode()); - - // check positions - // example: Arrays.asList("{\"journalName\":\"mysql-bin.000001\",\"position\":6163L,\"timestamp\":1322803601000L}", - // "{\"journalName\":\"mysql-bin.000001\",\"position\":6163L,\"timestamp\":1322803601000L}") - if (sourceConfig.getRecordPositions() != null && !sourceConfig.getRecordPositions().isEmpty()) { - List recordPositions = sourceConfig.getRecordPositions(); - List positions = new ArrayList<>(); - recordPositions.forEach(recordPosition -> { - Map recordPositionMap = new HashMap<>(); - CanalRecordPartition canalRecordPartition = (CanalRecordPartition) (recordPosition.getRecordPartition()); - CanalRecordOffset canalRecordOffset = (CanalRecordOffset) (recordPosition.getRecordOffset()); - 
recordPositionMap.put("journalName", canalRecordPartition.getJournalName()); - recordPositionMap.put("timestamp", canalRecordPartition.getTimeStamp()); - recordPositionMap.put("position", canalRecordOffset.getOffset()); - // for mariaDB not support gtid mode - if (sourceConfig.isGTIDMode() && !sourceConfig.isMariaDB()) { - String gtidRange = canalRecordOffset.getGtid(); - if (gtidRange != null) { - if (canalRecordOffset.getCurrentGtid() != null) { - gtidRange = EntryParser.replaceGtidRange(canalRecordOffset.getGtid(), canalRecordOffset.getCurrentGtid(), - sourceConfig.getServerUUID()); - } - recordPositionMap.put("gtid", gtidRange); - } - } - positions.add(JsonUtils.toJSONString(recordPositionMap)); - }); - parameter.setPositions(positions); + if (sourceConnectorContext.getJobType().equals(JobType.FULL)) { + this.source = new CanalSourceFullConnector(); + } else if (sourceConnectorContext.getJobType().equals(JobType.INCREASE)) { + this.source = new CanalSourceIncrementConnector(); + } else if (sourceConnectorContext.getJobType().equals(JobType.CHECK)) { + this.source = new CanalSourceCheckConnector(); + } else { + throw new RuntimeException("unsupported job type " + sourceConnectorContext.getJobType()); } - - parameter.setSlaveId(slaveId); - - parameter.setDefaultConnectionTimeoutInSeconds(30); - parameter.setConnectionCharset("UTF-8"); - parameter.setConnectionCharsetNumber((byte) 33); - parameter.setReceiveBufferSize(8 * 1024); - parameter.setSendBufferSize(8 * 1024); - - // heartbeat detect - parameter.setDetectingEnable(false); - - parameter.setDdlIsolation(sourceConfig.isDdlSync()); - parameter.setFilterTableError(sourceConfig.isFilterTableError()); - parameter.setMemoryStorageRawEntry(false); - - canal.setCanalParameter(parameter); - return canal; + this.source.init(sourceConnectorContext); } @Override public void start() throws Exception { - if (running) { - return; - } - tableMgr.start(); - canalServer.start(); - - 
canalServer.start(sourceConfig.getDestination()); - this.clientIdentity = new ClientIdentity(sourceConfig.getDestination(), sourceConfig.getClientId(), tableFilter); - canalServer.subscribe(clientIdentity); - - running = true; + this.source.start(); } @Override public void commit(ConnectRecord record) { - long batchId = Long.parseLong(record.getExtension("messageId")); - int batchIndex = record.getExtension("batchIndex", Integer.class); - int totalBatches = record.getExtension("totalBatches", Integer.class); - if (batchIndex == totalBatches - 1) { - log.debug("ack records batchIndex:{}, totalBatches:{}, batchId:{}", - batchIndex, totalBatches, batchId); - canalServer.ack(clientIdentity, batchId); - } + this.source.commit(record); } @Override public String name() { - return this.sourceConfig.getSourceConnectorConfig().getConnectorName(); + return this.source.name(); } @Override public void onException(ConnectRecord record) { - + this.source.onException(record); } @Override - public void stop() { - if (!running) { - return; - } - running = false; - canalServer.stop(sourceConfig.getDestination()); - canalServer.stop(); + public void stop() throws Exception { + this.source.stop(); } @Override public List poll() { - int emptyTimes = 0; - com.alibaba.otter.canal.protocol.Message message = null; - if (sourceConfig.getBatchTimeout() < 0) { - while (running) { - message = canalServer.getWithoutAck(clientIdentity, sourceConfig.getBatchSize()); - if (message == null || message.getId() == -1L) { // empty - applyWait(emptyTimes++); - } else { - break; - } - } - } else { // perform with timeout - while (running) { - message = - canalServer.getWithoutAck(clientIdentity, sourceConfig.getBatchSize(), sourceConfig.getBatchTimeout(), TimeUnit.MILLISECONDS); - if (message == null || message.getId() == -1L) { // empty - continue; - } - break; - } - } - - List entries; - assert message != null; - if (message.isRaw()) { - entries = new ArrayList<>(message.getRawEntries().size()); - for 
(ByteString entry : message.getRawEntries()) { - try { - entries.add(CanalEntry.Entry.parseFrom(entry)); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - } - } else { - entries = message.getEntries(); - } - - List result = new ArrayList<>(); - // key: Xid offset - Map> connectorRecordMap = EntryParser.parse(sourceConfig, entries, tableMgr); - - if (!connectorRecordMap.isEmpty()) { - Set>> entrySet = connectorRecordMap.entrySet(); - for (Map.Entry> entry : entrySet) { - List connectRecordList = entry.getValue(); - CanalConnectRecord lastRecord = entry.getValue().get(connectRecordList.size() - 1); - CanalRecordPartition canalRecordPartition = new CanalRecordPartition(); - canalRecordPartition.setServerUUID(sourceConfig.getServerUUID()); - canalRecordPartition.setJournalName(lastRecord.getJournalName()); - canalRecordPartition.setTimeStamp(lastRecord.getExecuteTime()); - // Xid offset with gtid - Long binLogOffset = entry.getKey(); - CanalRecordOffset canalRecordOffset = new CanalRecordOffset(); - canalRecordOffset.setOffset(binLogOffset); - if (StringUtils.isNotEmpty(lastRecord.getGtid()) && StringUtils.isNotEmpty(lastRecord.getCurrentGtid())) { - canalRecordOffset.setGtid(lastRecord.getGtid()); - canalRecordOffset.setCurrentGtid(lastRecord.getCurrentGtid()); - } - - // split record list - List> splitLists = new ArrayList<>(); - for (int i = 0; i < connectRecordList.size(); i += sourceConfig.getBatchSize()) { - int end = Math.min(i + sourceConfig.getBatchSize(), connectRecordList.size()); - List subList = connectRecordList.subList(i, end); - splitLists.add(subList); - } - - for (int i = 0; i < splitLists.size(); i++) { - ConnectRecord connectRecord = new ConnectRecord(canalRecordPartition, canalRecordOffset, System.currentTimeMillis()); - connectRecord.addExtension("messageId", String.valueOf(message.getId())); - connectRecord.addExtension("batchIndex", i); - connectRecord.addExtension("totalBatches", splitLists.size()); - 
connectRecord.setData(splitLists.get(i)); - result.add(connectRecord); - } - } - } else { - // for the message has been filtered need ack message - canalServer.ack(clientIdentity, message.getId()); - } - - return result; - } - - // Handle the situation of no data and avoid empty loop death - private void applyWait(int emptyTimes) { - int newEmptyTimes = Math.min(emptyTimes, maxEmptyTimes); - if (emptyTimes <= 3) { - Thread.yield(); - } else { - LockSupport.parkNanos(1000 * 1000L * newEmptyTimes); - } + return this.source.poll(); } @Override diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java index 97730463b5..c2632ee472 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java @@ -20,6 +20,7 @@ import org.apache.eventmesh.common.AbstractComponent; import org.apache.eventmesh.common.EventMeshThreadFactory; import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceFullConfig; import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; @@ -32,11 +33,11 @@ import org.apache.eventmesh.connector.canal.source.position.TableFullPosition; import org.apache.eventmesh.connector.canal.source.table.RdbSimpleTable; import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; -import 
org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.util.LinkedList; import java.util.List; @@ -49,7 +50,8 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class CanalSourceFullConnector extends AbstractComponent implements Source, ConnectorCreateService { +public class CanalSourceFullConnector extends AbstractComponent implements Source { + private CanalSourceFullConfig config; private CanalFullPositionMgr positionMgr; private RdbTableMgr tableMgr; @@ -62,14 +64,14 @@ protected void run() throws Exception { this.tableMgr.start(); this.positionMgr.start(); if (positionMgr.isFinished()) { - log.info("connector [{}] has finished the job", config.getConnectorConfig().getConnectorName()); + log.info("connector [{}] has finished the job", config.getSourceConnectorConfig().getConnectorName()); return; } executor = new ThreadPoolExecutor(config.getParallel(), config.getParallel(), 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new EventMeshThreadFactory("canal-source-full")); List producers = new LinkedList<>(); - if (config.getConnectorConfig().getDatabases() != null) { - for (RdbDBDefinition db : config.getConnectorConfig().getDatabases()) { + if (config.getSourceConnectorConfig().getDatabases() != null) { + for (RdbDBDefinition db : config.getSourceConnectorConfig().getDatabases()) { for (RdbTableDefinition table : db.getTables()) { try { log.info("it will create producer of db [{}] table [{}]", db.getSchemaName(), table.getTableName()); @@ -119,11 +121,6 @@ protected void shutdown() throws Exception { } } - @Override - public Source create() { - return new CanalSourceFullConnector(); - } - @Override 
public Class configClass() { return CanalSourceFullConfig.class; @@ -136,16 +133,17 @@ public void init(Config config) throws Exception { } private void init() { - DatabaseConnection.sourceConfig = this.config.getConnectorConfig(); + DatabaseConnection.sourceConfig = this.config.getSourceConnectorConfig(); DatabaseConnection.initSourceConnection(); - this.tableMgr = new RdbTableMgr(config.getConnectorConfig(), DatabaseConnection.sourceDataSource); + this.tableMgr = new RdbTableMgr(config.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); this.positionMgr = new CanalFullPositionMgr(config, tableMgr); } @Override public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; - this.config = (CanalSourceFullConfig) sourceConnectorContext.getSourceConfig(); + CanalSourceConfig canalSourceConfig = (CanalSourceConfig) sourceConnectorContext.getSourceConfig(); + this.config = ConfigUtil.parse(canalSourceConfig.getSourceConfig(), CanalSourceFullConfig.class); init(); } @@ -156,7 +154,7 @@ public void commit(ConnectRecord record) { @Override public String name() { - return this.config.getConnectorConfig().getConnectorName(); + return this.config.getSourceConnectorConfig().getConnectorName(); } @Override diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java new file mode 100644 index 0000000000..4f7041b478 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java @@ -0,0 +1,383 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceIncrementConfig; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordOffset; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordPartition; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.source.EntryParser; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import org.apache.commons.lang3.StringUtils; + +import java.net.InetSocketAddress; +import 
java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.LockSupport; + +import com.alibaba.otter.canal.instance.core.CanalInstance; +import com.alibaba.otter.canal.instance.core.CanalInstanceGenerator; +import com.alibaba.otter.canal.instance.manager.CanalInstanceWithManager; +import com.alibaba.otter.canal.instance.manager.model.Canal; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.ClusterMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.HAMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.IndexMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.MetaMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.RunMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.SourcingType; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.StorageMode; +import com.alibaba.otter.canal.parse.CanalEventParser; +import com.alibaba.otter.canal.parse.ha.CanalHAController; +import com.alibaba.otter.canal.parse.inbound.mysql.MysqlEventParser; +import com.alibaba.otter.canal.protocol.CanalEntry.Entry; +import com.alibaba.otter.canal.protocol.ClientIdentity; +import com.alibaba.otter.canal.server.embedded.CanalServerWithEmbedded; +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSourceIncrementConnector implements Source { + + private CanalSourceIncrementConfig sourceConfig; + + private CanalServerWithEmbedded canalServer; + + private ClientIdentity clientIdentity; + + private String tableFilter = null; + + private String fieldFilter = null; + 
+ private volatile boolean running = false; + + private static final int maxEmptyTimes = 10; + + private RdbTableMgr tableMgr; + + @Override + public Class configClass() { + return CanalSourceConfig.class; + } + + @Override + public void init(Config config) throws Exception { + // init config for canal source connector + this.sourceConfig = (CanalSourceIncrementConfig) config; + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + CanalSourceConfig canalSourceConfig = (CanalSourceConfig) sourceConnectorContext.getSourceConfig(); + this.sourceConfig = ConfigUtil.parse(canalSourceConfig.getSourceConfig(), CanalSourceIncrementConfig.class); + if (sourceConnectorContext.getRecordPositionList() != null) { + this.sourceConfig.setRecordPositions(sourceConnectorContext.getRecordPositionList()); + } + + if (StringUtils.isNotEmpty(sourceConfig.getTableFilter())) { + tableFilter = sourceConfig.getTableFilter(); + } + if (StringUtils.isNotEmpty(sourceConfig.getFieldFilter())) { + fieldFilter = sourceConfig.getFieldFilter(); + } + + canalServer = CanalServerWithEmbedded.instance(); + + canalServer.setCanalInstanceGenerator(new CanalInstanceGenerator() { + @Override + public CanalInstance generate(String destination) { + Canal canal = buildCanal(sourceConfig); + + CanalInstanceWithManager instance = new CanalInstanceWithManager(canal, tableFilter) { + + protected CanalHAController initHaController() { + return super.initHaController(); + } + + protected void startEventParserInternal(CanalEventParser parser, boolean isGroup) { + super.startEventParserInternal(parser, isGroup); + + if (eventParser instanceof MysqlEventParser) { + // set eventParser support type + ((MysqlEventParser) eventParser).setSupportBinlogFormats("ROW"); + ((MysqlEventParser) eventParser).setSupportBinlogImages("FULL"); + MysqlEventParser mysqlEventParser = (MysqlEventParser) 
eventParser; + mysqlEventParser.setParallel(false); + if (StringUtils.isNotEmpty(fieldFilter)) { + mysqlEventParser.setFieldFilter(fieldFilter); + } + + CanalHAController haController = mysqlEventParser.getHaController(); + if (!haController.isStart()) { + haController.start(); + } + } + } + }; + return instance; + } + }); + DatabaseConnection.sourceConfig = sourceConfig.getSourceConnectorConfig(); + DatabaseConnection.initSourceConnection(); + tableMgr = new RdbTableMgr(sourceConfig.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); + } + + private Canal buildCanal(CanalSourceIncrementConfig sourceConfig) { + long slaveId = 10000; + if (sourceConfig.getSlaveId() != null) { + slaveId = sourceConfig.getSlaveId(); + } + + Canal canal = new Canal(); + canal.setId(sourceConfig.getCanalInstanceId()); + canal.setName(sourceConfig.getDestination()); + canal.setDesc(sourceConfig.getDesc()); + + CanalParameter parameter = new CanalParameter(); + + parameter.setRunMode(RunMode.EMBEDDED); + parameter.setClusterMode(ClusterMode.STANDALONE); + parameter.setMetaMode(MetaMode.MEMORY); + parameter.setHaMode(HAMode.HEARTBEAT); + parameter.setIndexMode(IndexMode.MEMORY); + parameter.setStorageMode(StorageMode.MEMORY); + parameter.setMemoryStorageBufferSize(32 * 1024); + + parameter.setSourcingType(SourcingType.MYSQL); + parameter.setDbAddresses(Collections.singletonList(new InetSocketAddress(sourceConfig.getSourceConnectorConfig().getDbAddress(), + sourceConfig.getSourceConnectorConfig().getDbPort()))); + parameter.setDbUsername(sourceConfig.getSourceConnectorConfig().getUserName()); + parameter.setDbPassword(sourceConfig.getSourceConnectorConfig().getPassWord()); + + // set if enabled gtid mode + parameter.setGtidEnable(sourceConfig.isGTIDMode()); + + // check positions + // example: Arrays.asList("{\"journalName\":\"mysql-bin.000001\",\"position\":6163L,\"timestamp\":1322803601000L}", + // 
"{\"journalName\":\"mysql-bin.000001\",\"position\":6163L,\"timestamp\":1322803601000L}") + if (sourceConfig.getRecordPositions() != null && !sourceConfig.getRecordPositions().isEmpty()) { + List recordPositions = sourceConfig.getRecordPositions(); + List positions = new ArrayList<>(); + recordPositions.forEach(recordPosition -> { + Map recordPositionMap = new HashMap<>(); + CanalRecordPartition canalRecordPartition = (CanalRecordPartition) (recordPosition.getRecordPartition()); + CanalRecordOffset canalRecordOffset = (CanalRecordOffset) (recordPosition.getRecordOffset()); + recordPositionMap.put("journalName", canalRecordPartition.getJournalName()); + recordPositionMap.put("timestamp", canalRecordPartition.getTimeStamp()); + recordPositionMap.put("position", canalRecordOffset.getOffset()); + // for mariaDB not support gtid mode + if (sourceConfig.isGTIDMode() && !sourceConfig.isMariaDB()) { + String gtidRange = canalRecordOffset.getGtid(); + if (gtidRange != null) { + if (canalRecordOffset.getCurrentGtid() != null) { + gtidRange = EntryParser.replaceGtidRange(canalRecordOffset.getGtid(), canalRecordOffset.getCurrentGtid(), + sourceConfig.getServerUUID()); + } + recordPositionMap.put("gtid", gtidRange); + } + } + positions.add(JsonUtils.toJSONString(recordPositionMap)); + }); + parameter.setPositions(positions); + } + + parameter.setSlaveId(slaveId); + + parameter.setDefaultConnectionTimeoutInSeconds(30); + parameter.setConnectionCharset("UTF-8"); + parameter.setConnectionCharsetNumber((byte) 33); + parameter.setReceiveBufferSize(8 * 1024); + parameter.setSendBufferSize(8 * 1024); + + // heartbeat detect + parameter.setDetectingEnable(false); + + parameter.setDdlIsolation(sourceConfig.isDdlSync()); + parameter.setFilterTableError(sourceConfig.isFilterTableError()); + parameter.setMemoryStorageRawEntry(false); + + canal.setCanalParameter(parameter); + return canal; + } + + + @Override + public void start() throws Exception { + if (running) { + return; + } + 
tableMgr.start(); + canalServer.start(); + + canalServer.start(sourceConfig.getDestination()); + this.clientIdentity = new ClientIdentity(sourceConfig.getDestination(), sourceConfig.getClientId(), tableFilter); + canalServer.subscribe(clientIdentity); + + running = true; + } + + + @Override + public void commit(ConnectRecord record) { + long batchId = Long.parseLong(record.getExtension("messageId")); + int batchIndex = record.getExtension("batchIndex", Integer.class); + int totalBatches = record.getExtension("totalBatches", Integer.class); + if (batchIndex == totalBatches - 1) { + log.debug("ack records batchIndex:{}, totalBatches:{}, batchId:{}", + batchIndex, totalBatches, batchId); + canalServer.ack(clientIdentity, batchId); + } + } + + @Override + public String name() { + return this.sourceConfig.getSourceConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void stop() { + if (!running) { + return; + } + running = false; + canalServer.stop(sourceConfig.getDestination()); + canalServer.stop(); + } + + @Override + public List poll() { + int emptyTimes = 0; + com.alibaba.otter.canal.protocol.Message message = null; + if (sourceConfig.getBatchTimeout() < 0) { + while (running) { + message = canalServer.getWithoutAck(clientIdentity, sourceConfig.getBatchSize()); + if (message == null || message.getId() == -1L) { // empty + applyWait(emptyTimes++); + } else { + break; + } + } + } else { // perform with timeout + while (running) { + message = + canalServer.getWithoutAck(clientIdentity, sourceConfig.getBatchSize(), sourceConfig.getBatchTimeout(), TimeUnit.MILLISECONDS); + if (message == null || message.getId() == -1L) { // empty + continue; + } + break; + } + } + + List entries; + assert message != null; + if (message.isRaw()) { + entries = new ArrayList<>(message.getRawEntries().size()); + for (ByteString entry : message.getRawEntries()) { + try { + entries.add(Entry.parseFrom(entry)); + 
} catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } + } + } else { + entries = message.getEntries(); + } + + List result = new ArrayList<>(); + // key: Xid offset + Map> connectorRecordMap = EntryParser.parse(sourceConfig, entries, tableMgr); + + if (!connectorRecordMap.isEmpty()) { + Set>> entrySet = connectorRecordMap.entrySet(); + for (Map.Entry> entry : entrySet) { + List connectRecordList = entry.getValue(); + CanalConnectRecord lastRecord = entry.getValue().get(connectRecordList.size() - 1); + CanalRecordPartition canalRecordPartition = new CanalRecordPartition(); + canalRecordPartition.setServerUUID(sourceConfig.getServerUUID()); + canalRecordPartition.setJournalName(lastRecord.getJournalName()); + canalRecordPartition.setTimeStamp(lastRecord.getExecuteTime()); + // Xid offset with gtid + Long binLogOffset = entry.getKey(); + CanalRecordOffset canalRecordOffset = new CanalRecordOffset(); + canalRecordOffset.setOffset(binLogOffset); + if (StringUtils.isNotEmpty(lastRecord.getGtid()) && StringUtils.isNotEmpty(lastRecord.getCurrentGtid())) { + canalRecordOffset.setGtid(lastRecord.getGtid()); + canalRecordOffset.setCurrentGtid(lastRecord.getCurrentGtid()); + } + + // split record list + List> splitLists = new ArrayList<>(); + for (int i = 0; i < connectRecordList.size(); i += sourceConfig.getBatchSize()) { + int end = Math.min(i + sourceConfig.getBatchSize(), connectRecordList.size()); + List subList = connectRecordList.subList(i, end); + splitLists.add(subList); + } + + for (int i = 0; i < splitLists.size(); i++) { + ConnectRecord connectRecord = new ConnectRecord(canalRecordPartition, canalRecordOffset, System.currentTimeMillis()); + connectRecord.addExtension("messageId", String.valueOf(message.getId())); + connectRecord.addExtension("batchIndex", i); + connectRecord.addExtension("totalBatches", splitLists.size()); + connectRecord.setData(JsonUtils.toJSONString(splitLists.get(i)).getBytes(StandardCharsets.UTF_8)); + 
result.add(connectRecord); + } + } + } else { + // for the message has been filtered need ack message + canalServer.ack(clientIdentity, message.getId()); + } + + return result; + } + + // Handle the situation of no data and avoid empty loop death + private void applyWait(int emptyTimes) { + int newEmptyTimes = Math.min(emptyTimes, maxEmptyTimes); + if (emptyTimes <= 3) { + Thread.yield(); + } else { + LockSupport.parkNanos(1000 * 1000L * newEmptyTimes); + } + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java index a9d47b4604..0ae1f8f8ff 100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java @@ -60,7 +60,7 @@ public CanalFullPositionMgr(CanalSourceFullConfig config, RdbTableMgr tableMgr) @Override protected void run() throws Exception { - if (config == null || config.getConnectorConfig() == null || config.getConnectorConfig().getDatabases() == null) { + if (config == null || config.getSourceConnectorConfig() == null || config.getSourceConnectorConfig().getDatabases() == null) { log.info("config or database is null"); return; } @@ -93,7 +93,7 @@ public boolean isFinished() { } private void initPositions() { - for (RdbDBDefinition database : config.getConnectorConfig().getDatabases()) { + for (RdbDBDefinition database : config.getSourceConnectorConfig().getDatabases()) { for (RdbTableDefinition table : database.getTables()) { try { RdbSimpleTable simpleTable = new RdbSimpleTable(database.getSchemaName(), table.getTableName()); diff --git 
a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java index 9b6038bdea..3df110f2e7 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java @@ -18,8 +18,8 @@ package org.apache.eventmesh.connector.http.sink; import org.apache.eventmesh.common.config.connector.Config; -import org.apache.eventmesh.connector.http.sink.config.HttpSinkConfig; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.common.config.connector.http.HttpSinkConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler; import org.apache.eventmesh.connector.http.sink.handler.impl.CommonHttpSinkHandler; import org.apache.eventmesh.connector.http.sink.handler.impl.HttpSinkHandlerRetryWrapper; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java index 95b40afe9e..9c8b1ce673 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java @@ -19,9 +19,11 @@ import org.apache.eventmesh.common.remote.offset.http.HttpRecordOffset; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import 
org.apache.eventmesh.openconnect.offsetmgmt.api.data.KeyValue; import java.io.Serializable; import java.time.LocalDateTime; +import java.util.Base64; import java.util.HashMap; import java.util.Map; import java.util.UUID; @@ -58,10 +60,9 @@ public class HttpConnectRecord implements Serializable { */ private String eventId; - /** - * The ConnectRecord to be sent - */ - private ConnectRecord data; + private Object data; + + private KeyValue extensions; @Override public String toString() { @@ -71,6 +72,7 @@ public String toString() { + ", type='" + type + ", eventId='" + eventId + ", data=" + data + + ", extensions=" + extensions + '}'; } @@ -83,16 +85,34 @@ public String toString() { public static HttpConnectRecord convertConnectRecord(ConnectRecord record, String type) { Map offsetMap = new HashMap<>(); if (record != null && record.getPosition() != null && record.getPosition().getRecordOffset() != null) { - offsetMap = ((HttpRecordOffset) record.getPosition().getRecordOffset()).getOffsetMap(); + if (HttpRecordOffset.class.equals(record.getPosition().getRecordOffsetClazz())) { + offsetMap = ((HttpRecordOffset) record.getPosition().getRecordOffset()).getOffsetMap(); + } } String offset = "0"; if (!offsetMap.isEmpty()) { offset = offsetMap.values().iterator().next().toString(); } - return HttpConnectRecord.builder() - .type(type) - .eventId(type + "-" + offset) - .data(record) - .build(); + if (record.getData() instanceof byte[]) { + String data = Base64.getEncoder().encodeToString((byte[]) record.getData()); + record.addExtension("isBase64", true); + return HttpConnectRecord.builder() + .type(type) + .createTime(LocalDateTime.now()) + .eventId(type + "-" + offset) + .data(data) + .extensions(record.getExtensions()) + .build(); + } else { + record.addExtension("isBase64", false); + return HttpConnectRecord.builder() + .type(type) + .createTime(LocalDateTime.now()) + .eventId(type + "-" + offset) + .data(record.getData()) + .extensions(record.getExtensions()) + .build(); 
+ } } + } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java index 36d01115bb..5c868f4aa9 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.http.sink.handler; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; @@ -81,7 +81,7 @@ public void handle(ConnectRecord record) { attributes.put(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId(), retryEvent); // deliver the record - deliver(url, httpConnectRecord, attributes); + deliver(url, httpConnectRecord, attributes, record); } } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java index 1731809ab9..d5a27940e5 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java @@ -33,14 +33,14 @@ * *

Any class that needs to process ConnectRecords via HTTP or HTTPS should implement this interface. * Implementing classes must provide implementations for the {@link #start()}, {@link #handle(ConnectRecord)}, - * {@link #deliver(URI, HttpConnectRecord, Map)}, and {@link #stop()} methods.

+ * {@link #deliver(URI, HttpConnectRecord, Map, ConnectRecord)}, and {@link #stop()} methods.

* *

Implementing classes should ensure thread safety and handle HTTP/HTTPS communication efficiently. * The {@link #start()} method initializes any necessary resources for HTTP/HTTPS communication. The {@link #handle(ConnectRecord)} method processes a - * ConnectRecord by sending it over HTTP or HTTPS. The {@link #deliver(URI, HttpConnectRecord, Map)} method processes HttpConnectRecord on specified - * URL while returning its own processing logic {@link #stop()} method releases any resources used for HTTP/HTTPS communication.

+ * ConnectRecord by sending it over HTTP or HTTPS. The {@link #deliver(URI, HttpConnectRecord, Map, ConnectRecord)} method processes HttpConnectRecord + * on specified URL while returning its own processing logic {@link #stop()} method releases any resources used for HTTP/HTTPS communication.

* - *

It's recommended to handle exceptions gracefully within the {@link #deliver(URI, HttpConnectRecord, Map)} method + *

It's recommended to handle exceptions gracefully within the {@link #deliver(URI, HttpConnectRecord, Map, ConnectRecord)} method * to prevent message loss or processing interruptions.

*/ public interface HttpSinkHandler { @@ -66,7 +66,7 @@ public interface HttpSinkHandler { * @param attributes additional attributes to be used in processing * @return processing chain */ - Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes); + Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, ConnectRecord connectRecord); /** * Cleans up and releases resources used by the HTTP/HTTPS handler. This method should be called when the handler is no longer needed. diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java index 0907847455..e88707482f 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.http.sink.handler.impl; -import org.apache.eventmesh.common.remote.offset.http.HttpRecordOffset; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; +import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; @@ -29,8 +29,11 @@ import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.net.URI; +import java.time.ZoneId; +import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.Set; import 
java.util.concurrent.TimeUnit; import io.netty.handler.codec.http.HttpHeaderNames; @@ -104,22 +107,25 @@ private void doInitWebClient() { * @return processing chain */ @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes) { + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, + ConnectRecord connectRecord) { // create headers + Map extensionMap = new HashMap<>(); + Set extensionKeySet = httpConnectRecord.getExtensions().keySet(); + for (String extensionKey : extensionKeySet) { + Object v = httpConnectRecord.getExtensions().getObject(extensionKey); + extensionMap.put(extensionKey, v); + } + MultiMap headers = HttpHeaders.headers() .set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=utf-8") - .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8"); - + .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8") + .set("extension", JsonUtils.toJSONString(extensionMap)); // get timestamp and offset - Long timestamp = httpConnectRecord.getData().getTimestamp(); - Map offset = null; - try { - // May throw NullPointerException. - offset = ((HttpRecordOffset) httpConnectRecord.getData().getPosition().getRecordOffset()).getOffsetMap(); - } catch (NullPointerException e) { - // ignore null pointer exception - } - final Map finalOffset = offset; + Long timestamp = httpConnectRecord.getCreateTime() + .atZone(ZoneId.systemDefault()) + .toInstant() + .toEpochMilli(); // send the request return this.webClient.post(url.getPath()) @@ -127,40 +133,38 @@ public Future> deliver(URI url, HttpConnectRecord httpConne .port(url.getPort() == -1 ? (Objects.equals(url.getScheme(), "https") ? 443 : 80) : url.getPort()) .putHeaders(headers) .ssl(Objects.equals(url.getScheme(), "https")) - .sendJson(httpConnectRecord) + .sendJson(httpConnectRecord.getData()) .onSuccess(res -> { - log.info("Request sent successfully. 
Record: timestamp={}, offset={}", timestamp, finalOffset); + log.info("Request sent successfully. Record: timestamp={}", timestamp); Exception e = null; // log the response if (HttpUtils.is2xxSuccessful(res.statusCode())) { if (log.isDebugEnabled()) { - log.debug("Received successful response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", - res.statusCode(), timestamp, finalOffset, res.bodyAsString()); + log.debug("Received successful response: statusCode={}. Record: timestamp={}, responseBody={}", + res.statusCode(), timestamp, res.bodyAsString()); } else { - log.info("Received successful response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, - finalOffset); + log.info("Received successful response: statusCode={}. Record: timestamp={}", res.statusCode(), timestamp); } } else { if (log.isDebugEnabled()) { - log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", - res.statusCode(), timestamp, finalOffset, res.bodyAsString()); + log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, responseBody={}", + res.statusCode(), timestamp, res.bodyAsString()); } else { - log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, - finalOffset); + log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}", res.statusCode(), timestamp); } e = new RuntimeException("Unexpected HTTP response code: " + res.statusCode()); } // try callback - tryCallback(httpConnectRecord, e, attributes); + tryCallback(httpConnectRecord, e, attributes, connectRecord); }).onFailure(err -> { - log.error("Request failed to send. Record: timestamp={}, offset={}", timestamp, finalOffset, err); + log.error("Request failed to send. 
Record: timestamp={}", timestamp, err); // try callback - tryCallback(httpConnectRecord, err, attributes); + tryCallback(httpConnectRecord, err, attributes, connectRecord); }); } @@ -171,7 +175,7 @@ public Future> deliver(URI url, HttpConnectRecord httpConne * @param e the exception thrown during the request, may be null * @param attributes additional attributes to be used in processing */ - private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map attributes) { + private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map attributes, ConnectRecord record) { // get the retry event HttpRetryEvent retryEvent = getAndUpdateRetryEvent(attributes, httpConnectRecord, e); @@ -180,7 +184,6 @@ private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes) { + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, + ConnectRecord connectRecord) { // Build the retry policy RetryPolicy> retryPolicy = RetryPolicy.>builder() @@ -104,7 +106,7 @@ public Future> deliver(URI url, HttpConnectRecord httpConne // Handle the ConnectRecord with retry policy Failsafe.with(retryPolicy) - .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord, attributes).toCompletionStage()); + .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord, attributes, connectRecord).toCompletionStage()); return null; } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java index ff8f69d45a..7edd84a967 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java +++ 
b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java @@ -17,15 +17,16 @@ package org.apache.eventmesh.connector.http.sink.handler.impl; +import org.apache.eventmesh.common.config.connector.http.HttpWebhookConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.common.exception.EventMeshException; import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; -import org.apache.eventmesh.connector.http.sink.config.HttpWebhookConfig; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.connector.http.sink.data.HttpExportMetadata; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecord; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecordPage; import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.commons.lang3.StringUtils; @@ -209,9 +210,10 @@ public void start() { * @return processing chain */ @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes) { + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, + ConnectRecord connectRecord) { // send the request - Future> responseFuture = super.deliver(url, httpConnectRecord, attributes); + Future> responseFuture = super.deliver(url, httpConnectRecord, attributes, connectRecord); // store the received data return responseFuture.onComplete(arr -> { // get tryEvent from attributes @@ -260,8 +262,7 @@ private HttpExportMetadata buildHttpExportMetadata(URI url, HttpResponse .code(response != null ? 
response.statusCode() : -1) .message(msg) .receivedTime(LocalDateTime.now()) - .httpRecordId(httpConnectRecord.getHttpRecordId()) - .recordId(httpConnectRecord.getData().getRecordId()) + .recordId(httpConnectRecord.getHttpRecordId()) .retryNum(retryEvent.getCurrentRetries()) .build(); } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java index 2fe7399da2..9e1dcb7b4c 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java @@ -20,6 +20,8 @@ import java.io.Serializable; import java.util.Map; +import io.vertx.ext.web.RoutingContext; + import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @@ -42,4 +44,6 @@ public class WebhookRequest implements Serializable { private Object payload; + private RoutingContext routingContext; + } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java index 738f045237..0761170ac0 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java @@ -19,21 +19,23 @@ import org.apache.eventmesh.common.Constants; import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import 
org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; import org.apache.eventmesh.connector.http.source.data.CommonResponse; import org.apache.eventmesh.connector.http.source.data.WebhookRequest; import org.apache.eventmesh.connector.http.source.protocol.Protocol; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import java.util.Base64; import java.util.Map; import java.util.stream.Collectors; import io.netty.handler.codec.http.HttpResponseStatus; import io.vertx.core.http.HttpMethod; +import io.vertx.core.json.JsonObject; import io.vertx.ext.web.Route; import io.vertx.ext.web.handler.BodyHandler; - import lombok.extern.slf4j.Slf4j; /** @@ -69,12 +71,13 @@ public void setHandler(Route route, SynchronizedCircularFifoQueue queue) .handler(BodyHandler.create()) .handler(ctx -> { // Get the payload - String payloadStr = ctx.body().asString(Constants.DEFAULT_CHARSET.toString()); + Object payload = ctx.body().asString(Constants.DEFAULT_CHARSET.toString()); + payload = JsonUtils.parseObject(payload.toString(), String.class); // Create and store the webhook request Map headerMap = ctx.request().headers().entries().stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - WebhookRequest webhookRequest = new WebhookRequest(PROTOCOL_NAME, ctx.request().absoluteURI(), headerMap, payloadStr); + WebhookRequest webhookRequest = new WebhookRequest(PROTOCOL_NAME, ctx.request().absoluteURI(), headerMap, payload, ctx); if (!queue.offer(webhookRequest)) { throw new IllegalStateException("Failed to store the request."); } @@ -110,7 +113,27 @@ public ConnectRecord convertToConnectRecord(Object message) { ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), request.getPayload()); connectRecord.addExtension("source", request.getProtocolName()); connectRecord.addExtension("url", request.getUrl()); - connectRecord.addExtension("headers", 
request.getHeaders()); + request.getHeaders().forEach((k, v) -> { + if (k.equalsIgnoreCase("extension")) { + JsonObject extension = new JsonObject(v); + extension.forEach(e -> connectRecord.addExtension(e.getKey(), e.getValue())); + } + }); + // check recordUniqueId + if (!connectRecord.getExtensions().containsKey("recordUniqueId")) { + connectRecord.addExtension("recordUniqueId", connectRecord.getRecordId()); + } + + // check data + if (connectRecord.getExtensionObj("isBase64") != null) { + if (Boolean.parseBoolean(connectRecord.getExtensionObj("isBase64").toString())) { + byte[] data = Base64.getDecoder().decode(connectRecord.getData().toString()); + connectRecord.setData(data); + } + } + if (request.getRoutingContext() != null) { + connectRecord.addExtension("routingContext", request.getRoutingContext()); + } return connectRecord; } } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java index e86efcbf33..fac8c0d801 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java @@ -132,7 +132,7 @@ public void setHandler(Route route, SynchronizedCircularFifoQueue queue) // Create and store the webhook request Map headerMap = headers.entries().stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - WebhookRequest webhookRequest = new WebhookRequest(PROTOCOL_NAME, ctx.request().absoluteURI(), headerMap, payloadStr); + WebhookRequest webhookRequest = new WebhookRequest(PROTOCOL_NAME, ctx.request().absoluteURI(), headerMap, payloadStr, ctx); if (!queue.offer(webhookRequest)) { throw new 
IllegalStateException("Failed to store the request."); diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService new file mode 100644 index 0000000000..d62ff11992 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + +HTTP-Source=org.apache.eventmesh.connector.http.source.HttpSourceConnector +HTTP-Sink=org.apache.eventmesh.connector.http.sink.HttpSinkConnector diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java index 7ddba511c4..5f65f0749f 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java @@ -20,8 +20,8 @@ import static org.mockserver.model.HttpRequest.request; -import org.apache.eventmesh.connector.http.sink.config.HttpSinkConfig; -import org.apache.eventmesh.connector.http.sink.config.HttpWebhookConfig; +import org.apache.eventmesh.common.config.connector.http.HttpSinkConfig; +import org.apache.eventmesh.common.config.connector.http.HttpWebhookConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.eventmesh.openconnect.util.ConfigUtil; diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java index cf1b853474..1ef048b06c 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java @@ -18,6 +18,9 @@ package org.apache.eventmesh.openconnect.api.connector; import org.apache.eventmesh.common.config.connector.SinkConfig; +import 
org.apache.eventmesh.common.remote.job.JobType; + +import java.util.Map; import lombok.Data; @@ -29,4 +32,8 @@ public class SinkConnectorContext implements ConnectorContext { public SinkConfig sinkConfig; + public Map runtimeConfig; + + public JobType jobType; + } diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java index f70e77248e..957452bb10 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java @@ -18,6 +18,7 @@ package org.apache.eventmesh.openconnect.api.connector; import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.remote.job.JobType; import org.apache.eventmesh.common.remote.offset.RecordPosition; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetStorageReader; @@ -38,6 +39,8 @@ public class SourceConnectorContext implements ConnectorContext { public Map runtimeConfig; + public JobType jobType; + // initial record position public List recordPositionList; diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java index a0390c1892..891df482be 100644 --- 
a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java @@ -23,6 +23,11 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter public class DefaultKeyValue implements KeyValue { private final Map properties; diff --git a/eventmesh-runtime-v2/build.gradle b/eventmesh-runtime-v2/build.gradle index ecba7bffb4..04b460ade3 100644 --- a/eventmesh-runtime-v2/build.gradle +++ b/eventmesh-runtime-v2/build.gradle @@ -35,6 +35,7 @@ dependencies { implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation project(":eventmesh-common") implementation project(":eventmesh-connectors:eventmesh-connector-canal") + implementation project(":eventmesh-connectors:eventmesh-connector-http") implementation project(":eventmesh-meta:eventmesh-meta-api") implementation project(":eventmesh-meta:eventmesh-meta-nacos") implementation project(":eventmesh-registry:eventmesh-registry-api") diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java index 501f222fd3..3d3c864b58 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java @@ -31,8 +31,10 @@ import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceStub; import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; import 
org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.JobState; import org.apache.eventmesh.common.remote.request.FetchJobRequest; import org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest; +import org.apache.eventmesh.common.remote.request.ReportJobRequest; import org.apache.eventmesh.common.remote.request.ReportVerifyRequest; import org.apache.eventmesh.common.remote.response.FetchJobResponse; import org.apache.eventmesh.common.utils.IPUtils; @@ -129,10 +131,14 @@ public class ConnectorRuntime implements Runtime { private final ScheduledExecutorService heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(); + private final ExecutorService reportVerifyExecutor = Executors.newSingleThreadExecutor(); + private final BlockingQueue queue; private volatile boolean isRunning = false; + private volatile boolean isFailed = false; + public static final String CALLBACK_EXTENSION = "callBackExtension"; private String adminServerAddr; @@ -207,6 +213,8 @@ private void initConnectorService() throws Exception { FetchJobResponse jobResponse = fetchJobConfig(); if (jobResponse == null) { + isFailed = true; + stop(); throw new RuntimeException("fetch job config fail"); } @@ -245,6 +253,7 @@ private void initConnectorService() throws Exception { SourceConnectorContext sourceConnectorContext = new SourceConnectorContext(); sourceConnectorContext.setSourceConfig(sourceConfig); sourceConnectorContext.setRuntimeConfig(connectorRuntimeConfig.getRuntimeConfig()); + sourceConnectorContext.setJobType(jobResponse.getType()); sourceConnectorContext.setOffsetStorageReader(offsetStorageReader); if (CollectionUtils.isNotEmpty(jobResponse.getPosition())) { sourceConnectorContext.setRecordPositionList(jobResponse.getPosition()); @@ -258,8 +267,12 @@ private void initConnectorService() throws Exception { SinkConfig sinkConfig = (SinkConfig) ConfigUtil.parse(connectorRuntimeConfig.getSinkConnectorConfig(), 
sinkConnector.configClass()); SinkConnectorContext sinkConnectorContext = new SinkConnectorContext(); sinkConnectorContext.setSinkConfig(sinkConfig); + sinkConnectorContext.setRuntimeConfig(connectorRuntimeConfig.getRuntimeConfig()); + sinkConnectorContext.setJobType(jobResponse.getType()); sinkConnector.init(sinkConnectorContext); + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.INIT); + } private FetchJobResponse fetchJobConfig() { @@ -306,6 +319,7 @@ public void start() throws Exception { try { startSinkConnector(); } catch (Exception e) { + isFailed = true; log.error("sink connector [{}] start fail", sinkConnector.name(), e); try { this.stop(); @@ -320,6 +334,7 @@ public void start() throws Exception { try { startSourceConnector(); } catch (Exception e) { + isFailed = true; log.error("source connector [{}] start fail", sourceConnector.name(), e); try { this.stop(); @@ -329,15 +344,25 @@ public void start() throws Exception { throw new RuntimeException(e); } }); + + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.RUNNING); } @Override public void stop() throws Exception { + log.info("ConnectorRuntime start stop"); + isRunning = false; + if (isFailed) { + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.FAIL); + } else { + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.COMPLETE); + } sourceConnector.stop(); sinkConnector.stop(); sourceService.shutdown(); sinkService.shutdown(); heartBeatExecutor.shutdown(); + reportVerifyExecutor.shutdown(); requestObserver.onCompleted(); if (channel != null && !channel.isShutdown()) { channel.shutdown(); @@ -351,6 +376,10 @@ private void startSourceConnector() throws Exception { // TODO: use producer pub record to storage replace below if (connectorRecordList != null && !connectorRecordList.isEmpty()) { for (ConnectRecord record : connectorRecordList) { + // check recordUniqueId + if (record.getExtensions() == null || !record.getExtensions().containsKey("recordUniqueId")) { + 
record.addExtension("recordUniqueId", record.getRecordId()); + } queue.put(record); @@ -364,10 +393,18 @@ private void startSourceConnector() throws Exception { record.setCallback(new SendMessageCallback() { @Override public void onSuccess(SendResult result) { + log.debug("send record to sink callback success, record: {}", record); // commit record sourceConnector.commit(record); - Optional submittedRecordPosition = prepareToUpdateRecordOffset(record); - submittedRecordPosition.ifPresent(RecordOffsetManagement.SubmittedPosition::ack); + if (record.getPosition() != null) { + Optional submittedRecordPosition = prepareToUpdateRecordOffset(record); + submittedRecordPosition.ifPresent(RecordOffsetManagement.SubmittedPosition::ack); + log.debug("start wait all messages to commit"); + offsetManagement.awaitAllMessages(5000, TimeUnit.MILLISECONDS); + // update & commit offset + updateCommittableOffsets(); + commitOffsets(); + } Optional callback = Optional.ofNullable(record.getExtensionObj(CALLBACK_EXTENSION)).map(v -> (SendMessageCallback) v); callback.ifPresent(cb -> cb.onSuccess(convertToSendResult(record))); @@ -375,6 +412,7 @@ public void onSuccess(SendResult result) { @Override public void onException(SendExceptionContext sendExceptionContext) { + isFailed = true; // handle exception sourceConnector.onException(record); log.error("send record to sink callback exception, process will shut down, record: {}", record, @@ -386,11 +424,6 @@ public void onException(SendExceptionContext sendExceptionContext) { } } }); - - offsetManagement.awaitAllMessages(5000, TimeUnit.MILLISECONDS); - // update & commit offset - updateCommittableOffsets(); - commitOffsets(); } } } @@ -406,24 +439,48 @@ private SendResult convertToSendResult(ConnectRecord record) { } private void reportVerifyRequest(ConnectRecord record, ConnectorRuntimeConfig connectorRuntimeConfig, ConnectorStage connectorStage) { - String md5Str = md5(record.toString()); - ReportVerifyRequest reportVerifyRequest = new 
ReportVerifyRequest(); - reportVerifyRequest.setTaskID(connectorRuntimeConfig.getTaskID()); - reportVerifyRequest.setRecordID(record.getRecordId()); - reportVerifyRequest.setRecordSig(md5Str); - reportVerifyRequest.setConnectorName( - IPUtils.getLocalAddress() + "_" + connectorRuntimeConfig.getJobID() + "_" + connectorRuntimeConfig.getRegion()); - reportVerifyRequest.setConnectorStage(connectorStage.name()); - reportVerifyRequest.setPosition(JsonUtils.toJSONString(record.getPosition())); - - Metadata metadata = Metadata.newBuilder().setType(ReportVerifyRequest.class.getSimpleName()).build(); + reportVerifyExecutor.submit(() -> { + try { + // use record data + recordUniqueId for md5 + String md5Str = md5(record.getData().toString() + record.getExtension("recordUniqueId")); + ReportVerifyRequest reportVerifyRequest = new ReportVerifyRequest(); + reportVerifyRequest.setTaskID(connectorRuntimeConfig.getTaskID()); + reportVerifyRequest.setJobID(connectorRuntimeConfig.getJobID()); + reportVerifyRequest.setRecordID(record.getRecordId()); + reportVerifyRequest.setRecordSig(md5Str); + reportVerifyRequest.setConnectorName( + IPUtils.getLocalAddress() + "_" + connectorRuntimeConfig.getJobID() + "_" + connectorRuntimeConfig.getRegion()); + reportVerifyRequest.setConnectorStage(connectorStage.name()); + reportVerifyRequest.setPosition(JsonUtils.toJSONString(record.getPosition())); + + Metadata metadata = Metadata.newBuilder().setType(ReportVerifyRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody( + Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportVerifyRequest)))) + .build()) + .build(); + + requestObserver.onNext(request); + } catch (Exception e) { + log.error("Failed to report verify request", e); + } + }); + } - Payload request = Payload.newBuilder().setMetadata(metadata) - 
.setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportVerifyRequest)))) + private void reportJobRequest(String jobId, JobState jobState) throws InterruptedException { + ReportJobRequest reportJobRequest = new ReportJobRequest(); + reportJobRequest.setJobID(jobId); + reportJobRequest.setState(jobState); + Metadata metadata = Metadata.newBuilder() + .setType(ReportJobRequest.class.getSimpleName()) + .build(); + Payload payload = Payload.newBuilder() + .setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportJobRequest)))) .build()) .build(); - - requestObserver.onNext(request); + requestObserver.onNext(payload); } private String md5(String input) { From 659d748c6ecba2143765941674facc4399ea4c98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:09:37 +0800 Subject: [PATCH 09/51] Bump org.redisson:redisson from 3.32.0 to 3.35.0 (#5090) Bumps [org.redisson:redisson](https://github.com/redisson/redisson) from 3.32.0 to 3.35.0. - [Release notes](https://github.com/redisson/redisson/releases) - [Changelog](https://github.com/redisson/redisson/blob/master/CHANGELOG.md) - [Commits](https://github.com/redisson/redisson/compare/redisson-3.32.0...redisson-3.35.0) --- updated-dependencies: - dependency-name: org.redisson:redisson dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-redis/build.gradle | 2 +- eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/eventmesh-connectors/eventmesh-connector-redis/build.gradle b/eventmesh-connectors/eventmesh-connector-redis/build.gradle index 0c75e7e108..2525e078db 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-redis/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation 'org.redisson:redisson:3.32.0' + implementation 'org.redisson:redisson:3.35.0' api 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle index 1ba2ac0c7b..71d38d1763 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") // redisson - implementation 'org.redisson:redisson:3.32.0' + implementation 'org.redisson:redisson:3.35.0' // netty implementation 'io.netty:netty-all' From c7679d62cf1d1ea93572839c3aaf4614c624fe6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Deniz=20=C3=96=C4=9F=C3=BCt?= <46030809+denizOgut@users.noreply.github.com> Date: Sun, 1 Sep 2024 19:11:03 +0300 Subject: [PATCH 10/51] [ISSUE #4992] unit tests for JsonPathUtils.java (#5083) * test: adds unit tests for JsonPathUtils.java * fix: checkstyle errors --- .../common/utils/JsonPathUtilsTest.java | 171 ++++++++++++++++++ 1 file changed, 171 insertions(+) create mode 100644 
eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java new file mode 100644 index 0000000000..e66b8d711a --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java @@ -0,0 +1,171 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; + +public class JsonPathUtilsTest { + + @Test + public void tesTisEmptyJsonObject() { + String emptyJsonObject = "{}"; + assertTrue(JsonPathUtils.isEmptyJsonObject(emptyJsonObject)); + + String jsonObject = "{\"key\": \"value\"}"; + assertFalse(JsonPathUtils.isEmptyJsonObject(jsonObject)); + + String emptyJsonArray = "[]"; + assertFalse(JsonPathUtils.isEmptyJsonObject(emptyJsonArray)); + + String jsonArray = "[{\"key\": \"value\"}]"; + assertFalse(JsonPathUtils.isEmptyJsonObject(jsonArray)); + + String empty = ""; + assertFalse(JsonPathUtils.isEmptyJsonObject(empty)); + } + + @Test + public void testParseStrict() { + String json = "{\"key\": \"value\"}"; + JsonNode result = JsonPathUtils.parseStrict(json); + assertNotNull(result); + assertEquals("value", result.get("key").asText()); + + String emptyJsonObject = "{}"; + JsonNode result2 = JsonPathUtils.parseStrict(emptyJsonObject); + assertNotNull(result2); + assertTrue(result2.isEmpty()); + + } + + @Test + public void testBuildJsonString() { + Map person = new HashMap<>(); + person.put("name", "John"); + person.put("age", "30"); + String actual = JsonPathUtils.buildJsonString("person", person); + String excepted = "{\"person\":{\"name\":\"John\",\"age\":\"30\"}}"; + assertNotNull(actual); + assertEquals(excepted, actual); + } + + @Test + public void testIsValidAndDefinite() { + String jsonPath = "$.person[0].name"; + String jsonPath2 = "$.person[*].address.city"; + String jsonPath3 = "person.job[0].title"; + + 
assertTrue(JsonPathUtils.isValidAndDefinite(jsonPath)); + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath2)); + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath3)); + + String jsonPath4 = null; + String jsonPath5 = ""; + + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath4)); + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath5)); + } + + + @Test + public void testGetJsonPathValue() { + String jsonContent = "{ \"person\": { \"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" } } }"; + + String jsonPath1 = "$.person.name"; + String jsonPath2 = "$.person.address.city"; + String jsonPath3 = "$.person.age"; + + assertEquals("John Doe", JsonPathUtils.getJsonPathValue(jsonContent, jsonPath1)); + assertEquals("New York", JsonPathUtils.getJsonPathValue(jsonContent, jsonPath2)); + assertEquals("30", JsonPathUtils.getJsonPathValue(jsonContent, jsonPath3)); + + } + + @Test + public void testConvertToJsonNode() throws JsonProcessingException { + String jsonString1 = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + + JsonNode node1 = JsonPathUtils.convertToJsonNode(jsonString1); + assertEquals("John Doe", node1.get("name").asText()); + assertEquals("New York", node1.get("address").get("city").asText()); + assertEquals("30", node1.get("age").asText()); + } + + @Test + public void testMatchJsonPathValueWithString() { + String jsonString = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + + String jsonPath1 = "$.name"; + String result1 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath1); + assertEquals("John Doe", result1); + + String jsonPath2 = "$.age"; + String result2 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath2); + assertEquals("30", result2); // Age should be returned as a string + + String jsonPath3 = "$.address.city"; + String result3 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath3); + assertEquals("New 
York", result3); + + String jsonPath4 = "$.job"; + String result4 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath4); + assertEquals("null", result4); + } + + @Test + public void testJsonPathParse() { + String jsonString = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + + String jsonPath1 = "$.name"; + Object result1 = JsonPathUtils.jsonPathParse(jsonString, jsonPath1); + assertNotNull(result1); + assertEquals("John Doe", result1); + + String jsonPath2 = "$.address.city"; + Object result2 = JsonPathUtils.jsonPathParse(jsonString, jsonPath2); + assertNotNull(result2); + assertEquals("New York", result2); + } + + @Test + public void testMatchJsonPathValue() throws JsonProcessingException { + String jsonString = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + String jsonPath1 = "$.name"; + String result1 = JsonPathUtils.matchJsonPathValue(jsonString, jsonPath1); + assertEquals("\"John Doe\"", result1); + + String jsonPath2 = "$.address.city"; + String result2 = JsonPathUtils.matchJsonPathValue(jsonString, jsonPath2); + assertEquals("\"New York\"", result2); + + String jsonPath3 = "$.job"; + String result3 = JsonPathUtils.matchJsonPathValue(jsonString, jsonPath3); + assertEquals("null", result3); + } +} From fb09512174b1cebebd177f0dc1d575daf35c6ae5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:20:22 +0800 Subject: [PATCH 11/51] Bump io.netty:netty-all from 4.1.111.Final to 4.1.112.Final (#5064) Bumps [io.netty:netty-all](https://github.com/netty/netty) from 4.1.111.Final to 4.1.112.Final. - [Commits](https://github.com/netty/netty/compare/netty-4.1.111.Final...netty-4.1.112.Final) --- updated-dependencies: - dependency-name: io.netty:netty-all dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 01beb47a7b..50cc1f9ffb 100644 --- a/build.gradle +++ b/build.gradle @@ -733,7 +733,7 @@ subprojects { dependency "org.asynchttpclient:async-http-client:2.12.3" dependency "org.apache.httpcomponents:httpclient:4.5.14" - dependency "io.netty:netty-all:4.1.111.Final" + dependency "io.netty:netty-all:4.1.112.Final" dependency "io.dropwizard.metrics:metrics-core:${dropwizardMetricsVersion}" dependency "io.dropwizard.metrics:metrics-healthchecks:${dropwizardMetricsVersion}" From ab0820870cae3a2a53ff4c2b37e2ba12237ad488 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:22:12 +0800 Subject: [PATCH 12/51] Bump com.baomidou:mybatis-plus-boot-starter from 3.5.5 to 3.5.7 (#5060) Bumps [com.baomidou:mybatis-plus-boot-starter](https://github.com/baomidou/mybatis-plus) from 3.5.5 to 3.5.7. - [Release notes](https://github.com/baomidou/mybatis-plus/releases) - [Changelog](https://github.com/baomidou/mybatis-plus/blob/3.0/CHANGELOG.md) - [Commits](https://github.com/baomidou/mybatis-plus/compare/v3.5.5...v3.5.7) --- updated-dependencies: - dependency-name: com.baomidou:mybatis-plus-boot-starter dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 50cc1f9ffb..48db0b6eaf 100644 --- a/build.gradle +++ b/build.gradle @@ -798,7 +798,7 @@ subprojects { dependency "com.github.rholder:guava-retrying:2.0.0" dependency "com.alibaba:druid-spring-boot-starter:1.2.23" - dependency "com.baomidou:mybatis-plus-boot-starter:3.5.5" + dependency "com.baomidou:mybatis-plus-boot-starter:3.5.7" dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.18" dependency "com.mysql:mysql-connector-j:8.4.0" dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.10" From 622af004c92b4ad34af0899f0bc04eea83bae4bb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:22:51 +0800 Subject: [PATCH 13/51] Bump protobufVersion from 3.25.3 to 3.25.4 (#5057) Bumps `protobufVersion` from 3.25.3 to 3.25.4. Updates `com.google.protobuf:protobuf-java-util` from 3.25.3 to 3.25.4 Updates `com.google.protobuf:protobuf-java` from 3.25.3 to 3.25.4 - [Release notes](https://github.com/protocolbuffers/protobuf/releases) - [Changelog](https://github.com/protocolbuffers/protobuf/blob/main/protobuf_release.bzl) - [Commits](https://github.com/protocolbuffers/protobuf/compare/v3.25.3...v3.25.4) --- updated-dependencies: - dependency-name: com.google.protobuf:protobuf-java-util dependency-type: direct:production update-type: version-update:semver-patch - dependency-name: com.google.protobuf:protobuf-java dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-meta/eventmesh-meta-raft/build.gradle | 2 +- .../eventmesh-protocol-cloudevents/build.gradle | 2 +- eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle | 2 +- eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle | 2 +- .../eventmesh-protocol-meshmessage/build.gradle | 2 +- eventmesh-sdks/eventmesh-sdk-java/build.gradle | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eventmesh-meta/eventmesh-meta-raft/build.gradle b/eventmesh-meta/eventmesh-meta-raft/build.gradle index 6144bcdd56..5b2324ce57 100644 --- a/eventmesh-meta/eventmesh-meta-raft/build.gradle +++ b/eventmesh-meta/eventmesh-meta-raft/build.gradle @@ -20,7 +20,7 @@ plugins { } def grpcVersion = '1.65.1' -def protobufVersion = '3.25.3' +def protobufVersion = '3.25.4' def protocVersion = protobufVersion def jraftVersion = '1.3.14' diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle index 6b196a1f8e..c3904f4822 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle @@ -23,7 +23,7 @@ dependencies { implementation ("io.grpc:grpc-protobuf:1.65.1") { exclude group: "com.google.protobuf", module: "protobuf-java" } - implementation("com.google.protobuf:protobuf-java:3.25.3") + implementation("com.google.protobuf:protobuf-java:3.25.4") implementation "io.cloudevents:cloudevents-protobuf" compileOnly 'org.projectlombok:lombok' diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle index c28e10f728..0149929479 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle @@ -25,7 
+25,7 @@ repositories { } def grpcVersion = '1.65.1' -def protobufVersion = '3.25.3' +def protobufVersion = '3.25.4' def protocVersion = protobufVersion dependencies { diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle index 2544359735..67a9ef6183 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle @@ -23,5 +23,5 @@ dependencies { implementation ("io.grpc:grpc-protobuf:1.65.1") { exclude group: "com.google.protobuf", module: "protobuf-java" } - implementation("com.google.protobuf:protobuf-java:3.25.3") + implementation("com.google.protobuf:protobuf-java:3.25.4") } diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle index edd8632919..d67f5fd9e3 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle @@ -21,7 +21,7 @@ dependencies { implementation ("io.grpc:grpc-protobuf:1.65.1") { exclude group: "com.google.protobuf", module: "protobuf-java" } - implementation("com.google.protobuf:protobuf-java:3.25.3") + implementation("com.google.protobuf:protobuf-java:3.25.4") implementation "io.cloudevents:cloudevents-protobuf" testImplementation project(":eventmesh-protocol-plugin:eventmesh-protocol-api") diff --git a/eventmesh-sdks/eventmesh-sdk-java/build.gradle b/eventmesh-sdks/eventmesh-sdk-java/build.gradle index c59cadb068..d2f8c122c2 100644 --- a/eventmesh-sdks/eventmesh-sdk-java/build.gradle +++ b/eventmesh-sdks/eventmesh-sdk-java/build.gradle @@ -54,7 +54,7 @@ dependencies { implementation "io.grpc:grpc-protobuf:${grpcVersion}" implementation "io.grpc:grpc-stub:${grpcVersion}" - implementation "com.google.protobuf:protobuf-java-util:3.25.3" + implementation 
"com.google.protobuf:protobuf-java-util:3.25.4" compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' From ab2a71934ba6f0f0d11f47d5f7a87b0751caf606 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:28:10 +0800 Subject: [PATCH 14/51] Bump org.apache.commons:commons-lang3 from 3.14.0 to 3.17.0 (#5087) Bumps org.apache.commons:commons-lang3 from 3.14.0 to 3.17.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-lang3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 48db0b6eaf..0faeac572f 100644 --- a/build.gradle +++ b/build.gradle @@ -708,7 +708,7 @@ subprojects { dependencyManagement { dependencies { - dependency "org.apache.commons:commons-lang3:3.14.0" + dependency "org.apache.commons:commons-lang3:3.17.0" dependency "org.apache.commons:commons-collections4:4.4" dependency "org.apache.commons:commons-text:1.12.0" dependency "commons-io:commons-io:2.16.1" From c3647524915db98197707753b5ebeb483712b9e0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:28:20 +0800 Subject: [PATCH 15/51] Bump com.alibaba.nacos:nacos-client from 2.3.3 to 2.4.1 (#5089) Bumps [com.alibaba.nacos:nacos-client](https://github.com/alibaba/nacos) from 2.3.3 to 2.4.1. - [Release notes](https://github.com/alibaba/nacos/releases) - [Changelog](https://github.com/alibaba/nacos/blob/develop/CHANGELOG.md) - [Commits](https://github.com/alibaba/nacos/commits/2.4.1) --- updated-dependencies: - dependency-name: com.alibaba.nacos:nacos-client dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 0faeac572f..a479b90fc0 100644 --- a/build.gradle +++ b/build.gradle @@ -782,7 +782,7 @@ subprojects { dependency "org.javassist:javassist:3.30.2-GA" - dependency "com.alibaba.nacos:nacos-client:2.3.3" + dependency "com.alibaba.nacos:nacos-client:2.4.1" dependency 'org.apache.zookeeper:zookeeper:3.9.2' dependency "org.apache.curator:curator-client:${curatorVersion}" From b28b321d09eca8d0db0aeb9d327589c8438e2850 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:29:41 +0800 Subject: [PATCH 16/51] Bump com.slack.api:bolt from 1.40.+ to 1.42.0 (#5084) Bumps [com.slack.api:bolt](https://github.com/slackapi/java-slack-sdk) from 1.40.+ to 1.42.0. - [Release notes](https://github.com/slackapi/java-slack-sdk/releases) - [Changelog](https://github.com/slackapi/java-slack-sdk/blob/main/jSlack_changelog.md) - [Commits](https://github.com/slackapi/java-slack-sdk/commits/v1.42.0) --- updated-dependencies: - dependency-name: com.slack.api:bolt dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-slack/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-connectors/eventmesh-connector-slack/build.gradle b/eventmesh-connectors/eventmesh-connector-slack/build.gradle index d577d3358a..665f748b5f 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-slack/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation project(":eventmesh-sdks:eventmesh-sdk-java") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation "com.slack.api:bolt:1.40.+" + implementation "com.slack.api:bolt:1.42.+" implementation 'com.google.guava:guava' compileOnly 'org.projectlombok:lombok' From 3b979bd6c0ce23a845d26034425fe6ed1f4277ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:30:00 +0800 Subject: [PATCH 17/51] Bump io.vertx:vertx-web-client from 4.5.8 to 4.5.9 (#5054) Bumps io.vertx:vertx-web-client from 4.5.8 to 4.5.9. --- updated-dependencies: - dependency-name: io.vertx:vertx-web-client dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-http/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-connectors/eventmesh-connector-http/build.gradle b/eventmesh-connectors/eventmesh-connector-http/build.gradle index cfc69259d5..abd6b1fc64 100644 --- a/eventmesh-connectors/eventmesh-connector-http/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-http/build.gradle @@ -21,7 +21,7 @@ dependencies { implementation 'io.cloudevents:cloudevents-http-vertx:3.0.0' implementation 'io.vertx:vertx-web:4.5.8' - implementation 'io.vertx:vertx-web-client:4.5.8' + implementation 'io.vertx:vertx-web-client:4.5.9' implementation 'dev.failsafe:failsafe:3.3.2' From 6718e775c6d54c0e824ebbad1128322b84ecc6b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 00:30:24 +0800 Subject: [PATCH 18/51] Bump com.github.fppt:jedis-mock from 1.1.2 to 1.1.3 (#5062) Bumps [com.github.fppt:jedis-mock](https://github.com/fppt/jedis-mock) from 1.1.2 to 1.1.3. - [Release notes](https://github.com/fppt/jedis-mock/releases) - [Commits](https://github.com/fppt/jedis-mock/compare/v1.1.2...v1.1.3) --- updated-dependencies: - dependency-name: com.github.fppt:jedis-mock dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle index 71d38d1763..7a195562b5 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle @@ -29,7 +29,7 @@ dependencies { api 'io.cloudevents:cloudevents-json-jackson' // test dependencies - testImplementation 'com.github.fppt:jedis-mock:1.1.2' + testImplementation 'com.github.fppt:jedis-mock:1.1.3' testImplementation "org.mockito:mockito-core" compileOnly 'org.projectlombok:lombok' From af14260ce24ccedb14c34e215454c3e55a1ec805 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 04:10:55 +0800 Subject: [PATCH 19/51] Bump org.springframework.boot:spring-boot-starter-jetty from 2.7.10 to 2.7.18 (#5056) * Bump org.springframework.boot:spring-boot-starter-jetty Bumps [org.springframework.boot:spring-boot-starter-jetty](https://github.com/spring-projects/spring-boot) from 2.7.10 to 2.7.18. - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.7.10...v2.7.18) --- updated-dependencies: - dependency-name: org.springframework.boot:spring-boot-starter-jetty dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * feat: remove duplicate dep --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Pil0tXia --- build.gradle | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index a479b90fc0..909d540360 100644 --- a/build.gradle +++ b/build.gradle @@ -799,9 +799,8 @@ subprojects { dependency "com.alibaba:druid-spring-boot-starter:1.2.23" dependency "com.baomidou:mybatis-plus-boot-starter:3.5.7" - dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.18" dependency "com.mysql:mysql-connector-j:8.4.0" - dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.10" + dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.18" dependency "org.locationtech.jts:jts-core:1.19.0" } } From c52fe5a5732ea935ac01ce656c6147d95557cbf4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 04:11:04 +0800 Subject: [PATCH 20/51] Bump gradle/actions from 3 to 4 (#5091) * Bump gradle/actions from 3 to 4 Bumps [gradle/actions](https://github.com/gradle/actions) from 3 to 4. - [Release notes](https://github.com/gradle/actions/releases) - [Commits](https://github.com/gradle/actions/compare/v3...v4) --- updated-dependencies: - dependency-name: gradle/actions dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] * feat: reduce version change --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Pil0tXia --- .github/dependabot.yml | 2 +- .github/workflows/ci.yml | 2 +- .github/workflows/code-scanning.yml | 2 +- .github/workflows/license.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 5043e76662..508014de35 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -24,7 +24,7 @@ updates: interval: "monthly" ignore: - dependency-name: "*" - update-types: [ "version-update:semver-major" ] + update-types: [ "version-update:semver-major", "version-update:semver-patch" ] - dependency-name: "software.amazon.awssdk:s3" update-types: [ "version-update:semver-patch" ] - dependency-name: "com.aliyun:dingtalk" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2d86da1f51..bd5f977c2e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,7 +48,7 @@ jobs: make -C ./eventmesh-sdks/eventmesh-sdk-c - name: Setup Gradle - uses: gradle/actions/setup-gradle@v3 + uses: gradle/actions/setup-gradle@v4 - name: Set up JDK 11 uses: actions/setup-java@v4 diff --git a/.github/workflows/code-scanning.yml b/.github/workflows/code-scanning.yml index df3e57c89e..5476923b0a 100644 --- a/.github/workflows/code-scanning.yml +++ b/.github/workflows/code-scanning.yml @@ -60,7 +60,7 @@ jobs: - name: Setup Gradle if: matrix.language == 'java' - uses: gradle/actions/setup-gradle@v3 + uses: gradle/actions/setup-gradle@v4 with: cache-disabled: true diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml index ca0e0936a6..9172e85414 100644 --- a/.github/workflows/license.yml +++ b/.github/workflows/license.yml @@ -38,7 +38,7 @@ jobs: java-version: 11 - name: Setup Gradle - uses: gradle/actions/setup-gradle@v3 + uses: gradle/actions/setup-gradle@v4 - 
name: Check license compatibility run: ./gradlew clean checkDeniedLicense From af2ef6e7330b6ea5bb8888a1cd7971a293b34d40 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 04:11:15 +0800 Subject: [PATCH 21/51] Bump grpcVersion from 1.65.1 to 1.66.0 (#5085) Bumps `grpcVersion` from 1.65.1 to 1.66.0. Updates `io.grpc:grpc-core` from 1.65.1 to 1.66.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.65.1...v1.66.0) Updates `io.grpc:grpc-protobuf` from 1.65.1 to 1.66.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.65.1...v1.66.0) Updates `io.grpc:grpc-stub` from 1.65.1 to 1.66.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.65.1...v1.66.0) Updates `io.grpc:grpc-netty` from 1.65.1 to 1.66.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.65.1...v1.66.0) Updates `io.grpc:grpc-netty-shaded` from 1.65.1 to 1.66.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.65.1...v1.66.0) Updates `io.grpc:protoc-gen-grpc-java` from 1.65.1 to 1.66.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.65.1...v1.66.0) --- updated-dependencies: - dependency-name: io.grpc:grpc-core dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-protobuf dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-stub dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-netty dependency-type: direct:production update-type: version-update:semver-minor - 
dependency-name: io.grpc:grpc-netty-shaded dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:protoc-gen-grpc-java dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- eventmesh-common/build.gradle | 2 +- eventmesh-examples/build.gradle | 2 +- eventmesh-meta/eventmesh-meta-raft/build.gradle | 2 +- .../eventmesh-protocol-cloudevents/build.gradle | 2 +- eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle | 2 +- eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle | 2 +- .../eventmesh-protocol-meshmessage/build.gradle | 2 +- eventmesh-sdks/eventmesh-sdk-java/build.gradle | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/build.gradle b/build.gradle index 909d540360..04938350ee 100644 --- a/build.gradle +++ b/build.gradle @@ -697,7 +697,7 @@ subprojects { sign publishing.publications.mavenJava } - def grpcVersion = '1.65.1' + def grpcVersion = '1.66.0' def log4jVersion = '2.23.1' def jacksonVersion = '2.17.2' def dropwizardMetricsVersion = '4.2.26' diff --git a/eventmesh-common/build.gradle b/eventmesh-common/build.gradle index c95e9f6c29..1cbef7ec24 100644 --- a/eventmesh-common/build.gradle +++ b/eventmesh-common/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def grpcVersion = '1.65.1' +def grpcVersion = '1.66.0' dependencies { api "com.google.guava:guava" diff --git a/eventmesh-examples/build.gradle b/eventmesh-examples/build.gradle index 509a03f59d..2cd5d66386 100644 --- a/eventmesh-examples/build.gradle +++ b/eventmesh-examples/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -def grpcVersion = '1.65.1' +def grpcVersion = '1.66.0' dependencies { implementation project(":eventmesh-sdks:eventmesh-sdk-java") diff --git a/eventmesh-meta/eventmesh-meta-raft/build.gradle b/eventmesh-meta/eventmesh-meta-raft/build.gradle index 5b2324ce57..9718b24df5 100644 --- a/eventmesh-meta/eventmesh-meta-raft/build.gradle +++ b/eventmesh-meta/eventmesh-meta-raft/build.gradle @@ -19,7 +19,7 @@ plugins { id 'com.google.protobuf' version '0.9.4' } -def grpcVersion = '1.65.1' +def grpcVersion = '1.66.0' def protobufVersion = '3.25.4' def protocVersion = protobufVersion diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle index c3904f4822..2f6c05528a 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation "io.cloudevents:cloudevents-core" implementation "com.google.guava:guava" implementation "io.cloudevents:cloudevents-json-jackson" - implementation ("io.grpc:grpc-protobuf:1.65.1") { + implementation ("io.grpc:grpc-protobuf:1.66.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } implementation("com.google.protobuf:protobuf-java:3.25.4") diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle index 0149929479..844a4b4541 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle @@ -24,7 +24,7 @@ repositories { mavenCentral() } -def grpcVersion = '1.65.1' +def grpcVersion = '1.66.0' def protobufVersion = '3.25.4' def protocVersion = protobufVersion diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle index 67a9ef6183..8ca23d60a5 100644 
--- a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation "io.cloudevents:cloudevents-core" implementation "com.google.guava:guava" implementation "io.cloudevents:cloudevents-json-jackson" - implementation ("io.grpc:grpc-protobuf:1.65.1") { + implementation ("io.grpc:grpc-protobuf:1.66.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } implementation("com.google.protobuf:protobuf-java:3.25.4") diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle index d67f5fd9e3..36ce1ed4d2 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation project(":eventmesh-protocol-plugin:eventmesh-protocol-api") implementation "io.cloudevents:cloudevents-core" - implementation ("io.grpc:grpc-protobuf:1.65.1") { + implementation ("io.grpc:grpc-protobuf:1.66.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } implementation("com.google.protobuf:protobuf-java:3.25.4") diff --git a/eventmesh-sdks/eventmesh-sdk-java/build.gradle b/eventmesh-sdks/eventmesh-sdk-java/build.gradle index d2f8c122c2..1a83d9e4bc 100644 --- a/eventmesh-sdks/eventmesh-sdk-java/build.gradle +++ b/eventmesh-sdks/eventmesh-sdk-java/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -def grpcVersion = '1.65.1' +def grpcVersion = '1.66.0' dependencies { api(project(":eventmesh-common")) { From 5483563078566bf66c02e094b7c64908de91e0f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 14:39:12 +0530 Subject: [PATCH 22/51] Bump software.amazon.awssdk:s3 from 2.26.3 to 2.27.17 (#5099) Bumps software.amazon.awssdk:s3 from 2.26.3 to 2.27.17. --- updated-dependencies: - dependency-name: software.amazon.awssdk:s3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 04938350ee..5cfe0bc9c2 100644 --- a/build.gradle +++ b/build.gradle @@ -794,7 +794,7 @@ subprojects { dependency "javax.annotation:javax.annotation-api:1.3.2" dependency "com.alibaba.fastjson2:fastjson2:2.0.52" - dependency "software.amazon.awssdk:s3:2.26.3" + dependency "software.amazon.awssdk:s3:2.27.17" dependency "com.github.rholder:guava-retrying:2.0.0" dependency "com.alibaba:druid-spring-boot-starter:1.2.23" From 9511f5a9a03a2e2db1e3242ea74613350f12347c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 14:40:25 +0530 Subject: [PATCH 23/51] Bump junit:junit from 4.12 to 4.13.2 (#5093) Bumps [junit:junit](https://github.com/junit-team/junit4) from 4.12 to 4.13.2. - [Release notes](https://github.com/junit-team/junit4/releases) - [Changelog](https://github.com/junit-team/junit4/blob/main/doc/ReleaseNotes4.12.md) - [Commits](https://github.com/junit-team/junit4/compare/r4.12...r4.13.2) --- updated-dependencies: - dependency-name: junit:junit dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-admin-server/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-admin-server/build.gradle b/eventmesh-admin-server/build.gradle index 95c8fa1372..fdfe1bffe8 100644 --- a/eventmesh-admin-server/build.gradle +++ b/eventmesh-admin-server/build.gradle @@ -38,7 +38,7 @@ dependencies { implementation "com.alibaba:druid-spring-boot-starter" compileOnly 'com.mysql:mysql-connector-j' compileOnly 'org.projectlombok:lombok' - testImplementation 'junit:junit:4.12' + testImplementation 'junit:junit:4.13.2' testImplementation 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' } From f908ffe712bc72c3a6d02d198b905a348b00b1c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 14:40:57 +0530 Subject: [PATCH 24/51] Bump com.github.jk1.dependency-license-report from 2.8 to 2.9 (#5095) Bumps com.github.jk1.dependency-license-report from 2.8 to 2.9. --- updated-dependencies: - dependency-name: com.github.jk1.dependency-license-report dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 5cfe0bc9c2..78c42c88fe 100644 --- a/build.gradle +++ b/build.gradle @@ -52,7 +52,7 @@ buildscript { plugins { id 'org.cyclonedx.bom' version '1.8.2' - id 'com.github.jk1.dependency-license-report' version '2.8' + id 'com.github.jk1.dependency-license-report' version '2.9' } allprojects { From c9244ddad2bb47fd558960c63f409ac3650df05c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 00:33:33 +0530 Subject: [PATCH 25/51] Bump org.junit.jupiter:junit-jupiter from 5.10.3 to 5.11.0 (#5097) Bumps [org.junit.jupiter:junit-jupiter](https://github.com/junit-team/junit5) from 5.10.3 to 5.11.0. - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.10.3...r5.11.0) --- updated-dependencies: - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 78c42c88fe..b8eded011e 100644 --- a/build.gradle +++ b/build.gradle @@ -109,7 +109,7 @@ allprojects { url "https://maven.aliyun.com/repository/public" } } - testImplementation "org.junit.jupiter:junit-jupiter:5.10.3" + testImplementation "org.junit.jupiter:junit-jupiter:5.11.0" } spotless { @@ -757,7 +757,7 @@ subprojects { dependency "org.springframework.boot:spring-boot-starter-web:2.7.18" dependency "io.openmessaging:registry-server:0.0.1" - dependency "org.junit.jupiter:junit-jupiter:5.10.3" + dependency "org.junit.jupiter:junit-jupiter:5.11.0" dependency "org.junit-pioneer:junit-pioneer:1.9.1" dependency "org.assertj:assertj-core:3.26.3" From a3dfb3b2c27a965be40402fc034c3a6b73af6f0d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 00:34:02 +0530 Subject: [PATCH 26/51] Bump com.google.guava:guava from 33.2.1-jre to 33.3.0-jre (#5096) Bumps [com.google.guava:guava](https://github.com/google/guava) from 33.2.1-jre to 33.3.0-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index b8eded011e..af4460a511 100644 --- a/build.gradle +++ b/build.gradle @@ -713,7 +713,7 @@ subprojects { dependency "org.apache.commons:commons-text:1.12.0" dependency "commons-io:commons-io:2.16.1" dependency "commons-validator:commons-validator:1.9.0" - dependency "com.google.guava:guava:33.2.1-jre" + dependency "com.google.guava:guava:33.3.0-jre" dependency "org.slf4j:slf4j-api:2.0.13" dependency "org.apache.logging.log4j:log4j-api:${log4jVersion}" From a40fa72c1efe83087358b99b7894d1ea6ee5e658 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Oct 2024 01:26:18 +0530 Subject: [PATCH 27/51] Bump com.gradle.develocity from 3.17.5 to 3.18.1 (#5121) Bumps com.gradle.develocity from 3.17.5 to 3.18.1. --- updated-dependencies: - dependency-name: com.gradle.develocity dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- settings.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.gradle b/settings.gradle index e9346bc461..a22363c809 100644 --- a/settings.gradle +++ b/settings.gradle @@ -16,7 +16,7 @@ */ plugins { - id 'com.gradle.develocity' version '3.17.5' + id 'com.gradle.develocity' version '3.18.1' id 'com.gradle.common-custom-user-data-gradle-plugin' version '2.0.2' } From 91f8a67d80d6a1eca10c69b15e81f128377e1a98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 6 Oct 2024 17:56:17 +0530 Subject: [PATCH 28/51] Bump software.amazon.awssdk:s3 from 2.27.17 to 2.28.12 (#5120) Bumps software.amazon.awssdk:s3 from 2.27.17 to 2.28.12. 
--- updated-dependencies: - dependency-name: software.amazon.awssdk:s3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index af4460a511..5776de3549 100644 --- a/build.gradle +++ b/build.gradle @@ -794,7 +794,7 @@ subprojects { dependency "javax.annotation:javax.annotation-api:1.3.2" dependency "com.alibaba.fastjson2:fastjson2:2.0.52" - dependency "software.amazon.awssdk:s3:2.27.17" + dependency "software.amazon.awssdk:s3:2.28.12" dependency "com.github.rholder:guava-retrying:2.0.0" dependency "com.alibaba:druid-spring-boot-starter:1.2.23" From a4e29d80a94bd3967caa34d312387d3ec9449876 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melis=20=C3=96lmez?= <77929541+melisolmez@users.noreply.github.com> Date: Mon, 7 Oct 2024 08:26:00 +0300 Subject: [PATCH 29/51] [ISSUE #4540] Add unit test for ThreadUtils (#5111) --- .../common/utils/ThreadUtilsTest.java | 118 ++++++++++++++++++ 1 file changed, 118 insertions(+) create mode 100644 eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java new file mode 100644 index 0000000000..0cba2a6ad9 --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +package org.apache.eventmesh.common.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; + +class ThreadUtilsTest { + + @Test + void testRandomPauseBetweenMinAndMax() { + + long min = 1000; + long max = 5000; + + long start = System.currentTimeMillis(); + ThreadUtils.randomPause(min, max, TimeUnit.MILLISECONDS); + long end = System.currentTimeMillis(); + + long pause = end - start; + + assertTrue(pause >= min && pause <= max, "Pause time should be between min and max"); + } + + @Test + void testRandomPauseWithInterruption() { + + Thread.currentThread().interrupt(); + ThreadUtils.randomPause(1000, 2000, TimeUnit.MILLISECONDS); + assertTrue(Thread.currentThread().isInterrupted()); + } + + @Test + void testDeprecatedSleep() { + + ThreadUtils.sleep(1000); + assertTrue(true, "Method should execute without any exception"); + } + + @Test + void testSleepWithTimeOutAndTimeUnit() throws InterruptedException { + + ThreadUtils.sleepWithThrowException(5000, TimeUnit.MILLISECONDS); + assertTrue(true, "Method should execute without any exception"); + } + + @Test + void testSleepWithNullTimeUnit() throws InterruptedException { + + ThreadUtils.sleepWithThrowException(5000, null); 
+ assertTrue(true, "Method should not throw any exception with null TimeUnit"); + } + + @Test + void testSleepWithThrowExceptionInterruption() { + Thread.currentThread().interrupt(); + + assertThrows(InterruptedException.class, () -> { + ThreadUtils.sleepWithThrowException(5000, TimeUnit.MILLISECONDS); + }); + } + + @Test + void testGetPIDWithRealProcessId() { + + long pid = ThreadUtils.getPID(); + assertTrue(pid > 0); + + long cashedPId = ThreadUtils.getPID(); + assertEquals(pid, cashedPId); + } + + @Test + void testGetPIDWithMultiThread() throws InterruptedException { + + final long[] pid1 = new long[1]; + final long[] pid2 = new long[1]; + + Thread thread1 = new Thread(() -> { + pid1[0] = ThreadUtils.getPID(); + assertTrue(pid1[0] > 0); + }); + + Thread thread2 = new Thread(() -> { + pid2[0] = ThreadUtils.getPID(); + assertTrue(pid2[0] > 0); + }); + + thread1.start(); + thread2.start(); + + thread1.join(); + thread2.join(); + + assertEquals(pid1[0], pid2[0]); + } +} \ No newline at end of file From 61e72c781c285295e3a3bcbc6f8bcda26a9ffb90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 00:26:19 +0530 Subject: [PATCH 30/51] Bump com.rabbitmq:amqp-client from 5.21.0 to 5.22.0 (#5119) Bumps [com.rabbitmq:amqp-client](https://github.com/rabbitmq/rabbitmq-java-client) from 5.21.0 to 5.22.0. - [Release notes](https://github.com/rabbitmq/rabbitmq-java-client/releases) - [Commits](https://github.com/rabbitmq/rabbitmq-java-client/compare/v5.21.0...v5.22.0) --- updated-dependencies: - dependency-name: com.rabbitmq:amqp-client dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../eventmesh-connector-rabbitmq/build.gradle | 2 +- .../eventmesh-storage-rabbitmq/build.gradle | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle b/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle index 54764c7b63..2693b681d0 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle @@ -20,7 +20,7 @@ dependencies { api project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation project(":eventmesh-common") // rabbitmq - implementation 'com.rabbitmq:amqp-client:5.21.0' + implementation 'com.rabbitmq:amqp-client:5.22.0' implementation 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle index 8ca1ec8f8a..41eb93965e 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle @@ -19,12 +19,12 @@ dependencies { implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") implementation project(":eventmesh-common") // rabbitmq - implementation 'com.rabbitmq:amqp-client:5.21.0' + implementation 'com.rabbitmq:amqp-client:5.22.0' testImplementation project(":eventmesh-storage-plugin:eventmesh-storage-api") testImplementation project(":eventmesh-common") // rabbitmq - testImplementation 'com.rabbitmq:amqp-client:5.21.0' + testImplementation 'com.rabbitmq:amqp-client:5.22.0' implementation 'io.cloudevents:cloudevents-json-jackson' testImplementation 'io.cloudevents:cloudevents-json-jackson' From 092a3974b9b9882fde49af4c93475e5de1e4d358 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 16:27:32 +0530 Subject: [PATCH 31/51] Bump org.redisson:redisson from 3.35.0 to 3.36.0 (#5118) Bumps [org.redisson:redisson](https://github.com/redisson/redisson) from 3.35.0 to 3.36.0. - [Release notes](https://github.com/redisson/redisson/releases) - [Changelog](https://github.com/redisson/redisson/blob/master/CHANGELOG.md) - [Commits](https://github.com/redisson/redisson/compare/redisson-3.35.0...redisson-3.36.0) --- updated-dependencies: - dependency-name: org.redisson:redisson dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-redis/build.gradle | 2 +- eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/eventmesh-connectors/eventmesh-connector-redis/build.gradle b/eventmesh-connectors/eventmesh-connector-redis/build.gradle index 2525e078db..29b541958a 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-redis/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation 'org.redisson:redisson:3.35.0' + implementation 'org.redisson:redisson:3.36.0' api 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle index 7a195562b5..6fca0d8b13 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") // redisson - implementation 
'org.redisson:redisson:3.35.0' + implementation 'org.redisson:redisson:3.36.0' // netty implementation 'io.netty:netty-all' From 7abe42a4efa1a9dfeb68902e1f6b441bb20e6459 Mon Sep 17 00:00:00 2001 From: KrispauI Date: Tue, 8 Oct 2024 21:43:36 +0800 Subject: [PATCH 32/51] [ISSUE #4990] Add unit test for HttpConvertsUtils.java (#5110) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add unit test for HttpConvertsUtils.java * fix package error * Bump com.gradle.develocity from 3.17.5 to 3.18.1 (#5121) Bumps com.gradle.develocity from 3.17.5 to 3.18.1. --- updated-dependencies: - dependency-name: com.gradle.develocity dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Bump software.amazon.awssdk:s3 from 2.27.17 to 2.28.12 (#5120) Bumps software.amazon.awssdk:s3 from 2.27.17 to 2.28.12. --- updated-dependencies: - dependency-name: software.amazon.awssdk:s3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * [ISSUE #4540] Add unit test for ThreadUtils (#5111) * Bump com.rabbitmq:amqp-client from 5.21.0 to 5.22.0 (#5119) Bumps [com.rabbitmq:amqp-client](https://github.com/rabbitmq/rabbitmq-java-client) from 5.21.0 to 5.22.0. - [Release notes](https://github.com/rabbitmq/rabbitmq-java-client/releases) - [Commits](https://github.com/rabbitmq/rabbitmq-java-client/compare/v5.21.0...v5.22.0) --- updated-dependencies: - dependency-name: com.rabbitmq:amqp-client dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * fix CI errors --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Melis Ölmez <77929541+melisolmez@users.noreply.github.com> --- .../eventmesh/common/stubs/HeaderStub.java | 35 ++++++++++ .../common/utils/HttpConvertsUtilsTest.java | 67 +++++++++++++++++++ 2 files changed, 102 insertions(+) create mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java create mode 100644 eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java new file mode 100644 index 0000000000..1782d46dd6 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.stubs; + +import org.apache.eventmesh.common.protocol.http.common.ProtocolKey; +import org.apache.eventmesh.common.protocol.http.header.Header; +import org.apache.eventmesh.common.utils.HttpConvertsUtils; + +import java.util.Map; + +public class HeaderStub extends Header { + + public String code; + public String eventmeshenv; + + @Override + public Map toMap() { + return new HttpConvertsUtils().httpMapConverts(this, new ProtocolKey(), new ProtocolKey.EventMeshInstanceKey()); + } +} diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java new file mode 100644 index 0000000000..253b1de926 --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.utils; + +import org.apache.eventmesh.common.protocol.http.common.ProtocolKey; +import org.apache.eventmesh.common.protocol.http.common.ProtocolKey.EventMeshInstanceKey; +import org.apache.eventmesh.common.protocol.http.header.Header; +import org.apache.eventmesh.common.stubs.HeaderStub; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class HttpConvertsUtilsTest { + + private final HeaderStub headerStub = new HeaderStub(); + private final ProtocolKey mockedProtocolKey = new ProtocolKey(); + private final EventMeshInstanceKey mockedEventMeshProtocolKey = new EventMeshInstanceKey(); + + @Test + void httpMapConverts() { + Map httpMapConverts = new HttpConvertsUtils().httpMapConverts(headerStub, mockedProtocolKey); + Assertions.assertEquals(httpMapConverts.get(headerStub.code), headerStub.code); + } + + @Test + void testHttpMapConverts() { + Map httpMapConverts = new HttpConvertsUtils().httpMapConverts(headerStub, mockedProtocolKey, mockedEventMeshProtocolKey); + Assertions.assertEquals(httpMapConverts.get(headerStub.code), headerStub.code); + Assertions.assertEquals(httpMapConverts.get(headerStub.eventmeshenv), headerStub.eventmeshenv); + } + + @Test + void httpHeaderConverts() { + HashMap headerParams = new HashMap<>(); + String code = "test"; + headerParams.put("code", code); + Header header = new HttpConvertsUtils().httpHeaderConverts(headerStub, headerParams); + Assertions.assertEquals(code, header.toMap().get("code")); + } + + @Test + void testHttpHeaderConverts() { + HashMap headerParams = new HashMap<>(); + String env = "test"; + headerParams.put("eventmeshenv", env); + Header header = new HttpConvertsUtils().httpHeaderConverts(headerStub, headerParams, mockedEventMeshProtocolKey); + Assertions.assertEquals(env, header.toMap().get("eventmeshenv")); + } +} From de44681318ee26ad2bf2b962a1e7d46e30d2195f Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 03:03:53 +0530 Subject: [PATCH 33/51] Bump org.apache.httpcomponents.client5:httpclient5-fluent (#5117) Bumps [org.apache.httpcomponents.client5:httpclient5-fluent](https://github.com/apache/httpcomponents-client) from 5.3.1 to 5.4. - [Changelog](https://github.com/apache/httpcomponents-client/blob/master/RELEASE_NOTES.txt) - [Commits](https://github.com/apache/httpcomponents-client/compare/rel/v5.3.1...rel/v5.4) --- updated-dependencies: - dependency-name: org.apache.httpcomponents.client5:httpclient5-fluent dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-http/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-connectors/eventmesh-connector-http/build.gradle b/eventmesh-connectors/eventmesh-connector-http/build.gradle index abd6b1fc64..043d42b4b4 100644 --- a/eventmesh-connectors/eventmesh-connector-http/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-http/build.gradle @@ -26,7 +26,7 @@ dependencies { testImplementation 'org.apache.httpcomponents.client5:httpclient5:5.3.1' - testImplementation 'org.apache.httpcomponents.client5:httpclient5-fluent:5.3.1' + testImplementation 'org.apache.httpcomponents.client5:httpclient5-fluent:5.4' testImplementation 'org.mock-server:mockserver-netty:5.15.0' compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' From 6b90e16205aca0dd7003df63fd3932ebc4c269a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 00:32:03 +0530 Subject: [PATCH 34/51] Bump org.apache.httpcomponents.client5:httpclient5 from 5.3.1 to 5.4 (#5116) Bumps 
[org.apache.httpcomponents.client5:httpclient5](https://github.com/apache/httpcomponents-client) from 5.3.1 to 5.4. - [Changelog](https://github.com/apache/httpcomponents-client/blob/master/RELEASE_NOTES.txt) - [Commits](https://github.com/apache/httpcomponents-client/compare/rel/v5.3.1...rel/v5.4) --- updated-dependencies: - dependency-name: org.apache.httpcomponents.client5:httpclient5 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-http/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-connectors/eventmesh-connector-http/build.gradle b/eventmesh-connectors/eventmesh-connector-http/build.gradle index 043d42b4b4..48c7aecd06 100644 --- a/eventmesh-connectors/eventmesh-connector-http/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-http/build.gradle @@ -25,7 +25,7 @@ dependencies { implementation 'dev.failsafe:failsafe:3.3.2' - testImplementation 'org.apache.httpcomponents.client5:httpclient5:5.3.1' + testImplementation 'org.apache.httpcomponents.client5:httpclient5:5.4' testImplementation 'org.apache.httpcomponents.client5:httpclient5-fluent:5.4' testImplementation 'org.mock-server:mockserver-netty:5.15.0' compileOnly 'org.projectlombok:lombok' From 20946ff1e159a03be5765d5712ee2d25ce347a30 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 21:32:00 +0530 Subject: [PATCH 35/51] Bump grpcVersion from 1.66.0 to 1.68.0 (#5115) Bumps `grpcVersion` from 1.66.0 to 1.68.0. 
Updates `io.grpc:grpc-core` from 1.66.0 to 1.68.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.66.0...v1.68.0) Updates `io.grpc:grpc-protobuf` from 1.66.0 to 1.68.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.66.0...v1.68.0) Updates `io.grpc:grpc-stub` from 1.66.0 to 1.68.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.66.0...v1.68.0) Updates `io.grpc:grpc-netty` from 1.66.0 to 1.68.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.66.0...v1.68.0) Updates `io.grpc:grpc-netty-shaded` from 1.66.0 to 1.68.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.66.0...v1.68.0) Updates `io.grpc:protoc-gen-grpc-java` from 1.66.0 to 1.68.0 - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.66.0...v1.68.0) --- updated-dependencies: - dependency-name: io.grpc:grpc-core dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-protobuf dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-stub dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-netty dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:grpc-netty-shaded dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: io.grpc:protoc-gen-grpc-java dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- eventmesh-common/build.gradle | 2 +- eventmesh-examples/build.gradle | 2 +- eventmesh-meta/eventmesh-meta-raft/build.gradle | 2 +- .../eventmesh-protocol-cloudevents/build.gradle | 2 +- eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle | 2 +- eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle | 2 +- .../eventmesh-protocol-meshmessage/build.gradle | 2 +- eventmesh-sdks/eventmesh-sdk-java/build.gradle | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/build.gradle b/build.gradle index 5776de3549..a18e2136d3 100644 --- a/build.gradle +++ b/build.gradle @@ -697,7 +697,7 @@ subprojects { sign publishing.publications.mavenJava } - def grpcVersion = '1.66.0' + def grpcVersion = '1.68.0' def log4jVersion = '2.23.1' def jacksonVersion = '2.17.2' def dropwizardMetricsVersion = '4.2.26' diff --git a/eventmesh-common/build.gradle b/eventmesh-common/build.gradle index 1cbef7ec24..21b6e63d44 100644 --- a/eventmesh-common/build.gradle +++ b/eventmesh-common/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def grpcVersion = '1.66.0' +def grpcVersion = '1.68.0' dependencies { api "com.google.guava:guava" diff --git a/eventmesh-examples/build.gradle b/eventmesh-examples/build.gradle index 2cd5d66386..bd90b83495 100644 --- a/eventmesh-examples/build.gradle +++ b/eventmesh-examples/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -def grpcVersion = '1.66.0' +def grpcVersion = '1.68.0' dependencies { implementation project(":eventmesh-sdks:eventmesh-sdk-java") diff --git a/eventmesh-meta/eventmesh-meta-raft/build.gradle b/eventmesh-meta/eventmesh-meta-raft/build.gradle index 9718b24df5..210e348c86 100644 --- a/eventmesh-meta/eventmesh-meta-raft/build.gradle +++ b/eventmesh-meta/eventmesh-meta-raft/build.gradle @@ -19,7 +19,7 @@ plugins { id 'com.google.protobuf' version '0.9.4' } -def grpcVersion = '1.66.0' +def grpcVersion = '1.68.0' def protobufVersion = '3.25.4' def protocVersion = protobufVersion diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle index 2f6c05528a..e6ffc372b9 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation "io.cloudevents:cloudevents-core" implementation "com.google.guava:guava" implementation "io.cloudevents:cloudevents-json-jackson" - implementation ("io.grpc:grpc-protobuf:1.66.0") { + implementation ("io.grpc:grpc-protobuf:1.68.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } implementation("com.google.protobuf:protobuf-java:3.25.4") diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle index 844a4b4541..5929c72136 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle @@ -24,7 +24,7 @@ repositories { mavenCentral() } -def grpcVersion = '1.66.0' +def grpcVersion = '1.68.0' def protobufVersion = '3.25.4' def protocVersion = protobufVersion diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle index 8ca23d60a5..d219c5dc03 100644 
--- a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation "io.cloudevents:cloudevents-core" implementation "com.google.guava:guava" implementation "io.cloudevents:cloudevents-json-jackson" - implementation ("io.grpc:grpc-protobuf:1.66.0") { + implementation ("io.grpc:grpc-protobuf:1.68.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } implementation("com.google.protobuf:protobuf-java:3.25.4") diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle index 36ce1ed4d2..3f15d199ff 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation project(":eventmesh-protocol-plugin:eventmesh-protocol-api") implementation "io.cloudevents:cloudevents-core" - implementation ("io.grpc:grpc-protobuf:1.66.0") { + implementation ("io.grpc:grpc-protobuf:1.68.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } implementation("com.google.protobuf:protobuf-java:3.25.4") diff --git a/eventmesh-sdks/eventmesh-sdk-java/build.gradle b/eventmesh-sdks/eventmesh-sdk-java/build.gradle index 1a83d9e4bc..be55c650a3 100644 --- a/eventmesh-sdks/eventmesh-sdk-java/build.gradle +++ b/eventmesh-sdks/eventmesh-sdk-java/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def grpcVersion = '1.66.0' +def grpcVersion = '1.68.0' dependencies { api(project(":eventmesh-common")) { From b27e5165047f5c50596e75f2b9735866d7d9f5b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 21:32:31 +0530 Subject: [PATCH 36/51] Bump jacksonVersion from 2.17.2 to 2.18.0 (#5114) Bumps `jacksonVersion` from 2.17.2 to 2.18.0. 
Updates `com.fasterxml.jackson.core:jackson-databind` from 2.17.2 to 2.18.0 - [Commits](https://github.com/FasterXML/jackson/commits) Updates `com.fasterxml.jackson.core:jackson-core` from 2.17.2 to 2.18.0 - [Commits](https://github.com/FasterXML/jackson-core/compare/jackson-core-2.17.2...jackson-core-2.18.0) Updates `com.fasterxml.jackson.core:jackson-annotations` from 2.17.2 to 2.18.0 - [Commits](https://github.com/FasterXML/jackson/commits) Updates `com.fasterxml.jackson.dataformat:jackson-dataformat-yaml` from 2.17.2 to 2.18.0 - [Commits](https://github.com/FasterXML/jackson-dataformats-text/compare/jackson-dataformats-text-2.17.2...jackson-dataformats-text-2.18.0) Updates `com.fasterxml.jackson.datatype:jackson-datatype-jsr310` from 2.17.2 to 2.18.0 --- updated-dependencies: - dependency-name: com.fasterxml.jackson.core:jackson-databind dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: com.fasterxml.jackson.core:jackson-core dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: com.fasterxml.jackson.core:jackson-annotations dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: com.fasterxml.jackson.dataformat:jackson-dataformat-yaml dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: com.fasterxml.jackson.datatype:jackson-datatype-jsr310 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index a18e2136d3..0a4e51eb50 100644 --- a/build.gradle +++ b/build.gradle @@ -699,7 +699,7 @@ subprojects { def grpcVersion = '1.68.0' def log4jVersion = '2.23.1' - def jacksonVersion = '2.17.2' + def jacksonVersion = '2.18.0' def dropwizardMetricsVersion = '4.2.26' def opentelemetryVersion = '1.36.0' def cloudeventsVersion = '3.0.0' From d8b8629004462201c1111f4e4b3d52046a7daeb1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 20 Oct 2024 00:08:16 +0530 Subject: [PATCH 37/51] Bump commons-io:commons-io from 2.16.1 to 2.17.0 (#5112) Bumps commons-io:commons-io from 2.16.1 to 2.17.0. --- updated-dependencies: - dependency-name: commons-io:commons-io dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 0a4e51eb50..6d3e588a7a 100644 --- a/build.gradle +++ b/build.gradle @@ -46,7 +46,7 @@ buildscript { classpath "com.diffplug.spotless:spotless-plugin-gradle:6.13.0" classpath "org.apache.httpcomponents:httpclient:4.5.14" - classpath "commons-io:commons-io:2.16.1" + classpath "commons-io:commons-io:2.17.0" } } @@ -711,7 +711,7 @@ subprojects { dependency "org.apache.commons:commons-lang3:3.17.0" dependency "org.apache.commons:commons-collections4:4.4" dependency "org.apache.commons:commons-text:1.12.0" - dependency "commons-io:commons-io:2.16.1" + dependency "commons-io:commons-io:2.17.0" dependency "commons-validator:commons-validator:1.9.0" dependency "com.google.guava:guava:33.3.0-jre" From 63d15181e36fd1c6a24df4671acf12c7290cb144 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 19:19:35 +0530 Subject: [PATCH 38/51] Bump log4jVersion from 2.23.1 to 2.24.1 (#5113) Bumps `log4jVersion` from 2.23.1 to 2.24.1. Updates `org.apache.logging.log4j:log4j-api` from 2.23.1 to 2.24.1 Updates `org.apache.logging.log4j:log4j-core` from 2.23.1 to 2.24.1 Updates `org.apache.logging.log4j:log4j-slf4j2-impl` from 2.23.1 to 2.24.1 --- updated-dependencies: - dependency-name: org.apache.logging.log4j:log4j-api dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: org.apache.logging.log4j:log4j-core dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: org.apache.logging.log4j:log4j-slf4j2-impl dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 6d3e588a7a..a5162ef3cb 100644 --- a/build.gradle +++ b/build.gradle @@ -698,7 +698,7 @@ subprojects { } def grpcVersion = '1.68.0' - def log4jVersion = '2.23.1' + def log4jVersion = '2.24.1' def jacksonVersion = '2.18.0' def dropwizardMetricsVersion = '4.2.26' def opentelemetryVersion = '1.36.0' From c1c381b270d1b1d08db5a1ff272dce2e351d25c6 Mon Sep 17 00:00:00 2001 From: Pil0tXia Date: Tue, 22 Oct 2024 03:15:00 +0800 Subject: [PATCH 39/51] [ISSUE #4836] Fix Git submodules checkout failure in CI `Build C` task (#4743) * checkout submodules * remove possible redundant "check out git submodule" * minor naming unification (inrelevant with PR subject) * Not triggering C task for now * Use the same version of actions/checkout as 'Build' task for 'License Check' task * Revert change to avoid conflict * Update a renamed label --- .github/workflows/ci.yml | 11 +++++------ .github/workflows/stale.yml | 2 +- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd5f977c2e..d63c381107 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,13 +39,12 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + submodules: true - - if: matrix.language == 'cpp' || matrix.language == 'csharp' - name: Build C - run: | - git submodule init - git submodule update - make -C ./eventmesh-sdks/eventmesh-sdk-c + - name: Build C SDK + if: matrix.language == 'cpp' + run: make -C ./eventmesh-sdks/eventmesh-sdk-c - name: Setup Gradle uses: gradle/actions/setup-gradle@v4 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index c1e4f4ab15..8b5b166215 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -42,7 +42,7 @@ 
jobs: It has been 60 days since the last activity on this pull request. I am reaching out here to gently remind you that the Apache EventMesh community values every pull request, and please feel free to get in touch with the reviewers at any time. They are available to assist you in advancing the progress of your pull request and offering the latest feedback. If you encounter any challenges during development, seeking support within the community is encouraged. We sincerely appreciate your contributions to Apache EventMesh. - exempt-issue-labels: 'pinned,discussion,help wanted,WIP,weopen-star,GLCC,summer of code' + exempt-issue-labels: 'pinned,discussion,help wanted,WIP,weopen-star,GLCC,GSoC' exempt-pr-labels: 'help wanted,dependencies' exempt-all-milestones: true # Exempt all issues/PRs with milestones from stale operations-per-run: 300 From 37ed7e42c516b867b1dc79ff9857030e20be1271 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 01:52:12 +0530 Subject: [PATCH 40/51] Bump org.locationtech.jts:jts-core from 1.19.0 to 1.20.0 (#5098) Bumps org.locationtech.jts:jts-core from 1.19.0 to 1.20.0. --- updated-dependencies: - dependency-name: org.locationtech.jts:jts-core dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index a5162ef3cb..e4a7260841 100644 --- a/build.gradle +++ b/build.gradle @@ -801,7 +801,7 @@ subprojects { dependency "com.baomidou:mybatis-plus-boot-starter:3.5.7" dependency "com.mysql:mysql-connector-j:8.4.0" dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.18" - dependency "org.locationtech.jts:jts-core:1.19.0" + dependency "org.locationtech.jts:jts-core:1.20.0" } } } From 77063b322dbcd6bf597d090e55dd763c25f6c378 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Deniz=20=C3=96=C4=9F=C3=BCt?= <46030809+denizOgut@users.noreply.github.com> Date: Mon, 28 Oct 2024 05:10:42 +0300 Subject: [PATCH 41/51] [ISSUE #5092] unit test added for ``EtcdCustomService`` (#5107) --- .../etcd/service/EtcdCustomServiceTest.java | 122 ++++++++++++++++++ 1 file changed, 122 insertions(+) create mode 100644 eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java diff --git a/eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java b/eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java new file mode 100644 index 0000000000..a20564ae01 --- /dev/null +++ b/eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry.etcd.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +import org.apache.eventmesh.api.meta.bo.EventMeshAppSubTopicInfo; +import org.apache.eventmesh.api.meta.bo.EventMeshServicePubTopicInfo; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.meta.etcd.service.EtcdCustomService; + +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.junit.jupiter.MockitoExtension; + +import io.etcd.jetcd.ByteSequence; +import io.etcd.jetcd.Client; +import io.etcd.jetcd.KV; +import io.etcd.jetcd.KeyValue; +import io.etcd.jetcd.kv.GetResponse; +import io.etcd.jetcd.options.GetOption; + +@ExtendWith(MockitoExtension.class) +public class EtcdCustomServiceTest { + + @Mock + private Client etcdClient; + + @Mock + private KV kvClient; + + @Mock + private KeyValue keyValue; + + @Mock + private GetResponse getResponse; + + @Mock + private CompletableFuture futureResponse; + + @InjectMocks + private EtcdCustomService etcdCustomService; + 
+ @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + when(etcdClient.getKVClient()).thenReturn(kvClient); + } + + @Test + public void testFindEventMeshServicePubTopicInfos() throws Exception { + + EventMeshServicePubTopicInfo mockInfo = new EventMeshServicePubTopicInfo(); + mockInfo.setService("testService"); + mockInfo.setTopics(Collections.unmodifiableSet(new HashSet<>(Arrays.asList("topic1", "topic2")))); + + String mockValue = JsonUtils.toJSONString(mockInfo); + ByteSequence mockByteSequence = ByteSequence.from(mockValue, StandardCharsets.UTF_8); + + when(keyValue.getValue()).thenReturn(mockByteSequence); + when(getResponse.getKvs()).thenReturn(Arrays.asList(keyValue)); + when(futureResponse.get()).thenReturn(getResponse); + when(kvClient.get(any(ByteSequence.class), any(GetOption.class))).thenReturn(futureResponse); + + List result = etcdCustomService.findEventMeshServicePubTopicInfos(); + assertNotNull(result); + assertEquals(1, result.size()); + EventMeshServicePubTopicInfo resultInfo = result.get(0); + assertEquals("testService", resultInfo.getService()); + assertEquals(new HashSet<>(Arrays.asList("topic1", "topic2")), resultInfo.getTopics()); + } + + + @Test + public void testFindEventMeshAppSubTopicInfoByGroup() throws Exception { + + String group = "testGroup"; + EventMeshAppSubTopicInfo mockInfo = new EventMeshAppSubTopicInfo(); + + String mockValue = JsonUtils.toJSONString(mockInfo); + ByteSequence mockByteSequence = ByteSequence.from(mockValue, StandardCharsets.UTF_8); + + when(keyValue.getValue()).thenReturn(mockByteSequence); + when(kvClient.get(any(ByteSequence.class), any(GetOption.class))).thenReturn(futureResponse); + when(futureResponse.get()).thenReturn(getResponse); + when(getResponse.getKvs()).thenReturn(Collections.singletonList(keyValue)); + + EventMeshAppSubTopicInfo result = etcdCustomService.findEventMeshAppSubTopicInfoByGroup(group); + + assertNotNull(result); + } + +} From fe3d56b113e1b41c108731d0925746ae4edb0b91 Mon 
Sep 17 00:00:00 2001 From: Zaki Date: Mon, 28 Oct 2024 11:37:19 +0800 Subject: [PATCH 42/51] [ISSUE #5105] Fix the retry mechanism of the HttpSinkConnector (#5106) --- ...pRetryEvent.java => HttpAttemptEvent.java} | 74 +++++++++++++++---- .../sink/data/MultiHttpRequestContext.java | 16 +++- .../sink/handler/AbstractHttpSinkHandler.java | 9 +-- .../handler/impl/CommonHttpSinkHandler.java | 47 +++++------- .../impl/HttpSinkHandlerRetryWrapper.java | 57 +++++++------- .../handler/impl/WebhookHttpSinkHandler.java | 27 +++---- .../http/sink/HttpSinkConnectorTest.java | 2 - 7 files changed, 130 insertions(+), 102 deletions(-) rename eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/{HttpRetryEvent.java => HttpAttemptEvent.java} (52%) diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpAttemptEvent.java similarity index 52% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java rename to eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpAttemptEvent.java index 4b229f9839..8163852f8f 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpRetryEvent.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpAttemptEvent.java @@ -17,41 +17,82 @@ package org.apache.eventmesh.connector.http.sink.data; -import lombok.Data; +import java.util.concurrent.atomic.AtomicInteger; /** - * Single HTTP retry event + * Single HTTP attempt event */ -@Data -public class HttpRetryEvent { +public class HttpAttemptEvent { - public static final String PREFIX = 
"http-retry-event-"; + public static final String PREFIX = "http-attempt-event-"; - private String parentId; + private final int maxAttempts; - private int maxRetries; - - private int currentRetries; + private final AtomicInteger attempts; private Throwable lastException; + + public HttpAttemptEvent(int maxAttempts) { + this.maxAttempts = maxAttempts; + this.attempts = new AtomicInteger(0); + } + + /** + * Increment the attempts + */ + public void incrementAttempts() { + attempts.incrementAndGet(); + } + /** - * Increase the current retries by 1 + * Update the event, incrementing the attempts and setting the last exception + * + * @param exception the exception to update, can be null */ - public void increaseCurrentRetries() { - this.currentRetries++; + public void updateEvent(Throwable exception) { + // increment the attempts + incrementAttempts(); + + // update the last exception + lastException = exception; } /** - * Check if the current retries is greater than or equal to the max retries - * @return true if the current retries is greater than or equal to the max retries + * Check if the attempts are less than the maximum attempts + * + * @return true if the attempts are less than the maximum attempts, false otherwise */ - public boolean isMaxRetriesReached() { - return this.currentRetries >= this.maxRetries; + public boolean canAttempt() { + return attempts.get() < maxAttempts; + } + + public boolean isComplete() { + if (attempts.get() == 0) { + // No start yet + return false; + } + + // If no attempt can be made or the last exception is null, the event completed + return !canAttempt() || lastException == null; + } + + + public int getMaxAttempts() { + return maxAttempts; + } + + public int getAttempts() { + return attempts.get(); + } + + public Throwable getLastException() { + return lastException; } /** * Get the limited exception message with the default limit of 256 + * * @return the limited exception message */ public String getLimitedExceptionMessage() { 
@@ -60,6 +101,7 @@ public String getLimitedExceptionMessage() { /** * Get the limited exception message with the specified limit + * * @param maxLimit the maximum limit of the exception message * @return the limited exception message */ diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java index 67ab943818..66f5d0e7ec 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java @@ -34,10 +34,9 @@ public class MultiHttpRequestContext { /** * The last failed event. - * If there are no retries or retries are not enabled, it will be null. * If retries occur but still fail, it will be logged, and only the last one will be retained. */ - private HttpRetryEvent lastFailedEvent; + private HttpAttemptEvent lastFailedEvent; public MultiHttpRequestContext(int remainingEvents) { this.remainingRequests = new AtomicInteger(remainingEvents); @@ -50,15 +49,24 @@ public void decrementRemainingRequests() { remainingRequests.decrementAndGet(); } + /** + * Check if all requests have been processed. + * + * @return true if all requests have been processed, false otherwise. 
+ */ + public boolean isAllRequestsProcessed() { + return remainingRequests.get() == 0; + } + public int getRemainingRequests() { return remainingRequests.get(); } - public HttpRetryEvent getLastFailedEvent() { + public HttpAttemptEvent getLastFailedEvent() { return lastFailedEvent; } - public void setLastFailedEvent(HttpRetryEvent lastFailedEvent) { + public void setLastFailedEvent(HttpAttemptEvent lastFailedEvent) { this.lastFailedEvent = lastFailedEvent; } } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java index 5c868f4aa9..28ba791127 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java @@ -18,8 +18,8 @@ package org.apache.eventmesh.connector.http.sink.handler; import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.sink.data.HttpAttemptEvent; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; -import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; @@ -75,10 +75,9 @@ public void handle(ConnectRecord record) { this.sinkConnectorConfig.getWebhookConfig().isActivate() ? 
"webhook" : "common"); HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - // add retry event to attributes - HttpRetryEvent retryEvent = new HttpRetryEvent(); - retryEvent.setMaxRetries(sinkConnectorConfig.getRetryConfig().getMaxRetries()); - attributes.put(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId(), retryEvent); + // add AttemptEvent to the attributes + HttpAttemptEvent attemptEvent = new HttpAttemptEvent(this.sinkConnectorConfig.getRetryConfig().getMaxRetries() + 1); + attributes.put(HttpAttemptEvent.PREFIX + httpConnectRecord.getHttpRecordId(), attemptEvent); // deliver the record deliver(url, httpConnectRecord, attributes, record); diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java index e88707482f..61bdc9f310 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java @@ -19,8 +19,8 @@ import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.http.sink.data.HttpAttemptEvent; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; -import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; import org.apache.eventmesh.connector.http.sink.handler.AbstractHttpSinkHandler; import org.apache.eventmesh.connector.http.util.HttpUtils; @@ -176,13 +176,14 @@ public Future> deliver(URI url, HttpConnectRecord httpConne * 
@param attributes additional attributes to be used in processing */ private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map attributes, ConnectRecord record) { - // get the retry event - HttpRetryEvent retryEvent = getAndUpdateRetryEvent(attributes, httpConnectRecord, e); + // get and update the attempt event + HttpAttemptEvent attemptEvent = (HttpAttemptEvent) attributes.get(HttpAttemptEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + attemptEvent.updateEvent(e); - // get the multi http request context - MultiHttpRequestContext multiHttpRequestContext = getAndUpdateMultiHttpRequestContext(attributes, retryEvent); + // get and update the multiHttpRequestContext + MultiHttpRequestContext multiHttpRequestContext = getAndUpdateMultiHttpRequestContext(attributes, attemptEvent); - if (multiHttpRequestContext.getRemainingRequests() == 0) { + if (multiHttpRequestContext.isAllRequestsProcessed()) { // do callback if (record.getCallback() == null) { if (log.isDebugEnabled()) { @@ -193,7 +194,8 @@ private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map attributes, HttpConnectRecord httpConnectRecord, Throwable e) { - // get the retry event - HttpRetryEvent retryEvent = (HttpRetryEvent) attributes.get(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId()); - // update the retry event - retryEvent.setLastException(e); - return retryEvent; - } - /** * Gets and updates the multi http request context based on the provided attributes and HttpConnectRecord. 
* - * @param attributes the attributes to use - * @param retryEvent the retry event to use + * @param attributes the attributes to use + * @param attemptEvent the HttpAttemptEvent to use * @return the updated multi http request context */ - private MultiHttpRequestContext getAndUpdateMultiHttpRequestContext(Map attributes, HttpRetryEvent retryEvent) { + private MultiHttpRequestContext getAndUpdateMultiHttpRequestContext(Map attributes, HttpAttemptEvent attemptEvent) { // get the multi http request context MultiHttpRequestContext multiHttpRequestContext = (MultiHttpRequestContext) attributes.get(MultiHttpRequestContext.NAME); - if (retryEvent.getLastException() == null || retryEvent.isMaxRetriesReached()) { + // Check if the current attempted event has completed + if (attemptEvent.isComplete()) { // decrement the counter multiHttpRequestContext.decrementRemainingRequests(); - // try set failed event - if (retryEvent.getLastException() != null) { - multiHttpRequestContext.setLastFailedEvent(retryEvent); + if (attemptEvent.getLastException() != null) { + // if all attempts are exhausted, set the last failed event + multiHttpRequestContext.setLastFailedEvent(attemptEvent); } } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java index 820b46296a..050839451a 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java @@ -20,7 +20,6 @@ import org.apache.eventmesh.common.config.connector.http.HttpRetryConfig; import 
org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; -import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.eventmesh.connector.http.sink.handler.AbstractHttpSinkHandler; import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler; import org.apache.eventmesh.connector.http.util.HttpUtils; @@ -51,10 +50,38 @@ public class HttpSinkHandlerRetryWrapper extends AbstractHttpSinkHandler { private final HttpSinkHandler sinkHandler; + private final RetryPolicy> retryPolicy; + public HttpSinkHandlerRetryWrapper(SinkConnectorConfig sinkConnectorConfig, HttpSinkHandler sinkHandler) { super(sinkConnectorConfig); this.sinkHandler = sinkHandler; this.httpRetryConfig = getSinkConnectorConfig().getRetryConfig(); + this.retryPolicy = buildRetryPolicy(); + } + + private RetryPolicy> buildRetryPolicy() { + return RetryPolicy.>builder() + .handleIf(e -> e instanceof ConnectException) + .handleResultIf(response -> httpRetryConfig.isRetryOnNonSuccess() && !HttpUtils.is2xxSuccessful(response.statusCode())) + .withMaxRetries(httpRetryConfig.getMaxRetries()) + .withDelay(Duration.ofMillis(httpRetryConfig.getInterval())) + .onRetry(event -> { + if (log.isDebugEnabled()) { + log.warn("Failed to deliver message after {} attempts. Retrying in {} ms. Error: {}", + event.getAttemptCount(), httpRetryConfig.getInterval(), event.getLastException()); + } else { + log.warn("Failed to deliver message after {} attempts. Retrying in {} ms.", + event.getAttemptCount(), httpRetryConfig.getInterval()); + } + }).onFailure(event -> { + if (log.isDebugEnabled()) { + log.error("Failed to deliver message after {} attempts. 
Error: {}", + event.getAttemptCount(), event.getException()); + } else { + log.error("Failed to deliver message after {} attempts.", + event.getAttemptCount()); + } + }).build(); } /** @@ -78,36 +105,8 @@ public void start() { @Override public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, ConnectRecord connectRecord) { - - // Build the retry policy - RetryPolicy> retryPolicy = RetryPolicy.>builder() - .handleIf(e -> e instanceof ConnectException) - .handleResultIf(response -> httpRetryConfig.isRetryOnNonSuccess() && !HttpUtils.is2xxSuccessful(response.statusCode())) - .withMaxRetries(httpRetryConfig.getMaxRetries()) - .withDelay(Duration.ofMillis(httpRetryConfig.getInterval())) - .onRetry(event -> { - if (log.isDebugEnabled()) { - log.warn("Retrying the request to {} for the {} time. {}", url, event.getAttemptCount(), httpConnectRecord); - } else { - log.warn("Retrying the request to {} for the {} time.", url, event.getAttemptCount()); - } - // update the retry event - HttpRetryEvent retryEvent = (HttpRetryEvent) attributes.get(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId()); - retryEvent.increaseCurrentRetries(); - }) - .onFailure(event -> { - if (log.isDebugEnabled()) { - log.error("Failed to send the request to {} after {} attempts. 
{}", url, event.getAttemptCount(), - httpConnectRecord, event.getException()); - } else { - log.error("Failed to send the request to {} after {} attempts.", url, event.getAttemptCount(), event.getException()); - } - }).build(); - - // Handle the ConnectRecord with retry policy Failsafe.with(retryPolicy) .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord, attributes, connectRecord).toCompletionStage()); - return null; } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java index 7edd84a967..0751918ee7 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java @@ -21,11 +21,11 @@ import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.common.exception.EventMeshException; import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; +import org.apache.eventmesh.connector.http.sink.data.HttpAttemptEvent; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.connector.http.sink.data.HttpExportMetadata; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecord; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecordPage; -import org.apache.eventmesh.connector.http.sink.data.HttpRetryEvent; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.commons.lang3.StringUtils; @@ -216,18 +216,14 @@ public Future> deliver(URI url, HttpConnectRecord httpConne Future> responseFuture = super.deliver(url, 
httpConnectRecord, attributes, connectRecord); // store the received data return responseFuture.onComplete(arr -> { - // get tryEvent from attributes - HttpRetryEvent retryEvent = (HttpRetryEvent) attributes.get(HttpRetryEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + // get HttpAttemptEvent + HttpAttemptEvent attemptEvent = (HttpAttemptEvent) attributes.get(HttpAttemptEvent.PREFIX + httpConnectRecord.getHttpRecordId()); - HttpResponse response = null; - if (arr.succeeded()) { - response = arr.result(); - } else { - retryEvent.setLastException(arr.cause()); - } + // get the response + HttpResponse response = arr.succeeded() ? arr.result() : null; // create ExportMetadata - HttpExportMetadata httpExportMetadata = buildHttpExportMetadata(url, response, httpConnectRecord, retryEvent); + HttpExportMetadata httpExportMetadata = buildHttpExportMetadata(url, response, httpConnectRecord, attemptEvent); // create ExportRecord HttpExportRecord exportRecord = new HttpExportRecord(httpExportMetadata, arr.succeeded() ? 
arr.result().bodyAsString() : null); @@ -242,17 +238,16 @@ public Future> deliver(URI url, HttpConnectRecord httpConne * @param url the URI to which the HttpConnectRecord was sent * @param response the response received from the URI * @param httpConnectRecord the HttpConnectRecord that was sent - * @param retryEvent the SingleHttpRetryEvent that was used for retries + * @param attemptEvent the HttpAttemptEvent that was used to send the HttpConnectRecord * @return the HttpExportMetadata object */ private HttpExportMetadata buildHttpExportMetadata(URI url, HttpResponse response, HttpConnectRecord httpConnectRecord, - HttpRetryEvent retryEvent) { + HttpAttemptEvent attemptEvent) { String msg = null; // order of precedence: lastException > response > null - if (retryEvent.getLastException() != null) { - msg = retryEvent.getLimitedExceptionMessage(); - retryEvent.setLastException(null); + if (attemptEvent.getLastException() != null) { + msg = attemptEvent.getLimitedExceptionMessage(); } else if (response != null) { msg = response.statusMessage(); } @@ -263,7 +258,7 @@ private HttpExportMetadata buildHttpExportMetadata(URI url, HttpResponse .message(msg) .receivedTime(LocalDateTime.now()) .recordId(httpConnectRecord.getHttpRecordId()) - .retryNum(retryEvent.getCurrentRetries()) + .retryNum(attemptEvent.getAttempts() - 1) .build(); } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java index 5f65f0749f..be2b52e737 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java @@ -83,14 +83,12 @@ void before() throws Exception { httpRequest -> { // 
Increase the number of requests received counter.incrementAndGet(); - JSONObject requestBody = JSON.parseObject(httpRequest.getBodyAsString()); return HttpResponse.response() .withContentType(MediaType.APPLICATION_JSON) .withStatusCode(HttpStatus.SC_OK) .withBody(new JSONObject() .fluentPut("code", 0) .fluentPut("message", "success") - .fluentPut("data", requestBody.getJSONObject("data").get("data")) .toJSONString() ); // .withDelay(TimeUnit.SECONDS, 10); } From 98fbf62073e2a9077798fcc8693769c42a726017 Mon Sep 17 00:00:00 2001 From: Zaki Date: Mon, 28 Oct 2024 11:44:18 +0800 Subject: [PATCH 43/51] [ISSUE #5108] Abstracting and transforming EventMeshFunction, and implementing FunctionRuntime. (#5109) * feat: Unified function module * feat: update something * feat: update FunctionRuntime * feat: update FunctionRuntime --- .../build.gradle | 5 - .../eventmesh-function-api}/build.gradle | 5 - .../api/AbstractEventMeshFunctionChain.java | 76 +++ .../function/api/EventMeshFunction.java | 43 ++ .../eventmesh-function-filter/build.gradle | 21 + .../function}/filter/PatternEntry.java | 4 +- .../condition/AnythingButCondition.java | 2 +- .../function}/filter/condition/Condition.java | 2 +- .../filter/condition/ConditionsBuilder.java | 2 +- .../filter/condition/ExistsCondition.java | 2 +- .../filter/condition/NumericCondition.java | 2 +- .../filter/condition/PrefixCondition.java | 2 +- .../filter/condition/SpecifiedCondition.java | 2 +- .../filter/condition/SuffixCondition.java | 2 +- .../function}/filter/pattern/Pattern.java | 25 +- .../filter/patternbuild/PatternBuilder.java | 36 +- .../function}/filter/PatternTest.java | 27 +- .../build.gradle | 16 +- .../transformer/ConstantTransformer.java | 2 +- .../function}/transformer/JsonPathParser.java | 15 +- .../transformer/OriginalTransformer.java | 3 +- .../function}/transformer/Template.java | 2 +- .../transformer/TemplateTransformer.java | 2 +- .../transformer/TransformException.java | 2 +- 
.../function/transformer/Transformer.java | 44 ++ .../transformer/TransformerBuilder.java | 22 +- .../transformer/TransformerParam.java | 2 +- .../transformer/TransformerType.java | 2 +- .../function}/transformer/Variable.java | 2 +- .../function}/transformer/TransformTest.java | 20 +- eventmesh-runtime-v2/build.gradle | 3 + .../runtime/function/FunctionRuntime.java | 465 ++++++++++++++++++ .../function/FunctionRuntimeConfig.java | 35 ++ .../function/FunctionRuntimeFactory.java | 2 +- .../StringEventMeshFunctionChain.java | 38 ++ .../src/main/resources/function.yaml | 21 + eventmesh-runtime/build.gradle | 5 +- .../eventmesh/runtime/boot/FilterEngine.java | 4 +- .../runtime/boot/TransformerEngine.java | 6 +- .../processor/SendAsyncEventProcessor.java | 4 +- .../http/push/AsyncHTTPPushRequest.java | 4 +- settings.gradle | 7 +- 42 files changed, 901 insertions(+), 85 deletions(-) rename {eventmesh-filter => eventmesh-function}/build.gradle (92%) rename {eventmesh-transformer => eventmesh-function/eventmesh-function-api}/build.gradle (92%) create mode 100644 eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java create mode 100644 eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java create mode 100644 eventmesh-function/eventmesh-function-filter/build.gradle rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/PatternEntry.java (94%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/AnythingButCondition.java (97%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/Condition.java (94%) rename 
{eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/ConditionsBuilder.java (97%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/ExistsCondition.java (95%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/NumericCondition.java (97%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/PrefixCondition.java (95%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/SpecifiedCondition.java (95%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/condition/SuffixCondition.java (95%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/pattern/Pattern.java (75%) rename {eventmesh-filter/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function}/filter/patternbuild/PatternBuilder.java (85%) rename {eventmesh-filter/src/test/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function}/filter/PatternTest.java (82%) rename eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java => eventmesh-function/eventmesh-function-transformer/build.gradle (70%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => 
eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/ConstantTransformer.java (95%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/JsonPathParser.java (85%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/OriginalTransformer.java (94%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/Template.java (96%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/TemplateTransformer.java (96%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/TransformException.java (95%) create mode 100644 eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/TransformerBuilder.java (69%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/TransformerParam.java (97%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/TransformerType.java (97%) rename {eventmesh-transformer/src/main/java/org/apache/eventmesh => 
eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function}/transformer/Variable.java (96%) rename {eventmesh-transformer/src/test/java/org/apache/eventmesh => eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function}/transformer/TransformTest.java (88%) create mode 100644 eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java create mode 100644 eventmesh-runtime-v2/src/main/resources/function.yaml diff --git a/eventmesh-filter/build.gradle b/eventmesh-function/build.gradle similarity index 92% rename from eventmesh-filter/build.gradle rename to eventmesh-function/build.gradle index ba88591b41..2944f98194 100644 --- a/eventmesh-filter/build.gradle +++ b/eventmesh-function/build.gradle @@ -14,8 +14,3 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - - -dependencies { - implementation project(":eventmesh-common") -} diff --git a/eventmesh-transformer/build.gradle b/eventmesh-function/eventmesh-function-api/build.gradle similarity index 92% rename from eventmesh-transformer/build.gradle rename to eventmesh-function/eventmesh-function-api/build.gradle index ba88591b41..2944f98194 100644 --- a/eventmesh-transformer/build.gradle +++ b/eventmesh-function/eventmesh-function-api/build.gradle @@ -14,8 +14,3 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - - -dependencies { - implementation project(":eventmesh-common") -} diff --git a/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java new file mode 100644 index 0000000000..8cbb0f9381 --- /dev/null +++ b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.function.api; + +import java.util.ArrayList; +import java.util.List; + +/** + * AbstractEventMeshFunctionChain is an abstract class that implements the {@link EventMeshFunction} interface and provides a framework + * for chaining multiple {@link EventMeshFunction} instances that operate on inputs of type {@code T} and produce outputs of type + * {@code R}. This class can be extended to create specific function chains with customized behavior for different + * data types. + * + *

The primary purpose of this class is to allow the sequential execution of functions, where the output of one + * function is passed as the input to the next function in the chain. The chain can be dynamically modified by adding + * functions either at the beginning or the end of the chain.

+ * + * @param the type of the input to the function + * @param the type of the result of the function + */ +public abstract class AbstractEventMeshFunctionChain implements EventMeshFunction { + + protected final List> functions; + + /** + * Default constructor that initializes an empty function chain. + */ + public AbstractEventMeshFunctionChain() { + this.functions = new ArrayList<>(); + } + + /** + * Constructor that initializes the function chain with a given list of functions. The functions will be executed + * in the order they are provided when the {@link #apply(Object)} method is called. + * + * @param functions the initial list of functions to be added to the chain + */ + public AbstractEventMeshFunctionChain(List> functions) { + this.functions = functions; + } + + /** + * Adds a {@link EventMeshFunction} to the beginning of the chain. The function will be executed first when the + * {@link #apply(Object)} method is called. + * + * @param function the function to be added to the beginning of the chain + */ + public void addFirst(EventMeshFunction function) { + this.functions.add(0, function); + } + + /** + * Adds a {@link EventMeshFunction} to the end of the chain. The function will be executed in sequence after all previously + * added functions when the {@link #apply(Object)} method is called. 
+ * + * @param function the function to be added to the end of the chain + */ + public void addLast(EventMeshFunction function) { + this.functions.add(function); + } +} \ No newline at end of file diff --git a/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java new file mode 100644 index 0000000000..973f097ae0 --- /dev/null +++ b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.function.api; + +/** + * EventMesh Interface for a function that accepts one argument and produces a result. This is a functional interface whose functional method is + * {@link #apply(Object)}. + * + *

This interface is similar to {@link java.util.function.Function}, + * but it is specifically designed for use within the EventMesh. It allows defining custom functions to process data or events in the EventMesh. The + * main use case is to encapsulate operations that can be passed around and applied to data or event messages in the EventMesh processing + * pipeline.

+ * + * @param the type of the input to the function + * @param the type of the result of the function + */ +public interface EventMeshFunction { + + /** + * Applies this function to the given argument within the context of the EventMesh module. This method encapsulates the logic for processing the + * input data and producing a result, which can be used in the EventMesh event processing pipeline. + * + * @param t the function argument, representing the input data or event to be processed + * @return the function result, representing the processed output + */ + R apply(T t); + +} \ No newline at end of file diff --git a/eventmesh-function/eventmesh-function-filter/build.gradle b/eventmesh-function/eventmesh-function-filter/build.gradle new file mode 100644 index 0000000000..21e28d7baf --- /dev/null +++ b/eventmesh-function/eventmesh-function-filter/build.gradle @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +dependencies { + implementation project(":eventmesh-common") + implementation project(":eventmesh-function:eventmesh-function-api") +} \ No newline at end of file diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/PatternEntry.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/PatternEntry.java similarity index 94% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/PatternEntry.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/PatternEntry.java index 5a2493a371..acc2d5f073 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/PatternEntry.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/PatternEntry.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.filter; +package org.apache.eventmesh.function.filter; -import org.apache.eventmesh.filter.condition.Condition; +import org.apache.eventmesh.function.filter.condition.Condition; import java.util.ArrayList; import java.util.List; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/AnythingButCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/AnythingButCondition.java similarity index 97% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/AnythingButCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/AnythingButCondition.java index 2d58136a70..d4f209225e 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/AnythingButCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/AnythingButCondition.java @@ -15,7 +15,7 @@ * limitations under the 
License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import java.util.ArrayList; import java.util.Iterator; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/Condition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/Condition.java similarity index 94% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/Condition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/Condition.java index fbb4276c7b..9890d5e0d3 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/Condition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/Condition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ConditionsBuilder.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ConditionsBuilder.java similarity index 97% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ConditionsBuilder.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ConditionsBuilder.java index 4e207663aa..961be85e5b 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ConditionsBuilder.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ConditionsBuilder.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ExistsCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ExistsCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ExistsCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ExistsCondition.java index 53c15bb297..c085ba6585 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ExistsCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ExistsCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/NumericCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/NumericCondition.java similarity index 97% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/NumericCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/NumericCondition.java index 5eb5374c7c..40eb16a75e 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/NumericCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/NumericCondition.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import java.util.ArrayList; import java.util.List; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/PrefixCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/PrefixCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/PrefixCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/PrefixCondition.java index 633ed1fb02..ff5d0313ce 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/PrefixCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/PrefixCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SpecifiedCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SpecifiedCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SpecifiedCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SpecifiedCondition.java index f9cc3fb5db..9eefb6b641 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SpecifiedCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SpecifiedCondition.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SuffixCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SuffixCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SuffixCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SuffixCondition.java index 805df0ee17..090df24834 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SuffixCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SuffixCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/pattern/Pattern.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/pattern/Pattern.java similarity index 75% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/pattern/Pattern.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/pattern/Pattern.java index 8abb306b84..955d9f59ef 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/pattern/Pattern.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/pattern/Pattern.java @@ -15,10 +15,11 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.pattern; +package org.apache.eventmesh.function.filter.pattern; import org.apache.eventmesh.common.utils.JsonPathUtils; -import org.apache.eventmesh.filter.PatternEntry; +import org.apache.eventmesh.function.api.EventMeshFunction; +import org.apache.eventmesh.function.filter.PatternEntry; import org.apache.commons.lang3.StringUtils; @@ -29,12 +30,11 @@ import com.fasterxml.jackson.databind.JsonNode; import com.jayway.jsonpath.PathNotFoundException; -public class Pattern { - private List requiredFieldList = new ArrayList<>(); - private List dataList = new ArrayList<>(); +public class Pattern implements EventMeshFunction { - private String content; + private final List requiredFieldList = new ArrayList<>(); + private final List dataList = new ArrayList<>(); public void addRequiredFieldList(PatternEntry patternEntry) { this.requiredFieldList.add(patternEntry); @@ -45,19 +45,22 @@ public void addDataList(PatternEntry patternEntry) { } public boolean filter(String content) { - this.content = content; - // this.jsonNode = JacksonUtils.STRING_TO_JSONNODE(content); + return matchRequiredFieldList(content, requiredFieldList) && matchRequiredFieldList(content, dataList); + } - return matchRequiredFieldList(requiredFieldList) && matchRequiredFieldList(dataList); + @Override + public String apply(String content) { + // filter content + return filter(content) ? 
content : null; } - private boolean matchRequiredFieldList(List dataList) { + private boolean matchRequiredFieldList(String content, List dataList) { for (final PatternEntry patternEntry : dataList) { JsonNode jsonElement = null; try { // content:filter - String matchRes = JsonPathUtils.matchJsonPathValue(this.content, patternEntry.getPatternPath()); + String matchRes = JsonPathUtils.matchJsonPathValue(content, patternEntry.getPatternPath()); if (StringUtils.isNoneBlank(matchRes)) { jsonElement = JsonPathUtils.parseStrict(matchRes); diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/patternbuild/PatternBuilder.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/patternbuild/PatternBuilder.java similarity index 85% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/patternbuild/PatternBuilder.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/patternbuild/PatternBuilder.java index 5f9a71d262..60193a4efa 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/patternbuild/PatternBuilder.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/patternbuild/PatternBuilder.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.patternbuild; +package org.apache.eventmesh.function.filter.patternbuild; import org.apache.eventmesh.common.exception.JsonException; -import org.apache.eventmesh.filter.PatternEntry; -import org.apache.eventmesh.filter.condition.Condition; -import org.apache.eventmesh.filter.condition.ConditionsBuilder; -import org.apache.eventmesh.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.PatternEntry; +import org.apache.eventmesh.function.filter.condition.Condition; +import org.apache.eventmesh.function.filter.condition.ConditionsBuilder; +import org.apache.eventmesh.function.filter.pattern.Pattern; import java.util.ArrayDeque; import java.util.Iterator; @@ -38,19 +38,33 @@ public class PatternBuilder { private static final ObjectMapper mapper = new ObjectMapper(); - public static Pattern build(String jsonStr) { - Pattern pattern = new Pattern(); - JsonNode jsonNode = null; + public static Pattern build(String jsonStr) { try { - jsonNode = mapper.readTree(jsonStr); + JsonNode jsonNode = mapper.readTree(jsonStr); + if (jsonNode.isEmpty() || !jsonNode.isObject()) { + return null; + } + return build(jsonNode); } catch (Exception e) { throw new JsonException("INVALID_JSON_STRING", e); } + } - if (jsonNode.isEmpty() || !jsonNode.isObject()) { - return null; + public static Pattern build(Map conditionMap) { + try { + JsonNode jsonNode = mapper.valueToTree(conditionMap); + if (jsonNode.isEmpty() || !jsonNode.isObject()) { + return null; + } + return build(jsonNode); + } catch (Exception e) { + throw new JsonException("INVALID_MAP", e); } + } + + public static Pattern build(JsonNode jsonNode) { + Pattern pattern = new Pattern(); // iter all json data Iterator> iterator = jsonNode.fields(); diff --git a/eventmesh-filter/src/test/java/org/apache/eventmesh/filter/PatternTest.java b/eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function/filter/PatternTest.java similarity index 82% rename 
from eventmesh-filter/src/test/java/org/apache/eventmesh/filter/PatternTest.java rename to eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function/filter/PatternTest.java index 207992b0c1..bc0aeff4ea 100644 --- a/eventmesh-filter/src/test/java/org/apache/eventmesh/filter/PatternTest.java +++ b/eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function/filter/PatternTest.java @@ -15,10 +15,15 @@ * limitations under the License. */ -package org.apache.eventmesh.filter; +package org.apache.eventmesh.function.filter; -import org.apache.eventmesh.filter.pattern.Pattern; -import org.apache.eventmesh.filter.patternbuild.PatternBuilder; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -144,4 +149,20 @@ public void testAnythingButFilter() { Assertions.assertEquals(false, res); } + @Test + public void testPrefixFilterMap() { + // Create the inner Map representing {prefix=eventmesh.} + Map innerMap = new HashMap<>(); + innerMap.put("prefix", "eventmesh."); + // Create a List representing [{prefix=eventmesh.}] + List> sourceList = Collections.singletonList(innerMap); + // Create the condition representing {source=[{prefix=eventmesh.}]} + Map condition = new HashMap<>(); + condition.put("source", sourceList); + + Pattern pattern = PatternBuilder.build(condition); + Boolean res = pattern.filter(event); + Assertions.assertEquals(true, res); + } + } diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java b/eventmesh-function/eventmesh-function-transformer/build.gradle similarity index 70% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java rename to 
eventmesh-function/eventmesh-function-transformer/build.gradle index 8239dfcb6e..6939bbd483 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java +++ b/eventmesh-function/eventmesh-function-transformer/build.gradle @@ -15,18 +15,8 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; - -import com.fasterxml.jackson.core.JsonProcessingException; - -/** - * EventMesh transformer interface, specified transformer implementation includes: - * 1. Constant - * 2. Original - * 3. Template - */ -public interface Transformer { - - String transform(String json) throws JsonProcessingException; +dependencies { + implementation project(":eventmesh-common") + implementation project(":eventmesh-function:eventmesh-function-api") } diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/ConstantTransformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/ConstantTransformer.java similarity index 95% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/ConstantTransformer.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/ConstantTransformer.java index dd7c20aace..ae77f149f7 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/ConstantTransformer.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/ConstantTransformer.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; public class ConstantTransformer implements Transformer { diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/JsonPathParser.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/JsonPathParser.java similarity index 85% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/JsonPathParser.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/JsonPathParser.java index a0ebde12d2..c578310dc4 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/JsonPathParser.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/JsonPathParser.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import org.apache.eventmesh.common.utils.JsonPathUtils; @@ -35,6 +35,19 @@ public List getVariablesList() { return variablesList; } + /** + * parser input jsonpath map into variable list + * + * @param jsonPathMap jsonpath map + */ + public JsonPathParser(Map jsonPathMap) { + for (Map.Entry entry : jsonPathMap.entrySet()) { + String name = entry.getKey(); + String value = entry.getValue(); + variablesList.add(new Variable(name, value)); + } + } + /** * parser input jsonpath string into variable list * diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/OriginalTransformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/OriginalTransformer.java similarity index 94% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/OriginalTransformer.java rename to 
eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/OriginalTransformer.java index 61aa059d59..59ce0350eb 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/OriginalTransformer.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/OriginalTransformer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; class OriginalTransformer implements Transformer { @@ -23,4 +23,5 @@ class OriginalTransformer implements Transformer { public String transform(String json) { return json; } + } diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Template.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Template.java similarity index 96% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Template.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Template.java index 19c3b5cec3..29d975c371 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Template.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Template.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import org.apache.commons.text.StringSubstitutor; diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TemplateTransformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TemplateTransformer.java similarity index 96% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TemplateTransformer.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TemplateTransformer.java index bc9907ff48..69cee68269 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TemplateTransformer.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TemplateTransformer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import java.util.List; diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformException.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformException.java similarity index 95% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformException.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformException.java index 1b11a29d80..aeb827fc88 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformException.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformException.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; /** * Transform exception diff --git a/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java new file mode 100644 index 0000000000..be0e815808 --- /dev/null +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.function.transformer; + +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.function.api.EventMeshFunction; + +import com.fasterxml.jackson.core.JsonProcessingException; + +/** + * EventMesh transformer interface, specified transformer implementation includes: + * 1. Constant + * 2. Original + * 3. 
Template + */ +public interface Transformer extends EventMeshFunction { + + String transform(String json) throws JsonProcessingException; + + @Override + default String apply(String content) { + try { + return transform(content); + } catch (JsonProcessingException e) { + throw new EventMeshException("Failed to transform content", e); + } + } + +} diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerBuilder.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerBuilder.java similarity index 69% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerBuilder.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerBuilder.java index e7277af73c..916f1ef7bc 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerBuilder.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerBuilder.java @@ -15,7 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; + +import java.util.Map; public class TransformerBuilder { @@ -32,9 +34,23 @@ public static Transformer buildTransformer(TransformerParam transformerParam) { } } - public static Transformer buildTemplateTransFormer(String jsonContent, String template) { - JsonPathParser jsonPathParser = new JsonPathParser(jsonContent); + /** + * build template transformer + * @param jsonContent json content, support string and map, other type will throw IllegalArgumentException + * @param template template string + * @return transformer + */ + @SuppressWarnings("unchecked") + public static Transformer buildTemplateTransFormer(Object jsonContent, String template) { Template templateEntry = new Template(template); + JsonPathParser jsonPathParser; + if (jsonContent instanceof String) { + jsonPathParser = new JsonPathParser((String) jsonContent); + } else if (jsonContent instanceof Map) { + jsonPathParser = new JsonPathParser((Map) jsonContent); + } else { + throw new TransformException("invalid json content"); + } return new TemplateTransformer(jsonPathParser, templateEntry); } diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerParam.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerParam.java similarity index 97% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerParam.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerParam.java index d747d7be4c..915111e01d 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerParam.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerParam.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; public class TransformerParam { diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerType.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerType.java similarity index 97% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerType.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerType.java index 2dc7809478..969c49ce80 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerType.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerType.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import java.util.Objects; diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Variable.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Variable.java similarity index 96% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Variable.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Variable.java index c9259d335c..aee80e1454 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Variable.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Variable.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; public class Variable { diff --git a/eventmesh-transformer/src/test/java/org/apache/eventmesh/transformer/TransformTest.java b/eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function/transformer/TransformTest.java similarity index 88% rename from eventmesh-transformer/src/test/java/org/apache/eventmesh/transformer/TransformTest.java rename to eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function/transformer/TransformTest.java index a55cde0baf..f9a444e8f9 100644 --- a/eventmesh-transformer/src/test/java/org/apache/eventmesh/transformer/TransformTest.java +++ b/eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function/transformer/TransformTest.java @@ -15,7 +15,10 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; + +import java.util.Collections; +import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -139,4 +142,19 @@ public void testTemplateTransFormerWithConstant() throws JsonProcessingException output); } + @Test + public void testTemplateTransFormerWithStringValueMap() throws JsonProcessingException { + Map content = Collections.singletonMap("data-name", "$.data.name"); + + String template = "Transformers test:data name is ${data-name}"; + Transformer transform = TransformerBuilder.buildTemplateTransFormer(content, template); + String output = transform.transform(EVENT); + Assertions.assertEquals("Transformers test:data name is test-transformer", output); + + Transformer transformer1 = TransformerBuilder.buildTemplateTransFormer(content, template); + String output1 = transformer1.transform(EVENT); + Assertions.assertEquals("Transformers test:data name is test-transformer", output1); + + } + } diff --git 
a/eventmesh-runtime-v2/build.gradle b/eventmesh-runtime-v2/build.gradle index 04b460ade3..74b9759b10 100644 --- a/eventmesh-runtime-v2/build.gradle +++ b/eventmesh-runtime-v2/build.gradle @@ -36,6 +36,9 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-connectors:eventmesh-connector-canal") implementation project(":eventmesh-connectors:eventmesh-connector-http") + implementation project(":eventmesh-function:eventmesh-function-api") + implementation project(":eventmesh-function:eventmesh-function-filter") + implementation project(":eventmesh-function:eventmesh-function-transformer") implementation project(":eventmesh-meta:eventmesh-meta-api") implementation project(":eventmesh-meta:eventmesh-meta-nacos") implementation project(":eventmesh-registry:eventmesh-registry-api") diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java index 66ba0a0c3d..4a68001909 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java @@ -17,22 +17,487 @@ package org.apache.eventmesh.runtime.function; +import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.common.config.connector.SinkConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceBlockingStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import 
org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.JobState; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.common.remote.request.FetchJobRequest; +import org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest; +import org.apache.eventmesh.common.remote.request.ReportJobRequest; +import org.apache.eventmesh.common.remote.response.FetchJobResponse; +import org.apache.eventmesh.common.utils.IPUtils; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.function.api.AbstractEventMeshFunctionChain; +import org.apache.eventmesh.function.api.EventMeshFunction; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder; +import org.apache.eventmesh.function.transformer.Transformer; +import org.apache.eventmesh.function.transformer.TransformerBuilder; +import org.apache.eventmesh.function.transformer.TransformerType; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.factory.ConnectorPluginFactory; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; +import org.apache.commons.lang3.StringUtils; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Random; +import java.util.concurrent.ExecutorService; +import 
java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.stub.StreamObserver; + +import com.google.protobuf.Any; +import com.google.protobuf.UnsafeByteOperations; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j public class FunctionRuntime implements Runtime { + private final RuntimeInstanceConfig runtimeInstanceConfig; + + private ManagedChannel channel; + + private AdminServiceStub adminServiceStub; + + private AdminServiceBlockingStub adminServiceBlockingStub; + + StreamObserver responseObserver; + + StreamObserver requestObserver; + + private final LinkedBlockingQueue queue; + + private FunctionRuntimeConfig functionRuntimeConfig; + + private AbstractEventMeshFunctionChain functionChain; + + private Sink sinkConnector; + + private Source sourceConnector; + + private final ExecutorService sourceService = ThreadPoolFactory.createSingleExecutor("eventMesh-sourceService"); + + private final ExecutorService sinkService = ThreadPoolFactory.createSingleExecutor("eventMesh-sinkService"); + + private final ScheduledExecutorService heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(); + + private volatile boolean isRunning = false; + + private volatile boolean isFailed = false; + + private String adminServerAddr; + + + public FunctionRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { + this.runtimeInstanceConfig = runtimeInstanceConfig; + this.queue = new LinkedBlockingQueue<>(1000); + } + + @Override public void init() throws Exception { + // load function runtime config from local file + this.functionRuntimeConfig = ConfigService.getInstance().buildConfigInstance(FunctionRuntimeConfig.class); + + // init admin service + initAdminService(); + + // get remote config from admin service and update local config + getAndUpdateRemoteConfig(); + + // init 
connector service + initConnectorService(); + + // report status to admin server + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.INIT); + } + + private void initAdminService() { + adminServerAddr = getRandomAdminServerAddr(runtimeInstanceConfig.getAdminServiceAddr()); + // create gRPC channel + channel = ManagedChannelBuilder.forTarget(adminServerAddr).usePlaintext().build(); + + adminServiceStub = AdminServiceGrpc.newStub(channel).withWaitForReady(); + + adminServiceBlockingStub = AdminServiceGrpc.newBlockingStub(channel).withWaitForReady(); + + responseObserver = new StreamObserver() { + @Override + public void onNext(Payload response) { + log.info("runtime receive message: {} ", response); + } + + @Override + public void onError(Throwable t) { + log.error("runtime receive error message: {}", t.getMessage()); + } + + @Override + public void onCompleted() { + log.info("runtime finished receive message and completed"); + } + }; + + requestObserver = adminServiceStub.invokeBiStream(responseObserver); + } + + private String getRandomAdminServerAddr(String adminServerAddrList) { + String[] addresses = adminServerAddrList.split(";"); + if (addresses.length == 0) { + throw new IllegalArgumentException("Admin server address list is empty"); + } + Random random = new Random(); + int randomIndex = random.nextInt(addresses.length); + return addresses[randomIndex]; + } + + private void getAndUpdateRemoteConfig() { + String jobId = functionRuntimeConfig.getJobID(); + FetchJobRequest jobRequest = new FetchJobRequest(); + jobRequest.setJobID(jobId); + + Metadata metadata = Metadata.newBuilder().setType(FetchJobRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(jobRequest)))).build()) + .build(); + Payload response = adminServiceBlockingStub.invoke(request); + FetchJobResponse jobResponse = null; + 
if (response.getMetadata().getType().equals(FetchJobResponse.class.getSimpleName())) { + jobResponse = JsonUtils.parseObject(response.getBody().getValue().toStringUtf8(), FetchJobResponse.class); + } + + if (jobResponse == null || jobResponse.getErrorCode() != ErrorCode.SUCCESS) { + if (jobResponse != null) { + log.error("Failed to get remote config from admin server. ErrorCode: {}, Response: {}", + jobResponse.getErrorCode(), jobResponse); + } else { + log.error("Failed to get remote config from admin server. "); + } + isFailed = true; + try { + stop(); + } catch (Exception e) { + log.error("Failed to stop after exception", e); + } + throw new RuntimeException("Failed to get remote config from admin server."); + } + + // update local config + // source + functionRuntimeConfig.setSourceConnectorType(jobResponse.getTransportType().getSrc().getName()); + functionRuntimeConfig.setSourceConnectorDesc(jobResponse.getConnectorConfig().getSourceConnectorDesc()); + functionRuntimeConfig.setSourceConnectorConfig(jobResponse.getConnectorConfig().getSourceConnectorConfig()); + + // sink + functionRuntimeConfig.setSinkConnectorType(jobResponse.getTransportType().getDst().getName()); + functionRuntimeConfig.setSinkConnectorDesc(jobResponse.getConnectorConfig().getSinkConnectorDesc()); + functionRuntimeConfig.setSinkConnectorConfig(jobResponse.getConnectorConfig().getSinkConnectorConfig()); + + // TODO: update functionConfigs } + + private void initConnectorService() throws Exception { + final JobType jobType = (JobType) functionRuntimeConfig.getRuntimeConfig().get("jobType"); + + // create sink connector + ConnectorCreateService sinkConnectorCreateService = + ConnectorPluginFactory.createConnector(functionRuntimeConfig.getSinkConnectorType() + "-Sink"); + this.sinkConnector = (Sink) sinkConnectorCreateService.create(); + + // parse sink config and init sink connector + SinkConfig sinkConfig = (SinkConfig) ConfigUtil.parse(functionRuntimeConfig.getSinkConnectorConfig(), 
sinkConnector.configClass()); + SinkConnectorContext sinkConnectorContext = new SinkConnectorContext(); + sinkConnectorContext.setSinkConfig(sinkConfig); + sinkConnectorContext.setRuntimeConfig(functionRuntimeConfig.getRuntimeConfig()); + sinkConnectorContext.setJobType(jobType); + sinkConnector.init(sinkConnectorContext); + + // create source connector + ConnectorCreateService sourceConnectorCreateService = + ConnectorPluginFactory.createConnector(functionRuntimeConfig.getSourceConnectorType() + "-Source"); + this.sourceConnector = (Source) sourceConnectorCreateService.create(); + + // parse source config and init source connector + SourceConfig sourceConfig = (SourceConfig) ConfigUtil.parse(functionRuntimeConfig.getSourceConnectorConfig(), sourceConnector.configClass()); + SourceConnectorContext sourceConnectorContext = new SourceConnectorContext(); + sourceConnectorContext.setSourceConfig(sourceConfig); + sourceConnectorContext.setRuntimeConfig(functionRuntimeConfig.getRuntimeConfig()); + sourceConnectorContext.setJobType(jobType); + + sourceConnector.init(sourceConnectorContext); + } + + private void reportJobRequest(String jobId, JobState jobState) { + ReportJobRequest reportJobRequest = new ReportJobRequest(); + reportJobRequest.setJobID(jobId); + reportJobRequest.setState(jobState); + Metadata metadata = Metadata.newBuilder() + .setType(ReportJobRequest.class.getSimpleName()) + .build(); + Payload payload = Payload.newBuilder() + .setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportJobRequest)))) + .build()) + .build(); + requestObserver.onNext(payload); + } + + @Override public void start() throws Exception { + this.isRunning = true; + + // build function chain + this.functionChain = buildFunctionChain(functionRuntimeConfig.getFunctionConfigs()); + // start heart beat + this.heartBeatExecutor.scheduleAtFixedRate(() -> { + + ReportHeartBeatRequest heartBeat = new 
ReportHeartBeatRequest(); + heartBeat.setAddress(IPUtils.getLocalAddress()); + heartBeat.setReportedTimeStamp(String.valueOf(System.currentTimeMillis())); + heartBeat.setJobID(functionRuntimeConfig.getJobID()); + + Metadata metadata = Metadata.newBuilder().setType(ReportHeartBeatRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(heartBeat)))).build()) + .build(); + + requestObserver.onNext(request); + }, 5, 5, TimeUnit.SECONDS); + + // start sink service + this.sinkService.execute(() -> { + try { + startSinkConnector(); + } catch (Exception e) { + isFailed = true; + log.error("Sink Connector [{}] failed to start.", sinkConnector.name(), e); + try { + this.stop(); + } catch (Exception ex) { + log.error("Failed to stop after exception", ex); + } + throw new RuntimeException(e); + } + }); + + // start source service + this.sourceService.execute(() -> { + try { + startSourceConnector(); + } catch (Exception e) { + isFailed = true; + log.error("Source Connector [{}] failed to start.", sourceConnector.name(), e); + try { + this.stop(); + } catch (Exception ex) { + log.error("Failed to stop after exception", ex); + } + throw new RuntimeException(e); + } + }); + + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.RUNNING); } + private StringEventMeshFunctionChain buildFunctionChain(List> functionConfigs) { + StringEventMeshFunctionChain functionChain = new StringEventMeshFunctionChain(); + + // build function chain + for (Map functionConfig : functionConfigs) { + String functionType = String.valueOf(functionConfig.getOrDefault("functionType", "")); + if (StringUtils.isEmpty(functionType)) { + throw new IllegalArgumentException("'functionType' is required for function"); + } + + // build function based on functionType + EventMeshFunction function; + switch (functionType) { + case "filter": + 
function = buildFilter(functionConfig); + break; + case "transformer": + function = buildTransformer(functionConfig); + break; + default: + throw new IllegalArgumentException( + "Invalid functionType: '" + functionType + "'. Supported functionType: 'filter', 'transformer'"); + } + + // add function to functionChain + functionChain.addLast(function); + } + + return functionChain; + } + + + @SuppressWarnings("unchecked") + private Pattern buildFilter(Map functionConfig) { + // get condition from attributes + Object condition = functionConfig.get("condition"); + if (condition == null) { + throw new IllegalArgumentException("'condition' is required for filter function"); + } + if (condition instanceof String) { + return PatternBuilder.build(String.valueOf(condition)); + } else if (condition instanceof Map) { + return PatternBuilder.build((Map) condition); + } else { + throw new IllegalArgumentException("Invalid condition"); + } + } + + private Transformer buildTransformer(Map functionConfig) { + // get transformerType from attributes + String transformerTypeStr = String.valueOf(functionConfig.getOrDefault("transformerType", "")).toLowerCase(); + TransformerType transformerType = TransformerType.getItem(transformerTypeStr); + if (transformerType == null) { + throw new IllegalArgumentException( + "Invalid transformerType: '" + transformerTypeStr + + "'. 
Supported transformerType: 'constant', 'template', 'original' (case insensitive)"); + } + + // build transformer + Transformer transformer = null; + + switch (transformerType) { + case CONSTANT: + // check value + String content = String.valueOf(functionConfig.getOrDefault("content", "")); + if (StringUtils.isEmpty(content)) { + throw new IllegalArgumentException("'content' is required for constant transformer"); + } + transformer = TransformerBuilder.buildConstantTransformer(content); + break; + case TEMPLATE: + // check value and template + Object valueMap = functionConfig.get("valueMap"); + String template = String.valueOf(functionConfig.getOrDefault("template", "")); + if (valueMap == null || StringUtils.isEmpty(template)) { + throw new IllegalArgumentException("'valueMap' and 'template' are required for template transformer"); + } + transformer = TransformerBuilder.buildTemplateTransFormer(valueMap, template); + break; + case ORIGINAL: + // ORIGINAL transformer does not need any parameter + break; + default: + throw new IllegalArgumentException( + "Invalid transformerType: '" + transformerType + "', supported transformerType: 'CONSTANT', 'TEMPLATE', 'ORIGINAL'"); + } + + return transformer; + } + + + private void startSinkConnector() throws Exception { + // start sink connector + this.sinkConnector.start(); + + // try to get data from queue and send it. + while (this.isRunning) { + ConnectRecord connectRecord = null; + try { + connectRecord = queue.poll(5, TimeUnit.SECONDS); + } catch (InterruptedException e) { + log.error("Failed to poll data from queue.", e); + Thread.currentThread().interrupt(); + } + + // send data if not null + if (connectRecord != null) { + sinkConnector.put(Collections.singletonList(connectRecord)); + } + } + } + + private void startSourceConnector() throws Exception { + // start source connector + this.sourceConnector.start(); + + // try to get data from source connector and handle it. 
+ while (this.isRunning) { + List connectorRecordList = sourceConnector.poll(); + + // handle data + if (connectorRecordList != null && !connectorRecordList.isEmpty()) { + for (ConnectRecord connectRecord : connectorRecordList) { + if (connectRecord == null || connectRecord.getData() == null) { + // If data is null, just put it into queue. + this.queue.put(connectRecord); + } else { + // Apply function chain to data + String data = functionChain.apply((String) connectRecord.getData()); + if (data != null) { + if (log.isDebugEnabled()) { + log.debug("Function chain applied. Original data: {}, Transformed data: {}", connectRecord.getData(), data); + } + connectRecord.setData(data); + this.queue.put(connectRecord); + } else if (log.isDebugEnabled()) { + log.debug("Data filtered out by function chain. Original data: {}", connectRecord.getData()); + } + } + } + } + } + } + + @Override public void stop() throws Exception { + log.info("FunctionRuntime is stopping..."); + + isRunning = false; + + if (isFailed) { + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.FAIL); + } else { + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.COMPLETE); + } + + sinkConnector.stop(); + sourceConnector.stop(); + sinkService.shutdown(); + sourceService.shutdown(); + heartBeatExecutor.shutdown(); + + requestObserver.onCompleted(); + if (channel != null && !channel.isShutdown()) { + channel.shutdown(); + } + log.info("FunctionRuntime stopped."); } } diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java index 40aec65e99..4d57c83e82 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java @@ -17,5 +17,40 @@ package org.apache.eventmesh.runtime.function; 
+import org.apache.eventmesh.common.config.Config; + +import java.util.List; +import java.util.Map; + + +import lombok.Data; + +@Data +@Config(path = "classPath://function.yaml") public class FunctionRuntimeConfig { + + private String functionRuntimeInstanceId; + + private String taskID; + + private String jobID; + + private String region; + + private Map runtimeConfig; + + private String sourceConnectorType; + + private String sourceConnectorDesc; + + private Map sourceConnectorConfig; + + private String sinkConnectorType; + + private String sinkConnectorDesc; + + private Map sinkConnectorConfig; + + private List> functionConfigs; + } diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java index 3ba91986cb..40346e272f 100644 --- a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java @@ -30,7 +30,7 @@ public void init() throws Exception { @Override public Runtime createRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { - return null; + return new FunctionRuntime(runtimeInstanceConfig); } @Override diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java new file mode 100644 index 0000000000..0035999ecb --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.function; + +import org.apache.eventmesh.function.api.AbstractEventMeshFunctionChain; +import org.apache.eventmesh.function.api.EventMeshFunction; + +/** + * ConnectRecord Function Chain. + */ +public class StringEventMeshFunctionChain extends AbstractEventMeshFunctionChain { + + @Override + public String apply(String content) { + for (EventMeshFunction function : functions) { + if (content == null) { + break; + } + content = function.apply(content); + } + return content; + } +} diff --git a/eventmesh-runtime-v2/src/main/resources/function.yaml b/eventmesh-runtime-v2/src/main/resources/function.yaml new file mode 100644 index 0000000000..eae2b063ec --- /dev/null +++ b/eventmesh-runtime-v2/src/main/resources/function.yaml @@ -0,0 +1,21 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +taskID: c6233632-ab9a-4aba-904f-9d22fba6aa74 +jobID: 8190fe5b-1f9b-4815-8983-2467e76edbf0 +region: region1 + diff --git a/eventmesh-runtime/build.gradle b/eventmesh-runtime/build.gradle index 95924faad4..b016e18bfe 100644 --- a/eventmesh-runtime/build.gradle +++ b/eventmesh-runtime/build.gradle @@ -36,8 +36,10 @@ dependencies { implementation "commons-validator:commons-validator" implementation project(":eventmesh-common") - implementation project(":eventmesh-filter") implementation project(":eventmesh-spi") + implementation project(":eventmesh-function:eventmesh-function-api") + implementation project(":eventmesh-function:eventmesh-function-filter") + implementation project(":eventmesh-function:eventmesh-function-transformer") implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") implementation project(":eventmesh-storage-plugin:eventmesh-storage-standalone") implementation project(":eventmesh-storage-plugin:eventmesh-storage-rocketmq") @@ -45,7 +47,6 @@ dependencies { implementation project(":eventmesh-security-plugin:eventmesh-security-acl") implementation project(":eventmesh-security-plugin:eventmesh-security-auth-http-basic") implementation project(":eventmesh-security-plugin:eventmesh-security-auth-token") - implementation project(":eventmesh-transformer") implementation project(":eventmesh-meta:eventmesh-meta-api") implementation project(":eventmesh-meta:eventmesh-meta-nacos") implementation project(":eventmesh-protocol-plugin:eventmesh-protocol-api") diff --git 
a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java index bf6eb9dadc..14677dc690 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java @@ -19,8 +19,8 @@ import org.apache.eventmesh.api.meta.MetaServiceListener; import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.filter.pattern.Pattern; -import org.apache.eventmesh.filter.patternbuild.PatternBuilder; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerGroupManager; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerManager; import org.apache.eventmesh.runtime.core.protocol.producer.EventMeshProducer; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java index 551bcb2799..1d2f8ca30c 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java @@ -19,14 +19,14 @@ import org.apache.eventmesh.api.meta.MetaServiceListener; import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.function.transformer.Transformer; +import org.apache.eventmesh.function.transformer.TransformerBuilder; +import org.apache.eventmesh.function.transformer.TransformerParam; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerGroupManager; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerManager; import 
org.apache.eventmesh.runtime.core.protocol.producer.EventMeshProducer; import org.apache.eventmesh.runtime.core.protocol.producer.ProducerManager; import org.apache.eventmesh.runtime.meta.MetaStorage; -import org.apache.eventmesh.transformer.Transformer; -import org.apache.eventmesh.transformer.TransformerBuilder; -import org.apache.eventmesh.transformer.TransformerParam; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java index b30238a28c..0e41d827ab 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java @@ -31,7 +31,8 @@ import org.apache.eventmesh.common.utils.IPUtils; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.common.utils.RandomStringUtils; -import org.apache.eventmesh.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.transformer.Transformer; import org.apache.eventmesh.protocol.api.ProtocolAdaptor; import org.apache.eventmesh.protocol.api.ProtocolPluginFactory; import org.apache.eventmesh.runtime.acl.Acl; @@ -44,7 +45,6 @@ import org.apache.eventmesh.runtime.util.EventMeshUtil; import org.apache.eventmesh.runtime.util.RemotingHelper; import org.apache.eventmesh.trace.api.common.EventMeshTraceConstants; -import org.apache.eventmesh.transformer.Transformer; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java 
b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java index be95971536..69506ede8a 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java @@ -30,14 +30,14 @@ import org.apache.eventmesh.common.utils.IPUtils; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.common.utils.RandomStringUtils; -import org.apache.eventmesh.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.transformer.Transformer; import org.apache.eventmesh.protocol.api.ProtocolAdaptor; import org.apache.eventmesh.protocol.api.ProtocolPluginFactory; import org.apache.eventmesh.runtime.constants.EventMeshConstants; import org.apache.eventmesh.runtime.core.protocol.http.consumer.HandleMsgContext; import org.apache.eventmesh.runtime.util.EventMeshUtil; import org.apache.eventmesh.runtime.util.WebhookUtil; -import org.apache.eventmesh.transformer.Transformer; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateFormatUtils; diff --git a/settings.gradle b/settings.gradle index a22363c809..c49a5c4242 100644 --- a/settings.gradle +++ b/settings.gradle @@ -47,8 +47,6 @@ include 'eventmesh-common' include 'eventmesh-starter' include 'eventmesh-examples' include 'eventmesh-spi' -include 'eventmesh-filter' -include 'eventmesh-transformer' include 'eventmesh-openconnect:eventmesh-openconnect-java' include 'eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api' @@ -133,3 +131,8 @@ include 'eventmesh-registry' include 'eventmesh-registry:eventmesh-registry-api' include 'eventmesh-registry:eventmesh-registry-nacos' +include 'eventmesh-function' +include 'eventmesh-function:eventmesh-function-api' 
+include 'eventmesh-function:eventmesh-function-filter' +include 'eventmesh-function:eventmesh-function-transformer' + From 510cf2d401db48034c2f30ad55b5c41361b96685 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melis=20=C3=96lmez?= <77929541+melisolmez@users.noreply.github.com> Date: Mon, 28 Oct 2024 06:58:37 +0300 Subject: [PATCH 44/51] [ISSUE #4991] add unit test for LogUtil (#5100) --- .../eventmesh/common/utils/LogUtilTest.java | 199 ++++++++++++++++++ 1 file changed, 199 insertions(+) create mode 100644 eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java new file mode 100644 index 0000000000..03c52dadad --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.eventmesh.common.utils; + +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import java.util.function.Supplier; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.slf4j.Logger; +import org.slf4j.spi.LoggingEventBuilder; + +@ExtendWith(MockitoExtension.class) +class LogUtilTest { + + private Logger mockLogger; + private LoggingEventBuilder mockEventBuilder; + private Supplier supplier; + private String logMessage; + + @BeforeEach + void setUp() { + + mockLogger = mock(Logger.class); + mockEventBuilder = mock(LoggingEventBuilder.class); + + supplier = () -> "{\"orderId\": 12345, \"amount\": 100}"; + logMessage = "Processing order with data: {}"; + } + + @Test + void testDebugLogsWithSupplier() { + + doReturn(mockEventBuilder).when(mockLogger).atDebug(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + + LogUtil.debug(mockLogger, logMessage, supplier); + + verify(mockLogger).atDebug(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testDebugLogsWithSupplierAndException() { + Throwable throwable = new RuntimeException("Order processing failed"); + + + doReturn(mockEventBuilder).when(mockLogger).atDebug(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).setCause(throwable); + + LogUtil.debug(mockLogger, logMessage, supplier, throwable); + + verify(mockLogger).atDebug(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).setCause(throwable); + verify(mockEventBuilder).log(logMessage); + } + + @Test + void testDebugLogsWithSuppliers() { + + Supplier 
supplier2 = () -> "{\"orderId\": 67890, \"amount\": 200}"; + + doReturn(mockEventBuilder).when(mockLogger).atDebug(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier2)); + + LogUtil.debug(mockLogger, logMessage, supplier, supplier2); + + verify(mockLogger).atDebug(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).addArgument(same(supplier2)); + verify(mockEventBuilder).log(logMessage); + } + + @Test + void testInfoLogsWithSupplier() { + + doReturn(mockEventBuilder).when(mockLogger).atInfo(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + + LogUtil.info(mockLogger, logMessage, supplier); + + verify(mockLogger).atInfo(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testInfoLogsWithSupplierAndException() { + + Throwable throwable = new RuntimeException("Order processing failed"); + + doReturn(mockEventBuilder).when(mockLogger).atInfo(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).setCause(throwable); + + LogUtil.info(mockLogger, logMessage, supplier, throwable); + + verify(mockLogger).atInfo(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).setCause(throwable); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testInfoLogsWithSuppliers() { + + Supplier supplier2 = () -> "{\"orderId\": 67890, \"amount\": 200}"; + + doReturn(mockEventBuilder).when(mockLogger).atInfo(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier2)); + + LogUtil.info(mockLogger, logMessage, supplier, supplier2); + + verify(mockLogger).atInfo(); + 
verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).addArgument(same(supplier2)); + verify(mockEventBuilder).log(logMessage); + } + + @Test + void testWarnLogsWithSupplier() { + + doReturn(mockEventBuilder).when(mockLogger).atWarn(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + + LogUtil.warn(mockLogger, logMessage, supplier); + + verify(mockLogger).atWarn(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testWarnLogsWithSupplierAndException() { + + Throwable throwable = new RuntimeException("Order processing failed"); + + doReturn(mockEventBuilder).when(mockLogger).atWarn(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).setCause(throwable); + + LogUtil.warn(mockLogger, logMessage, supplier, throwable); + + verify(mockLogger).atWarn(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).setCause(throwable); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testWarnLogsWithSuppliers() { + + Supplier supplier2 = () -> "{\"orderId\": 67890, \"amount\": 200}"; + + doReturn(mockEventBuilder).when(mockLogger).atWarn(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier2)); + + LogUtil.warn(mockLogger, logMessage, supplier, supplier2); + + verify(mockLogger).atWarn(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).addArgument(same(supplier2)); + verify(mockEventBuilder).log(logMessage); + } + +} \ No newline at end of file From da5fdbfdbd18fb10cba7606d953e8ab2edabee56 Mon Sep 17 00:00:00 2001 From: Zaki Date: Mon, 28 Oct 2024 19:08:59 +0800 Subject: [PATCH 45/51] [ISSUE #5101] Define and standardize some common configurations for all Sources(#5102) --- 
.../common/config/connector/Constants.java | 17 +++++++ .../common/config/connector/PollConfig.java | 43 ++++++++++++++++ .../common/config/connector/SourceConfig.java | 3 ++ .../connector/http/SourceConnectorConfig.java | 8 +-- .../mq/kafka/SourceConnectorConfig.java | 1 - .../connector/CanalSourceCheckConnector.java | 8 ++- .../connector/CanalSourceFullConnector.java | 7 ++- .../connector/ChatGPTSourceConnector.java | 22 ++++++--- .../common/SynchronizedCircularFifoQueue.java | 1 - .../http/source/HttpSourceConnector.java | 49 ++++++++++++------- .../http/source/protocol/Protocol.java | 5 +- .../protocol/impl/CloudEventProtocol.java | 5 +- .../source/protocol/impl/CommonProtocol.java | 4 +- .../source/protocol/impl/GitHubProtocol.java | 4 +- .../src/main/resources/source-config.yml | 2 - .../src/test/resources/source-config.yml | 2 - .../jdbc/source/JdbcSourceConnector.java | 9 ++-- .../jdbc/source/TaskManagerCoordinator.java | 29 ++++++----- .../connector/KafkaSourceConnector.java | 6 +-- .../connector/MongodbSourceConnector.java | 24 ++++++--- .../OpenFunctionSourceConnector.java | 30 +++++++++--- .../connector/PravegaSourceConnector.java | 24 ++++++--- .../connector/RabbitMQSourceConnector.java | 24 ++++++--- .../connector/RedisSourceConnector.java | 24 ++++++--- .../connector/SpringSourceConnector.java | 28 ++++++++--- 25 files changed, 267 insertions(+), 112 deletions(-) create mode 100644 eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java index 74576e843a..817efb6d3a 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java @@ -30,4 +30,21 @@ public class Constants { public static 
final int DEFAULT_ATTEMPT = 3; public static final int DEFAULT_PORT = 8080; + + // ======================== Source Constants ======================== + /** + * Default capacity + */ + public static final int DEFAULT_CAPACITY = 1024; + + /** + * Default poll batch size + */ + public static final int DEFAULT_POLL_BATCH_SIZE = 10; + + /** + * Default poll timeout (unit: ms) + */ + public static final long DEFAULT_POLL_TIMEOUT = 5000L; + } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java new file mode 100644 index 0000000000..cf3f06be91 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.config.connector; + +import lombok.Data; + +/** + * Source Poll Config + */ +@Data +public class PollConfig { + + /** + * Capacity of the poll queue + */ + private int capacity = Constants.DEFAULT_CAPACITY; + + /** + * Max batch size of the poll + */ + private int maxBatchSize = Constants.DEFAULT_POLL_BATCH_SIZE; + + /** + * Max wait time of the poll + */ + private long maxWaitTime = Constants.DEFAULT_POLL_TIMEOUT; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java index 7630631258..f7bc42970c 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java @@ -30,4 +30,7 @@ public abstract class SourceConfig extends Config { private OffsetStorageConfig offsetStorageConfig; + // Polling configuration, e.g. capacity, batch size, wait time, etc. 
+ private PollConfig pollConfig = new PollConfig(); + } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java index 58d910bf2d..282f883332 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java @@ -44,13 +44,7 @@ public class SourceConnectorConfig { */ private int maxFormAttributeSize = 1024 * 1024; - // max size of the queue, default 1000 - private int maxStorageSize = 1000; - - // batch size, default 10 - private int batchSize = 10; - - // protocol, default CloudEvent + // protocol, default Common private String protocol = "Common"; // extra config, e.g. GitHub secret diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java index 21fb18eb23..eb7406f664 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java @@ -32,5 +32,4 @@ public class SourceConnectorConfig { private String enableAutoCommit = "false"; private String sessionTimeoutMS = "10000"; private String maxPollRecords = "1000"; - private int pollTimeOut = 100; } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java index 841c9a4814..bd85f03240 
100644 --- a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java @@ -50,12 +50,14 @@ @Slf4j public class CanalSourceCheckConnector extends AbstractComponent implements Source, ConnectorCreateService { + private CanalSourceFullConfig config; private CanalFullPositionMgr positionMgr; private RdbTableMgr tableMgr; private ThreadPoolExecutor executor; - private final BlockingQueue> queue = new LinkedBlockingQueue<>(); + private BlockingQueue> queue; private final AtomicBoolean flag = new AtomicBoolean(true); + private long maxPollWaitTime; @Override protected void run() throws Exception { @@ -140,6 +142,8 @@ private void init() { DatabaseConnection.initSourceConnection(); this.tableMgr = new RdbTableMgr(config.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); this.positionMgr = new CanalFullPositionMgr(config, tableMgr); + this.maxPollWaitTime = config.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(config.getPollConfig().getCapacity()); } @Override @@ -168,7 +172,7 @@ public void onException(ConnectRecord record) { public List poll() { while (flag.get()) { try { - List records = queue.poll(5, TimeUnit.SECONDS); + List records = queue.poll(maxPollWaitTime, TimeUnit.MILLISECONDS); if (records == null || records.isEmpty()) { continue; } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java index c2632ee472..09e2e0dcf7 100644 --- 
a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java @@ -56,8 +56,9 @@ public class CanalSourceFullConnector extends AbstractComponent implements Sourc private CanalFullPositionMgr positionMgr; private RdbTableMgr tableMgr; private ThreadPoolExecutor executor; - private final BlockingQueue> queue = new LinkedBlockingQueue<>(); + private BlockingQueue> queue; private final AtomicBoolean flag = new AtomicBoolean(true); + private long maxPollWaitTime; @Override protected void run() throws Exception { @@ -137,6 +138,8 @@ private void init() { DatabaseConnection.initSourceConnection(); this.tableMgr = new RdbTableMgr(config.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); this.positionMgr = new CanalFullPositionMgr(config, tableMgr); + this.maxPollWaitTime = config.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(config.getPollConfig().getCapacity()); } @Override @@ -166,7 +169,7 @@ public void onException(ConnectRecord record) { public List poll() { while (flag.get()) { try { - List records = queue.poll(5, TimeUnit.SECONDS); + List records = queue.poll(maxPollWaitTime, TimeUnit.MILLISECONDS); if (records == null || records.isEmpty()) { continue; } diff --git a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java index 6b122087e5..1b6955feb2 100644 --- a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java +++ 
b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java @@ -61,8 +61,6 @@ @Slf4j public class ChatGPTSourceConnector implements Source { - private static final int DEFAULT_BATCH_SIZE = 10; - private ChatGPTSourceConfig sourceConfig; private BlockingQueue queue; private HttpServer server; @@ -79,6 +77,9 @@ public class ChatGPTSourceConnector implements Source { private static final String APPLICATION_JSON = "application/json"; private static final String TEXT_PLAIN = "text/plain"; + private int maxBatchSize; + private long maxPollWaitTime; + @Override public Class configClass() { @@ -129,7 +130,9 @@ private void doInit() { if (StringUtils.isNotEmpty(parsePromptTemplateStr)) { this.parseHandler = new ParseHandler(openaiManager, parsePromptTemplateStr); } - this.queue = new LinkedBlockingQueue<>(1024); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); final Vertx vertx = Vertx.vertx(); final Router router = Router.router(vertx); router.route().path(this.sourceConfig.connectorConfig.getPath()).method(HttpMethod.POST).handler(BodyHandler.create()).handler(ctx -> { @@ -239,14 +242,21 @@ public void stop() { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int i = 0; i < DEFAULT_BATCH_SIZE; i++) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int i = 0; i < maxBatchSize; i++) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time 
for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java index 0564e58734..9989552d1e 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java @@ -142,7 +142,6 @@ public synchronized List fetchRange(int start, int end, boolean removed) { count++; } return items; - } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java index 2b2a01a9dd..6c78badaf4 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java @@ -20,7 +20,6 @@ import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.config.connector.http.HttpSourceConfig; import org.apache.eventmesh.common.exception.EventMeshException; -import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; import org.apache.eventmesh.connector.http.source.protocol.Protocol; import org.apache.eventmesh.connector.http.source.protocol.ProtocolFactory; import 
org.apache.eventmesh.openconnect.api.ConnectorCreateService; @@ -30,8 +29,9 @@ import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.util.ArrayList; -import java.util.Collections; import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import io.netty.handler.codec.http.HttpResponseStatus; @@ -50,9 +50,11 @@ public class HttpSourceConnector implements Source, ConnectorCreateService queue; + private BlockingQueue queue; - private int batchSize; + private int maxBatchSize; + + private long maxPollWaitTime; private Route route; @@ -92,11 +94,11 @@ public void init(ConnectorContext connectorContext) { private void doInit() { // init queue - int maxQueueSize = this.sourceConfig.getConnectorConfig().getMaxStorageSize(); - this.queue = new SynchronizedCircularFifoQueue<>(maxQueueSize); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); - // init batch size - this.batchSize = this.sourceConfig.getConnectorConfig().getBatchSize(); + // init poll batch size and timeout + this.maxBatchSize = this.sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = this.sourceConfig.getPollConfig().getMaxWaitTime(); // init protocol String protocolName = this.sourceConfig.getConnectorConfig().getProtocol(); @@ -183,20 +185,29 @@ public void stop() { @Override public List poll() { - // if queue is empty, return empty list - if (queue.isEmpty()) { - return Collections.emptyList(); - } + // record current time + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + // poll from queue - List connectRecords = new ArrayList<>(batchSize); - for (int i = 0; i < batchSize; i++) { - Object obj = queue.poll(); - if (obj == null) { + List connectRecords = new ArrayList<>(maxBatchSize); + for (int i = 0; i < maxBatchSize; i++) { + try { + Object obj = queue.poll(remainingTime, 
TimeUnit.MILLISECONDS); + if (obj == null) { + break; + } + // convert to ConnectRecord + ConnectRecord connectRecord = protocol.convertToConnectRecord(obj); + connectRecords.add(connectRecord); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; + } catch (Exception e) { + log.error("Failed to poll from queue.", e); break; } - // convert to ConnectRecord - ConnectRecord connectRecord = protocol.convertToConnectRecord(obj); - connectRecords.add(connectRecord); } return connectRecords; } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java index b671383e54..c5a22139e0 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java @@ -18,9 +18,10 @@ package org.apache.eventmesh.connector.http.source.protocol; import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; -import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import java.util.concurrent.BlockingQueue; + import io.vertx.ext.web.Route; @@ -45,7 +46,7 @@ public interface Protocol { * @param route route * @param queue queue info */ - void setHandler(Route route, SynchronizedCircularFifoQueue queue); + void setHandler(Route route, BlockingQueue queue); /** diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java 
b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java index 4906e920f2..a44ed0e90c 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java @@ -18,12 +18,13 @@ package org.apache.eventmesh.connector.http.source.protocol.impl; import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; -import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; import org.apache.eventmesh.connector.http.source.data.CommonResponse; import org.apache.eventmesh.connector.http.source.protocol.Protocol; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.eventmesh.openconnect.util.CloudEventUtil; +import java.util.concurrent.BlockingQueue; + import io.cloudevents.CloudEvent; import io.cloudevents.http.vertx.VertxMessageFactory; import io.netty.handler.codec.http.HttpResponseStatus; @@ -60,7 +61,7 @@ public void initialize(SourceConnectorConfig sourceConnectorConfig) { * @param queue queue info */ @Override - public void setHandler(Route route, SynchronizedCircularFifoQueue queue) { + public void setHandler(Route route, BlockingQueue queue) { route.method(HttpMethod.POST) .handler(ctx -> VertxMessageFactory.createReader(ctx.request()) .map(reader -> { diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java index 0761170ac0..e831dc9723 100644 --- 
a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java @@ -20,7 +20,6 @@ import org.apache.eventmesh.common.Constants; import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; import org.apache.eventmesh.connector.http.source.data.CommonResponse; import org.apache.eventmesh.connector.http.source.data.WebhookRequest; import org.apache.eventmesh.connector.http.source.protocol.Protocol; @@ -28,6 +27,7 @@ import java.util.Base64; import java.util.Map; +import java.util.concurrent.BlockingQueue; import java.util.stream.Collectors; import io.netty.handler.codec.http.HttpResponseStatus; @@ -66,7 +66,7 @@ public void initialize(SourceConnectorConfig sourceConnectorConfig) { * @param queue queue info */ @Override - public void setHandler(Route route, SynchronizedCircularFifoQueue queue) { + public void setHandler(Route route, BlockingQueue queue) { route.method(HttpMethod.POST) .handler(BodyHandler.create()) .handler(ctx -> { diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java index fac8c0d801..e1edbd0faf 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java @@ -20,7 +20,6 @@ import org.apache.eventmesh.common.Constants; import 
org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; import org.apache.eventmesh.common.exception.EventMeshException; -import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; import org.apache.eventmesh.connector.http.source.data.CommonResponse; import org.apache.eventmesh.connector.http.source.data.WebhookRequest; import org.apache.eventmesh.connector.http.source.protocol.Protocol; @@ -31,6 +30,7 @@ import org.apache.commons.lang3.StringUtils; import java.util.Map; +import java.util.concurrent.BlockingQueue; import java.util.stream.Collectors; import javax.crypto.Mac; @@ -90,7 +90,7 @@ public void initialize(SourceConnectorConfig sourceConnectorConfig) { * @param queue queue info */ @Override - public void setHandler(Route route, SynchronizedCircularFifoQueue queue) { + public void setHandler(Route route, BlockingQueue queue) { route.method(HttpMethod.POST) .handler(BodyHandler.create()) .handler(ctx -> { diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml index b1edc084ff..0a73e627b0 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml @@ -30,8 +30,6 @@ connectorConfig: port: 3755 idleTimeout: 5000 # timeunit: ms maxFormAttributeSize: 1048576 # timeunit: byte, default: 1048576(1MB). This applies only when handling form data submissions. - maxStorageSize: 1000 # max storage size, default: 1000 - batchSize: 10 # batch size, default: 10 protocol: CloudEvent # Case insensitive, default: CloudEvent, options: CloudEvent, GitHub, Common extraConfig: # extra config for different protocol, e.g. 
GitHub secret secret: xxxxxxx # GitHub secret diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml b/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml index 735d3b01d7..336bb2cb5e 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml @@ -30,8 +30,6 @@ connectorConfig: port: 3755 idleTimeout: 5000 # timeunit: ms maxFormAttributeSize: 1048576 # timeunit: byte, default: 1048576(1MB). This applies only when handling form data submissions. - maxStorageSize: 1000 # max storage size, default: 1000 - batchSize: 10 # batch size, default: 10 protocol: CloudEvent # Case insensitive, default: CloudEvent, options: CloudEvent, GitHub, Common extraConfig: # extra config for different protocol, e.g. GitHub secret secret: xxxxxxx # GitHub secret diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java index 810a59e723..ecc5a44154 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java @@ -142,7 +142,9 @@ private void doInit() { this.dispatcher = new EventDispatcher(this.sourceJdbcTaskManager); - this.taskManagerCoordinator = new TaskManagerCoordinator(); + this.taskManagerCoordinator = new TaskManagerCoordinator(sourceConfig.getPollConfig().getCapacity(), + sourceConfig.getPollConfig().getMaxBatchSize(), + sourceConfig.getPollConfig().getMaxWaitTime()); this.taskManagerCoordinator.registerTaskManager(SourceJdbcTaskManager.class.getName(), 
sourceJdbcTaskManager); this.taskManagerCoordinator.init(); } @@ -209,9 +211,6 @@ public void stop() throws Exception { @Override public List poll() { - - List connectRecords = this.taskManagerCoordinator.poll(); - - return connectRecords; + return this.taskManagerCoordinator.poll(); } } diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java index c299fbc531..8efb8cbc71 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java @@ -40,16 +40,16 @@ @Slf4j public class TaskManagerCoordinator { - private static final int BATCH_MAX = 10; - private static final int DEFAULT_QUEUE_SIZE = 1 << 13; + private final BlockingQueue recordBlockingQueue; + private final Map taskManagerCache = new HashMap<>(8); + private final int maxBatchSize; + private final long maxPollTimeout; - private BlockingQueue recordBlockingQueue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_SIZE); - private Map taskManagerCache = new HashMap<>(8); - /** - * Constructs a new TaskManagerCoordinator. - */ - public TaskManagerCoordinator() { + public TaskManagerCoordinator(int capacity, int maxBatchSize, long maxPollTimeout) { + this.recordBlockingQueue = new LinkedBlockingQueue<>(capacity); + this.maxBatchSize = maxBatchSize; + this.maxPollTimeout = maxPollTimeout; } /** @@ -96,10 +96,13 @@ public void start() { * @return A list of ConnectRecords, up to the maximum batch size defined by BATCH_MAX. 
*/ public List poll() { - List records = new ArrayList<>(BATCH_MAX); - for (int index = 0; index < BATCH_MAX; ++index) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollTimeout; + + List records = new ArrayList<>(maxBatchSize); + for (int index = 0; index < maxBatchSize; ++index) { try { - ConnectRecord record = recordBlockingQueue.poll(3, TimeUnit.SECONDS); + ConnectRecord record = recordBlockingQueue.poll(remainingTime, TimeUnit.MILLISECONDS); if (Objects.isNull(record)) { break; } @@ -107,6 +110,10 @@ public List poll() { log.debug("record:{}", JsonUtils.toJSONString(record)); } records.add(record); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollTimeout > elapsedTime ? maxPollTimeout - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java index d573126934..f771e907cb 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java @@ -45,7 +45,7 @@ public class KafkaSourceConnector implements Source { private KafkaConsumer kafkaConsumer; - private int pollTimeOut = 100; + private long maxPollWaitTime; @Override public Class configClass() { @@ -75,7 +75,7 @@ private void doInit() { props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, sourceConfig.getConnectorConfig().getMaxPollRecords()); props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 
sourceConfig.getConnectorConfig().getAutoCommitIntervalMS()); props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sourceConfig.getConnectorConfig().getSessionTimeoutMS()); - this.pollTimeOut = sourceConfig.getConnectorConfig().getPollTimeOut(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); this.kafkaConsumer = new KafkaConsumer<>(props); } @@ -106,7 +106,7 @@ public void stop() { @Override public List poll() { - ConsumerRecords records = kafkaConsumer.poll(Duration.ofMillis(pollTimeOut)); + ConsumerRecords records = kafkaConsumer.poll(Duration.ofMillis(maxPollWaitTime)); List connectRecords = new ArrayList<>(records.count()); for (ConsumerRecord record : records) { Long timestamp = System.currentTimeMillis(); diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java index df3f66d6a6..1d1dcc1843 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java @@ -42,10 +42,12 @@ public class MongodbSourceConnector implements Source { private MongodbSourceConfig sourceConfig; - private static final int DEFAULT_BATCH_SIZE = 10; - private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + private MongodbSourceClient client; @Override @@ -67,7 +69,9 @@ public void init(ConnectorContext connectorContext) throws Exception { } private void doInit() { - this.queue = new LinkedBlockingQueue<>(1000); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = 
sourceConfig.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); String connectorType = sourceConfig.getConnectorConfig().getConnectorType(); if (connectorType.equals(ClusterType.STANDALONE.name())) { this.client = new MongodbStandaloneSourceClient(sourceConfig.getConnectorConfig(), queue); @@ -105,15 +109,21 @@ public void stop() throws Exception { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? 
maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java index 534ecfb79d..e40c451ff8 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java @@ -35,12 +35,14 @@ @Slf4j public class OpenFunctionSourceConnector implements Source { - private static final int DEFAULT_BATCH_SIZE = 10; - private OpenFunctionSourceConfig sourceConfig; private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + @Override public Class configClass() { return OpenFunctionSourceConfig.class; @@ -50,7 +52,7 @@ public Class configClass() { public void init(Config config) throws Exception { // init config for openfunction source connector this.sourceConfig = (OpenFunctionSourceConfig) config; - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); } @Override @@ -58,7 +60,14 @@ public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; // init config for openfunction source connector this.sourceConfig = (OpenFunctionSourceConfig) sourceConnectorContext.getSourceConfig(); - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); + } + + private void doInit() { + // init config for openfunction source connector + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = 
sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); } @Override @@ -92,16 +101,21 @@ public BlockingQueue queue() { @Override public List poll() { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - ConnectRecord connectRecord = queue.poll(3, TimeUnit.SECONDS); + ConnectRecord connectRecord = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (connectRecord == null) { break; } connectRecords.add(connectRecord); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { Thread currentThread = Thread.currentThread(); log.warn("[OpenFunctionSourceConnector] Interrupting thread {} due to exception {}", diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java index 836779dbcf..4b5e4751b3 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java @@ -57,8 +57,6 @@ public class PravegaSourceConnector implements Source { private static final AtomicBoolean started = new AtomicBoolean(false); - private static final int DEFAULT_BATCH_SIZE = 10; - 
private PravegaSourceConfig sourceConfig; private StreamManager streamManager; @@ -71,6 +69,10 @@ public class PravegaSourceConnector implements Source { private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + private final ThreadPoolExecutor executor = ThreadPoolFactory.createThreadPoolExecutor( Runtime.getRuntime().availableProcessors() * 2, Runtime.getRuntime().availableProcessors() * 2, @@ -89,7 +91,9 @@ public void init(Config config) throws Exception { public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; this.sourceConfig = (PravegaSourceConfig) sourceConnectorContext.getSourceConfig(); - this.queue = new LinkedBlockingQueue<>(1000); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); streamManager = StreamManager.create(sourceConfig.getConnectorConfig().getControllerURI()); ClientConfig.ClientConfigBuilder clientConfigBuilder = @@ -168,15 +172,21 @@ public void stop() { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? 
maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java index 0b7e726bda..a19b159c1c 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java @@ -54,10 +54,12 @@ public class RabbitMQSourceConnector implements Source { private volatile boolean started = false; - private static final int DEFAULT_BATCH_SIZE = 10; - private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + private final RabbitmqConnectionFactory rabbitmqConnectionFactory = new RabbitmqConnectionFactory(); private RabbitMQSourceHandler rabbitMQSourceHandler; @@ -84,7 +86,9 @@ public void init(Config config) throws Exception { @Override public void init(ConnectorContext connectorContext) throws Exception { - this.queue = new LinkedBlockingQueue<>(1000); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); this.sourceConfig = (RabbitMQSourceConfig) ((SourceConnectorContext) connectorContext).getSourceConfig(); this.rabbitmqClient = new RabbitmqClient(rabbitmqConnectionFactory); this.connection = rabbitmqClient.getConnection(sourceConfig.getConnectorConfig().getHost(), @@ -139,15 +143,21 @@ public void stop() { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for 
(int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java index 868639c205..5b858afa30 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java @@ -40,8 +40,6 @@ public class RedisSourceConnector implements Source { - private static final int DEFAULT_BATCH_SIZE = 10; - private RTopic topic; private RedisSourceConfig sourceConfig; @@ -50,6 +48,10 @@ public class RedisSourceConnector implements Source { private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + @Override public Class configClass() { return RedisSourceConfig.class; @@ -73,7 +75,9 @@ private void doInit() { redisConfig.useSingleServer().setAddress(sourceConfig.connectorConfig.getServer()); redisConfig.setCodec(CloudEventCodec.getInstance()); this.redissonClient = 
Redisson.create(redisConfig); - this.queue = new LinkedBlockingQueue<>(1000); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); } @Override @@ -107,15 +111,21 @@ public void stop() throws Exception { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? 
maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java index db286eb609..6efed2db3c 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java @@ -52,14 +52,16 @@ public class SpringSourceConnector implements Source, MessageSendingOperations, private static final String CONNECTOR_PROPERTY_PREFIX = "eventmesh.connector."; - private static final int DEFAULT_BATCH_SIZE = 10; - private ApplicationContext applicationContext; private SpringSourceConfig sourceConfig; private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + @Override public Class configClass() { return SpringSourceConfig.class; @@ -69,7 +71,7 @@ public Class configClass() { public void init(Config config) throws Exception { // init config for spring source connector this.sourceConfig = (SpringSourceConfig) config; - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); } @Override @@ -77,7 +79,13 @@ public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; // init config for spring source connector this.sourceConfig = (SpringSourceConfig) sourceConnectorContext.getSourceConfig(); - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); + } + + private void doInit() { + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = 
sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); } @Override @@ -107,15 +115,21 @@ public void stop() throws Exception { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - ConnectRecord connectRecord = queue.poll(3, TimeUnit.SECONDS); + ConnectRecord connectRecord = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (connectRecord == null) { break; } connectRecords.add(connectRecord); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { Thread currentThread = Thread.currentThread(); log.warn("[SpringSourceConnector] Interrupting thread {} due to exception {}", From 1ab510301d2c5db167be4daa4bcfa270c69d0cc4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 15:50:48 +0530 Subject: [PATCH 46/51] Bump software.amazon.awssdk:s3 from 2.28.12 to 2.29.5 (#5126) Bumps software.amazon.awssdk:s3 from 2.28.12 to 2.29.5. --- updated-dependencies: - dependency-name: software.amazon.awssdk:s3 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index e4a7260841..c094f62551 100644 --- a/build.gradle +++ b/build.gradle @@ -794,7 +794,7 @@ subprojects { dependency "javax.annotation:javax.annotation-api:1.3.2" dependency "com.alibaba.fastjson2:fastjson2:2.0.52" - dependency "software.amazon.awssdk:s3:2.28.12" + dependency "software.amazon.awssdk:s3:2.29.5" dependency "com.github.rholder:guava-retrying:2.0.0" dependency "com.alibaba:druid-spring-boot-starter:1.2.23" From 4f9b3109ff8a69e0783c610848820dbadbc40ec0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 23:47:47 +0530 Subject: [PATCH 47/51] Bump com.zendesk:mysql-binlog-connector-java from 0.29.2 to 0.30.1 (#5125) Bumps [com.zendesk:mysql-binlog-connector-java](https://github.com/osheroff/mysql-binlog-connector-java) from 0.29.2 to 0.30.1. - [Changelog](https://github.com/osheroff/mysql-binlog-connector-java/blob/master/CHANGELOG.md) - [Commits](https://github.com/osheroff/mysql-binlog-connector-java/commits) --- updated-dependencies: - dependency-name: com.zendesk:mysql-binlog-connector-java dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-jdbc/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle b/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle index 2d7a14ed25..b70bf6d357 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle @@ -41,7 +41,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation project(":eventmesh-spi") - implementation 'com.zendesk:mysql-binlog-connector-java:0.29.2' + implementation 'com.zendesk:mysql-binlog-connector-java:0.30.1' compileOnly 'com.mysql:mysql-connector-j' compileOnly 'org.projectlombok:lombok' From 27eae86e1eb1db212f6e6767e191395ae7de2cc6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 00:01:16 +0530 Subject: [PATCH 48/51] Bump org.redisson:redisson from 3.36.0 to 3.38.1 (#5124) Bumps [org.redisson:redisson](https://github.com/redisson/redisson) from 3.36.0 to 3.38.1. - [Release notes](https://github.com/redisson/redisson/releases) - [Changelog](https://github.com/redisson/redisson/blob/master/CHANGELOG.md) - [Commits](https://github.com/redisson/redisson/compare/redisson-3.36.0...redisson-3.38.1) --- updated-dependencies: - dependency-name: org.redisson:redisson dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-redis/build.gradle | 2 +- eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/eventmesh-connectors/eventmesh-connector-redis/build.gradle b/eventmesh-connectors/eventmesh-connector-redis/build.gradle index 29b541958a..fabfe1c983 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-redis/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation 'org.redisson:redisson:3.36.0' + implementation 'org.redisson:redisson:3.38.1' api 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle index 6fca0d8b13..4fb1645414 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") // redisson - implementation 'org.redisson:redisson:3.36.0' + implementation 'org.redisson:redisson:3.38.1' // netty implementation 'io.netty:netty-all' From 3bc459b418b5e972a20b6a968b3289c7cfe4345c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 00:02:45 +0530 Subject: [PATCH 49/51] Bump org.apache.kafka:kafka-clients from 3.7.1 to 3.8.1 (#5123) Bumps org.apache.kafka:kafka-clients from 3.7.1 to 3.8.1. --- updated-dependencies: - dependency-name: org.apache.kafka:kafka-clients dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-kafka/build.gradle | 2 +- eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/eventmesh-connectors/eventmesh-connector-kafka/build.gradle b/eventmesh-connectors/eventmesh-connector-kafka/build.gradle index 3156da35a4..2796e03c0e 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-kafka/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation 'io.cloudevents:cloudevents-kafka:2.5.0' - implementation 'org.apache.kafka:kafka-clients:3.7.1' + implementation 'org.apache.kafka:kafka-clients:3.8.1' compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' } diff --git a/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle index 91e2cc1de3..c9064cdef4 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle @@ -22,7 +22,7 @@ dependencies { implementation group: 'io.cloudevents', name: 'cloudevents-kafka', version: '2.5.0' // https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients - implementation 'org.apache.kafka:kafka-clients:3.7.1' + implementation 'org.apache.kafka:kafka-clients:3.8.1' testImplementation 'org.junit.jupiter:junit-jupiter' From acaca484bd30d1381e327fa6f147c430266c6541 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 20:26:58 +0530 Subject: [PATCH 50/51] Bump commons-io:commons-io from 2.17.0 to 2.18.0 (#5133) Bumps commons-io:commons-io from 2.17.0 to 2.18.0. 
--- updated-dependencies: - dependency-name: commons-io:commons-io dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index c094f62551..2e50eb3891 100644 --- a/build.gradle +++ b/build.gradle @@ -46,7 +46,7 @@ buildscript { classpath "com.diffplug.spotless:spotless-plugin-gradle:6.13.0" classpath "org.apache.httpcomponents:httpclient:4.5.14" - classpath "commons-io:commons-io:2.17.0" + classpath "commons-io:commons-io:2.18.0" } } @@ -711,7 +711,7 @@ subprojects { dependency "org.apache.commons:commons-lang3:3.17.0" dependency "org.apache.commons:commons-collections4:4.4" dependency "org.apache.commons:commons-text:1.12.0" - dependency "commons-io:commons-io:2.17.0" + dependency "commons-io:commons-io:2.18.0" dependency "commons-validator:commons-validator:1.9.0" dependency "com.google.guava:guava:33.3.0-jre" From dd9698f222d70a66dd43f82a5ae75c61d632df1d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 15:48:29 +0530 Subject: [PATCH 51/51] Bump org.apache.kafka:kafka-clients from 3.8.1 to 3.9.0 (#5131) Bumps org.apache.kafka:kafka-clients from 3.8.1 to 3.9.0. --- updated-dependencies: - dependency-name: org.apache.kafka:kafka-clients dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eventmesh-connectors/eventmesh-connector-kafka/build.gradle | 2 +- eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/eventmesh-connectors/eventmesh-connector-kafka/build.gradle b/eventmesh-connectors/eventmesh-connector-kafka/build.gradle index 2796e03c0e..06e4fe97b3 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-kafka/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation 'io.cloudevents:cloudevents-kafka:2.5.0' - implementation 'org.apache.kafka:kafka-clients:3.8.1' + implementation 'org.apache.kafka:kafka-clients:3.9.0' compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' } diff --git a/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle index c9064cdef4..dbae8d398d 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle @@ -22,7 +22,7 @@ dependencies { implementation group: 'io.cloudevents', name: 'cloudevents-kafka', version: '2.5.0' // https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients - implementation 'org.apache.kafka:kafka-clients:3.8.1' + implementation 'org.apache.kafka:kafka-clients:3.9.0' testImplementation 'org.junit.jupiter:junit-jupiter'