Skip to content

Commit

Permalink
[ISSUE #5139] update canal connector module (#5140)
Browse files Browse the repository at this point in the history
* [ISSUE #5137] update connector runtime v2 module

* fix checkStyle error

* [ISSUE #5139] update canal connector module
  • Loading branch information
xwm1992 authored Dec 9, 2024
1 parent 293a61e commit f09faa0
Show file tree
Hide file tree
Showing 24 changed files with 1,673 additions and 786 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,5 @@
// Configuration for the Canal sink connector when running a full (snapshot) sync job.
public class CanalSinkFullConfig extends SinkConfig {
// Connector-level sink settings (datasource, target tables, etc.); schema declared elsewhere in the project.
private SinkConnectorConfig sinkConnectorConfig;
// Replacement value for MySQL zero-date values ("0000-00-00 ...") when writing -- presumably; verify against the sink writer.
private String zeroDate;
// Number of parallel sink writer tasks; defaults to 2.
private int parallel = 2;
}
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ public class CanalSinkIncrementConfig extends CanalSinkConfig {
private Integer poolSize = 5;

// sync mode: field/row
private SyncMode syncMode;
private SyncMode syncMode = SyncMode.ROW;

private boolean isGTIDMode = true;
private boolean isGTIDMode = false;

private boolean isMariaDB = true;

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.eventmesh.common.config.connector.rdb.canal;

import org.apache.eventmesh.common.config.connector.SourceConfig;
import org.apache.eventmesh.common.remote.offset.RecordPosition;

import java.util.List;

import lombok.Data;
import lombok.EqualsAndHashCode;

@Data
@EqualsAndHashCode(callSuper = true)
public class CanalSourceCheckConfig extends SourceConfig {

    // Connector-level source settings (datasource, tables, etc.); schema declared elsewhere in the project.
    private SourceConnectorConfig sourceConnectorConfig;

    // Positions from which the check should start reading; semantics of null/empty defined by the runtime -- TODO confirm.
    private List<RecordPosition> startPosition;

    // Number of parallel check tasks. Defaults to 2 for consistency with CanalSourceFullConfig;
    // an unset value of 0 would mean no worker tasks at all.
    private int parallel = 2;

    // Number of records accumulated before a flush. Defaults to 20, matching CanalSourceFullConfig.
    private int flushSize = 20;

    // Interval between check executions, in seconds -- presumably; verify against the scheduler that reads it.
    private int executePeriod = 3600;

    // Throttling limits: pages fetched per second and records processed per second.
    private Integer pagePerSecond = 1;
    private Integer recordPerSecond = 100;
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@
public class CanalSourceFullConfig extends SourceConfig {
private SourceConnectorConfig sourceConnectorConfig;
private List<RecordPosition> startPosition;
private int parallel;
private int flushSize;
private int parallel = 2;
private int flushSize = 20;
private Integer pagePerSecond = 1;
private Integer recordPerSecond = 100;
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,17 +32,17 @@ public class CanalSourceIncrementConfig extends CanalSourceConfig {

private String destination;

private Long canalInstanceId;
private Long canalInstanceId = 1L;

private String desc;
private String desc = "canalSourceInstance";

private boolean ddlSync = true;
private boolean ddlSync = false;

private boolean filterTableError = false;

private Long slaveId;

private Short clientId;
private Short clientId = 1;

private String serverUUID;

Expand All @@ -67,19 +67,19 @@ public class CanalSourceIncrementConfig extends CanalSourceConfig {
private Boolean enableRemedy = false;

// sync mode: field/row
private SyncMode syncMode;
private SyncMode syncMode = SyncMode.ROW;

// sync consistency
private SyncConsistency syncConsistency;
private SyncConsistency syncConsistency = SyncConsistency.BASE;

// ================================= system parameter
// ================================

// Column name of the bidirectional synchronization mark
private String needSyncMarkTableColumnName = "needSync";
private String needSyncMarkTableColumnName;

// Column value of the bidirectional synchronization mark
private String needSyncMarkTableColumnValue = "needSync";
private String needSyncMarkTableColumnValue;

private SourceConnectorConfig sourceConnectorConfig;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ public class JobRdbFullPosition {
private String tableName;
private String primaryKeyRecords;
private long maxCount;
private long handledRecordCount = 0;
private boolean finished;
private BigDecimal percent;
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@

import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition;

import java.util.List;
import java.util.Map;
import java.util.Set;

import lombok.Data;
import lombok.EqualsAndHashCode;
Expand All @@ -31,6 +31,6 @@
@Data
@EqualsAndHashCode(callSuper = true)
public class MySQLTableDef extends RdbTableDefinition {
private Set<String> primaryKeys;
private List<String> primaryKeys;
private Map<String, MySQLColumnDef> columnDefinitions;
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

public enum DataSourceDriverType {
MYSQL,
MariaDB,
REDIS,
ROCKETMQ,
HTTP;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
@ToString
public enum DataSourceType {
MYSQL("MySQL", DataSourceDriverType.MYSQL, DataSourceClassify.RDB),
MariaDB("MariaDB", DataSourceDriverType.MariaDB, DataSourceClassify.RDB),
REDIS("Redis", DataSourceDriverType.REDIS, DataSourceClassify.CACHE),
ROCKETMQ("RocketMQ", DataSourceDriverType.ROCKETMQ, DataSourceClassify.MQ),
HTTP("HTTP", DataSourceDriverType.HTTP, DataSourceClassify.TUNNEL);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -780,8 +780,8 @@ public static LocalDateTime toLocalDateTime(Object value) {
long nanos = ((Timestamp) value).getNanos();
return Instant.ofEpochMilli(((Timestamp) value).getTime() - (nanos / 1000000)).plusNanos(nanos).atZone(ZoneId.systemDefault())
.toLocalDateTime();
} else if (value instanceof java.sql.Date) {
return ((java.sql.Date) value).toLocalDate().atTime(0, 0);
} else if (value instanceof Date) {
return ((Date) value).toLocalDate().atTime(0, 0);
} else {
if (!(value instanceof Time)) {
return ((java.util.Date) value).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,24 +64,27 @@ public boolean before(CanalSinkIncrementConfig sinkConfig, CanalConnectRecord re
String[] keyColumns = null;
String[] otherColumns = null;
if (existOldKeys) {
// update table xxx set pk = newPK where pk = oldPk
keyColumns = buildColumnNames(record.getOldKeys());
otherColumns = buildColumnNames(record.getUpdatedColumns(), record.getKeys());
} else {
keyColumns = buildColumnNames(record.getKeys());
otherColumns = buildColumnNames(record.getUpdatedColumns());
}

if (rowMode && !existOldKeys) {
sql = sqlTemplate.getMergeSql(schemaName,
record.getTableName(),
keyColumns,
otherColumns,
new String[] {},
true,
shardColumns);
} else {
sql = sqlTemplate.getUpdateSql(schemaName, record.getTableName(), keyColumns, otherColumns, true, shardColumns);
}
// not support the column default not null for merge sql
// if (rowMode && !existOldKeys) {
// sql = sqlTemplate.getMergeSql(schemaName,
// record.getTableName(),
// keyColumns,
// otherColumns,
// new String[] {},
// true,
// shardColumns);
// } else {
// sql = sqlTemplate.getUpdateSql(schemaName, record.getTableName(), keyColumns, otherColumns, true, shardColumns);
// }
sql = sqlTemplate.getUpdateSql(schemaName, record.getTableName(), keyColumns, otherColumns, true, shardColumns);
} else if (type.isDelete()) {
sql = sqlTemplate.getDeleteSql(schemaName,
record.getTableName(),
Expand Down
Loading

0 comments on commit f09faa0

Please sign in to comment.