From 3d72e7c710367adc10960558ddf0703c55030936 Mon Sep 17 00:00:00 2001 From: mystarry-sky Date: Thu, 7 Nov 2024 11:35:34 +0800 Subject: [PATCH 1/9] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E6=A0=A1=E9=AA=8C?= =?UTF-8?q?=E5=A4=B1=E8=B4=A5=E5=A4=84=E7=90=86=E6=B5=81=E7=A8=8B=20?= =?UTF-8?q?=E6=97=A5=E5=BF=97=E6=89=93=E5=8D=B0=E5=BC=82=E5=B8=B8=E9=97=AE?= =?UTF-8?q?=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit (cherry picked from commit 1c68dff5407e256e1d3b3fac5c73030e0fd5c19a) --- .../check/slice/SliceCheckEventHandler.java | 4 +- .../data/access/MysqlDataAccessService.java | 4 +- .../data/access/OpgsDataAccessService.java | 60 +++++++++++++------ 3 files changed, 44 insertions(+), 24 deletions(-) diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java index 0e7771b..36d7218 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java @@ -93,9 +93,7 @@ public class SliceCheckEventHandler { */ public void handleFailed(SliceCheckEvent checkEvent) { LogUtils.warn(log, "slice check event , table slice has unknown error [{}][{} : {}]", checkEvent.getCheckName(), - checkEvent.getSource() - .getTableHash(), checkEvent.getSink() - .getTableHash()); + checkEvent.getSource(), checkEvent.getSink()); long count = getCheckSliceCount(checkEvent); sliceCheckContext.refreshSliceCheckProgress(checkEvent.getSlice(), count); CheckDiffResult result = buildSliceDiffResult(checkEvent.getSlice(), (int) count, true, "slice has unknown error"); diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java index 40319a2..8383c69 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java @@ -65,8 +65,8 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public List dasQueryTableNameList() { String schema = properties.getSchema(); - String sql = "SELECT info.table_name tableName FROM information_schema.tables info WHERE table_schema='" - + schema + "'"; + String sql = "select info.table_name tableName from information_schema.tables info where table_schema='" + + schema + "' and table_type='BASE TABLE'"; return adasQueryTableNameList(sql); } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java index 6b5dca7..d910ef5 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java @@ -27,6 +27,7 @@ import org.opengauss.datachecker.common.entry.extract.TableMetadata; import org.opengauss.datachecker.extract.data.mapper.OpgsMetaDataMapper; import javax.annotation.PostConstruct; + import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; 
@@ -75,22 +76,43 @@ public class OpgsDataAccessService extends AbstractDataAccessService { return health(schema, sql); } + /** + *
+     * DAS query table name list
+     *  select c.relname tableName from pg_class c  LEFT JOIN pg_namespace n on n.oid = c.relnamespace
+     *  where n.nspname=? and c.relkind ='r';
+     *  
+ * + * @return tableNameList + */ @Override public List dasQueryTableNameList() { String schema = properties.getSchema(); String sql = "select c.relname tableName from pg_class c LEFT JOIN pg_namespace n on n.oid = c.relnamespace " - + " where n.nspname='" + schema + "' and c.relkind ='r';"; + + " where n.nspname='" + schema + "' and c.relkind ='r';"; return adasQueryTableNameList(sql); } + /** + *
+     *     Query table primary key column information
+     *      select c.relname tableName,ns.nspname,ns.oid,a.attname columnName from pg_class c
+     *      left join pg_namespace ns on c.relnamespace=ns.oid
+     *      left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped
+     *      inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey)
+     *      where ns.nspname='test' and cs.contype='p';
+     * 
+ * + * @return primaryColumnList 主键列信息列表 + */ @Override public List queryTablePrimaryColumns() { String schema = properties.getSchema(); String sql = "select c.relname tableName,ns.nspname,ns.oid,a.attname columnName from pg_class c " - + "left join pg_namespace ns on c.relnamespace=ns.oid " - + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " - + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " - + "where ns.nspname='" + schema + "' and cs.contype='p';"; + + "left join pg_namespace ns on c.relnamespace=ns.oid " + + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " + + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " + + "where ns.nspname='" + schema + "' and cs.contype='p';"; return adasQueryTablePrimaryColumns(sql); } @@ -98,10 +120,10 @@ public class OpgsDataAccessService extends AbstractDataAccessService { public List queryTablePrimaryColumns(String tableName) { String schema = properties.getSchema(); String sql = "select c.relname tableName,ns.nspname,ns.oid,a.attname columnName from pg_class c " - + "left join pg_namespace ns on c.relnamespace=ns.oid " - + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " - + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " - + "where ns.nspname='" + schema + "' and c.relname='" + tableName + "' and cs.contype='p';"; + + "left join pg_namespace ns on c.relnamespace=ns.oid " + + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " + + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " + + "where ns.nspname='" + schema + "' and c.relname='" + tableName + "' and cs.contype='p';"; return adasQueryTablePrimaryColumns(sql); } @@ -119,12 +141,12 @@ public class OpgsDataAccessService extends AbstractDataAccessService { public List dasQueryTableMetadataList() { LowerCaseTableNames lowerCaseTableNames = getLowerCaseTableNames(); String colTableName = Objects.equals(LowerCaseTableNames.SENSITIVE, lowerCaseTableNames) - ? "c.relname tableName" - : "lower(c.relname) tableName"; + ? 
"c.relname tableName" + : "lower(c.relname) tableName"; String sql = " select n.nspname tableSchema, " + colTableName + ",c.reltuples tableRows, " - + "case when c.reltuples>0 then pg_table_size(c.oid)/c.reltuples else 0 end as avgRowLength " - + "from pg_class c LEFT JOIN pg_namespace n on n.oid = c.relnamespace " + "where n.nspname='" - + properties.getSchema() + "' and c.relkind ='r';"; + + "case when c.reltuples>0 then pg_table_size(c.oid)/c.reltuples else 0 end as avgRowLength " + + "from pg_class c LEFT JOIN pg_namespace n on n.oid = c.relnamespace " + "where n.nspname='" + + properties.getSchema() + "' and c.relkind ='r';"; return wrapperTableMetadata(adasQueryTableMetadataList(sql)); } @@ -153,9 +175,9 @@ public class OpgsDataAccessService extends AbstractDataAccessService { @Override public List queryPointList(Connection connection, DataAccessParam param) { String sql = "select s.%s from ( select row_number() over(order by r.%s asc) as rn,r.%s from %s.%s r) s" - + " where mod(s.rn, %s ) = 1;"; + + " where mod(s.rn, %s ) = 1;"; sql = String.format(sql, param.getColName(), param.getColName(), param.getColName(), param.getSchema(), - param.getName(), param.getOffset()); + param.getName(), param.getOffset()); return adasQueryPointList(connection, sql); } @@ -164,7 +186,6 @@ public class OpgsDataAccessService extends AbstractDataAccessService { return opgsMetaDataMapper.checkDatabaseNotEmpty(properties.getSchema()); } - @Override public LowerCaseTableNames queryLowerCaseTableNames() { String sql = "SHOW VARIABLES LIKE \"lower_case_table_names\";"; @@ -181,7 +202,8 @@ public class OpgsDataAccessService extends AbstractDataAccessService { } finally { closeConnection(connection); } - return isOgCompatibilityB() ? result.getOrDefault(DOLPHIN_LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN) - : result.getOrDefault(LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN); + return isOgCompatibilityB() + ? 
result.getOrDefault(DOLPHIN_LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN) + : result.getOrDefault(LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN); } } -- Gitee From a5c7891f3db097b4f8a02d17dcbec3bd1dd11bcc Mon Sep 17 00:00:00 2001 From: mystarry-sky Date: Sat, 9 Nov 2024 16:09:19 +0800 Subject: [PATCH 2/9] =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E8=A1=A8=E5=94=AF?= =?UTF-8?q?=E4=B8=80=E6=80=A7=E7=BA=A6=E6=9D=9F=E5=9C=BA=E6=99=AF=E8=A1=A8?= =?UTF-8?q?=E6=A0=A1=E9=AA=8C=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit (cherry picked from commit 4bddd59cc41cc874b53959ed5352e5affede8f0a) --- config/log4j2.xml | 10 +-- .../entry/extract/PrimaryColumnBean.java | 4 + .../entry/extract/UniqueColumnBean.java | 48 +++++++++++ .../extract/data/BaseDataService.java | 61 +++++++------- .../access/AbstractDataAccessService.java | 80 +++++++++++++++---- .../data/access/CsvDataAccessService.java | 5 ++ .../data/access/DataAccessService.java | 30 ++++--- .../data/access/MysqlDataAccessService.java | 35 +++++--- .../data/access/OpgsDataAccessService.java | 13 +++ .../data/access/OracleDataAccessService.java | 25 ++++-- 10 files changed, 231 insertions(+), 80 deletions(-) create mode 100644 datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java diff --git a/config/log4j2.xml b/config/log4j2.xml index e22d912..f8cd663 100644 --- a/config/log4j2.xml +++ b/config/log4j2.xml @@ -30,19 +30,13 @@ - + - - - - - - + diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java index 651e26e..5f85c51 100644 --- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java +++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java @@ -15,7 +15,9 @@ package org.opengauss.datachecker.common.entry.extract; +import lombok.AllArgsConstructor; import lombok.Data; +import lombok.NoArgsConstructor; /** * PrimaryColumnBean @@ -25,6 +27,8 @@ import lombok.Data; * @since :11 */ @Data +@NoArgsConstructor +@AllArgsConstructor public class PrimaryColumnBean { /** * Table diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java new file mode 100644 index 0000000..158f147 --- /dev/null +++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved. + * + * openGauss is licensed under Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * + * http://license.coscl.org.cn/MulanPSL2 + * + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, + * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, + * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. + * See the Mulan PSL v2 for more details. 
+ */ + +package org.opengauss.datachecker.common.entry.extract; + +import lombok.Data; + +/** + * UniqueColumnBean + * + * @author :wangchao + * @date :Created in 2023/12/23 + * @since :11 + */ +@Data +public class UniqueColumnBean { + /** + * Table + */ + private String tableName; + + /** + * Primary key column name + */ + private String columnName; + + /** + * Index identifier + */ + private String indexIdentifier; + + /** + * Column index + */ + private Integer colIdx; +} \ No newline at end of file diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java index 45d3c7c..54e9451 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java @@ -16,6 +16,7 @@ package org.opengauss.datachecker.extract.data; import com.alibaba.druid.pool.DruidDataSource; + import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.MapUtils; import org.apache.logging.log4j.Logger; @@ -33,6 +34,7 @@ import org.opengauss.datachecker.extract.service.RuleAdapterService; import org.springframework.stereotype.Service; import javax.annotation.Resource; + import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; @@ -103,15 +105,13 @@ public class BaseDataService { */ public List bdsQueryTableMetadataList() { List metadataList = dataAccessService.dasQueryTableMetadataList(); - return metadataList.stream() - .filter(meta -> { - boolean isChecking = ruleAdapterService.filterTableByRule(meta.getTableName()); - if (isChecking) { - tableNameList.add(meta.getTableName()); - } - return isChecking; - }) - .collect(Collectors.toList()); + return metadataList.stream().filter(meta -> { + boolean isChecking = ruleAdapterService.filterTableByRule(meta.getTableName()); + if (isChecking) { + tableNameList.add(meta.getTableName()); + } + return isChecking; + }).collect(Collectors.toList()); } /** @@ -124,8 +124,7 @@ public class BaseDataService { if (CollectionUtils.isEmpty(columnBeanList)) { return new HashMap<>(); } - return columnBeanList.stream() - .collect(Collectors.groupingBy(PrimaryColumnBean::getTableName)); + return columnBeanList.stream().collect(Collectors.groupingBy(PrimaryColumnBean::getTableName)); } private List filterByTableRules(List tableNameList) { @@ -189,28 +188,31 @@ public class BaseDataService { /** * update table metadata, and filter column rules * - * @param tableMetadata table metadata + * @param tableMetadata table metadata * @param primaryColumnBeans primary column */ public void updateTableColumnMetaData(TableMetadata tableMetadata, List primaryColumnBeans) { String tableName = tableMetadata.getTableName(); final List columns = dataAccessService.queryTableColumnsMetaData(tableName); - if (Objects.isNull(columns)) { + if (CollectionUtils.isEmpty(columns)) { LogUtils.error(log, "table columns metadata is null ,{}", tableName); return; } - if (Objects.isNull(primaryColumnBeans)) { - primaryColumnBeans = dataAccessService.queryTablePrimaryColumns(tableName); + List tempPrimaryColumnBeans = primaryColumnBeans; + if (CollectionUtils.isEmpty(primaryColumnBeans)) { + tempPrimaryColumnBeans = dataAccessService.queryTablePrimaryColumns(tableName); } - if (Objects.nonNull(primaryColumnBeans)) { - List primaryColumnNameList = getPrimaryColumnNames(primaryColumnBeans); + if 
(CollectionUtils.isEmpty(tempPrimaryColumnBeans)) { + tempPrimaryColumnBeans = dataAccessService.queryTableUniqueColumns(tableName); + } + if (CollectionUtils.isNotEmpty(tempPrimaryColumnBeans)) { + List primaryColumnNameList = getPrimaryColumnNames(tempPrimaryColumnBeans); for (ColumnsMetaData column : columns) { if (primaryColumnNameList.contains(column.getLowerCaseColumnName())) { column.setColumnKey(ColumnKey.PRI); } } } - tableMetadata.setColumnsMetas(ruleAdapterService.executeColumnRule(columns)); tableMetadata.setPrimaryMetas(getTablePrimaryColumn(columns)); tableMetadata.setTableHash(calcTableHash(columns)); @@ -218,16 +220,17 @@ public class BaseDataService { private List getPrimaryColumnNames(List primaryColumnBeans) { return primaryColumnBeans.stream() - .map(PrimaryColumnBean::getColumnName) - .map(String::toLowerCase) - .collect(Collectors.toList()); + .map(PrimaryColumnBean::getColumnName) + .map(String::toLowerCase) + .distinct() + .collect(Collectors.toList()); } private List getTablePrimaryColumn(List columnsMetaData) { return columnsMetaData.stream() - .filter(meta -> ColumnKey.PRI.equals(meta.getColumnKey())) - .sorted(Comparator.comparing(ColumnsMetaData::getOrdinalPosition)) - .collect(Collectors.toList()); + .filter(meta -> ColumnKey.PRI.equals(meta.getColumnKey())) + .sorted(Comparator.comparing(ColumnsMetaData::getOrdinalPosition)) + .collect(Collectors.toList()); } /** @@ -255,9 +258,8 @@ public class BaseDataService { private long calcTableHash(List columnsMetas) { StringBuilder buffer = new StringBuilder(); columnsMetas.sort(Comparator.comparing(ColumnsMetaData::getOrdinalPosition)); - columnsMetas.forEach(column -> buffer.append(column.getColumnName() - .toLowerCase(Locale.ENGLISH)) - .append(column.getOrdinalPosition())); + columnsMetas.forEach(column -> buffer.append(column.getColumnName().toLowerCase(Locale.ENGLISH)) + .append(column.getOrdinalPosition())); return HASH_UTIL.hashBytes(buffer.toString()); } @@ -289,9 +291,8 @@ public class BaseDataService { } else { String[] sqlModeArray = sqlMode.split(","); String newSqlMode = Arrays.stream(sqlModeArray) - .filter(mode -> !mode.equalsIgnoreCase( - ConfigConstants.SQL_MODE_NAME_PAD_CHAR_TO_FULL_LENGTH)) - .collect(Collectors.joining(",")); + .filter(mode -> !mode.equalsIgnoreCase(ConfigConstants.SQL_MODE_NAME_PAD_CHAR_TO_FULL_LENGTH)) + .collect(Collectors.joining(",")); boolean isPadCharFull = ConfigCache.getBooleanValue(ConfigConstants.SQL_MODE_PAD_CHAR_TO_FULL_LENGTH); if (isPadCharFull) { newSqlMode += ConfigConstants.SQL_MODE_NAME_PAD_CHAR_TO_FULL_LENGTH; diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java index a83d498..93b19b8 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java @@ -16,6 +16,9 @@ package org.opengauss.datachecker.extract.data.access; import com.alibaba.druid.pool.DruidDataSource; + +import cn.hutool.core.collection.CollUtil; + import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.opengauss.datachecker.common.config.ConfigCache; @@ -25,6 +28,7 @@ import org.opengauss.datachecker.common.entry.common.Health; import org.opengauss.datachecker.common.entry.enums.LowerCaseTableNames; 
import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.common.exception.ExtractDataAccessException; import org.opengauss.datachecker.common.util.DurationUtils; import org.opengauss.datachecker.common.util.LogUtils; @@ -36,16 +40,19 @@ import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import javax.annotation.Resource; import javax.sql.DataSource; + import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.time.Duration; import java.time.LocalDateTime; +import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; /** * AbstractDataAccessService @@ -113,7 +120,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { public String adasQuerySchema(Connection connection, String executeQueryStatement) { String schema = ""; try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { if (resultSet.next()) { schema = resultSet.getString(RS_COL_SCHEMA); } @@ -129,7 +136,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { * 数据库schema是否合法 * * @param schema schema - * @param sql sql + * @param sql sql * @return result */ public Health health(String schema, String sql) { @@ -160,7 +167,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { Connection connection = getConnection(); List list = new LinkedList<>(); try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { while (resultSet.next()) { list.add(resultSet.getString(RS_COL_TABLE_NAME)); } @@ -185,7 +192,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { Connection connection = getConnection(); List list = new LinkedList<>(); try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { PrimaryColumnBean metadata; while (resultSet.next()) { metadata = new PrimaryColumnBean(); @@ -203,6 +210,50 @@ public abstract class AbstractDataAccessService implements DataAccessService { return list; } + /** + * adas查询表的唯一性约束列信息 + * + * @param executeQueryStatement executeQueryStatement + * @return List + */ + public List adasQueryTableUniqueColumns(String executeQueryStatement) { + Connection connection = getConnection(); + List list = new LinkedList<>(); + try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); + ResultSet resultSet = ps.executeQuery()) { + UniqueColumnBean metadata; + while (resultSet.next()) { + metadata = new UniqueColumnBean(); + metadata.setTableName(resultSet.getString("tableName")); + metadata.setColumnName(resultSet.getString("columnName")); + metadata.setIndexIdentifier(resultSet.getString("indexIdentifier")); + metadata.setColIdx(resultSet.getInt("colIdx")); + list.add(metadata); + } + } catch (SQLException esql) { + LogUtils.error(log, "adasQueryTablePrimaryColumns error:", esql); + } finally { + closeConnection(connection); + } + return list; + } + + /** + * 
将UniqueColumnBean列表转换为PrimaryColumnBean列表 + * + * @param uniqueColumns 输入的UniqueColumnBean列表,可能为空 + * @return PrimaryColumnBean列表,永远不会为null,其中的元素是唯一的 + */ + public List translateUniqueToPrimaryColumns(List uniqueColumns) { + if (CollUtil.isEmpty(uniqueColumns)) { + return new ArrayList<>(); + } + return uniqueColumns.stream() + .map(u -> new PrimaryColumnBean(u.getTableName(), u.getColumnName())) + .distinct() + .collect(Collectors.toList()); + } + /** * adasQueryTableMetadataList * @@ -214,7 +265,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { Connection connection = getConnection(); List list = new LinkedList<>(); try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { TableMetadata metadata; while (resultSet.next()) { metadata = new TableMetadata(); @@ -238,7 +289,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { * 查询表数据抽样检查点清单 * * @param connection connection - * @param sql 检查点查询SQL + * @param sql 检查点查询SQL * @return 检查点列表 */ protected List adasQueryPointList(Connection connection, String sql) { @@ -259,7 +310,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { * 查询表数据抽样检查点清单 * * @param connection connection - * @param sql 检查点查询SQL + * @param sql 检查点查询SQL * @return 检查点列表 */ protected String adasQueryOnePoint(Connection connection, String sql) { @@ -277,8 +328,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { } private long durationBetweenToMillis(LocalDateTime start, LocalDateTime end) { - return Duration.between(start, end) - .toMillis(); + return Duration.between(start, end).toMillis(); } /** @@ -292,15 +342,15 @@ public abstract class AbstractDataAccessService implements DataAccessService { return null; } return tableMetadata.setDataBaseType(properties.getDatabaseType()) - .setEndpoint(properties.getEndpoint()) - .setOgCompatibilityB(isOgCompatibilityB); + .setEndpoint(properties.getEndpoint()) + .setOgCompatibilityB(isOgCompatibilityB); } /** * jdbc mode does not use it * - * @param table table - * @param fileName fileName + * @param table table + * @param fileName fileName * @param differenceList differenceList * @return result */ @@ -317,8 +367,8 @@ public abstract class AbstractDataAccessService implements DataAccessService { */ protected List wrapperTableMetadata(List list) { list.forEach(meta -> meta.setDataBaseType(properties.getDatabaseType()) - .setEndpoint(properties.getEndpoint()) - .setOgCompatibilityB(isOgCompatibilityB)); + .setEndpoint(properties.getEndpoint()) + .setOgCompatibilityB(isOgCompatibilityB)); return list; } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java index f8eb60a..322796b 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java @@ -269,4 +269,9 @@ public class CsvDataAccessService implements DataAccessService { public LowerCaseTableNames queryLowerCaseTableNames() { return LowerCaseTableNames.INSENSITIVE; } + + @Override + public List queryTableUniqueColumns(String tableName) { + return null; + } } diff --git 
a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java index 5d2e84d..6eaef38 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java @@ -25,6 +25,7 @@ import org.opengauss.datachecker.common.entry.extract.TableMetadata; import org.springframework.jdbc.core.RowMapper; import javax.sql.DataSource; + import java.sql.Connection; import java.util.List; import java.util.Map; @@ -111,7 +112,7 @@ public interface DataAccessService { * query table column min value * * @param connection connection - * @param param param + * @param param param * @return min value of string */ String min(Connection connection, DataAccessParam param); @@ -120,7 +121,7 @@ public interface DataAccessService { * query table column max value * * @param connection connection - * @param param param + * @param param param * @return max value of string */ String max(Connection connection, DataAccessParam param); @@ -136,10 +137,10 @@ public interface DataAccessService { /** * query row data by sql * - * @param sql sql - * @param param sql param + * @param sql sql + * @param param sql param * @param rowMapper row mapper - * @param data type + * @param data type * @return data */ List query(String sql, Map param, RowMapper rowMapper); @@ -147,10 +148,10 @@ public interface DataAccessService { /** * query data from csv file * - * @param table table - * @param fileName fileName + * @param table table + * @param fileName fileName * @param differenceList differenceList - * @return + * @return data */ List> query(String table, String fileName, List differenceList); @@ -165,7 +166,7 @@ public interface DataAccessService { * query table check point list * * @param connection connection - * @param param param + * @param param param * @return point list */ List queryPointList(Connection connection, DataAccessParam param); @@ -187,4 +188,15 @@ public interface DataAccessService { * @return value */ LowerCaseTableNames queryLowerCaseTableNames(); + + /** + * query table unique columns + *
+     *     Unique constraints and unique indexes
+     * 
+ * + * @param tableName table + * @return unique columns + */ + List queryTableUniqueColumns(String tableName); } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java index 8383c69..d388461 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java @@ -21,6 +21,7 @@ import org.opengauss.datachecker.common.entry.enums.LowerCaseTableNames; import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData; import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.extract.data.mapper.MysqlMetaDataMapper; import java.sql.Connection; @@ -52,8 +53,8 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public Health health() { String schema = properties.getSchema(); - String sql = "SELECT SCHEMA_NAME tableSchema FROM information_schema.SCHEMATA info WHERE SCHEMA_NAME='" - + schema + "' limit 1"; + String sql = "SELECT SCHEMA_NAME tableSchema FROM information_schema.SCHEMATA info WHERE SCHEMA_NAME='" + schema + + "' limit 1"; return health(schema, sql); } @@ -66,7 +67,7 @@ public class MysqlDataAccessService extends AbstractDataAccessService { public List dasQueryTableNameList() { String schema = properties.getSchema(); String sql = "select info.table_name tableName from information_schema.tables info where table_schema='" - + schema + "' and table_type='BASE TABLE'"; + + schema + "' and table_type='BASE TABLE'"; return adasQueryTableNameList(sql); } @@ -83,11 +84,23 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public List queryTablePrimaryColumns() { String sql = "select table_name tableName ,lower(column_name) columnName from information_schema.columns " - + "where table_schema='" + properties.getSchema() - + "' and column_key='PRI' order by ordinal_position asc "; + + "where table_schema='" + properties.getSchema() + "' and column_key='PRI' order by ordinal_position asc "; return adasQueryTablePrimaryColumns(sql); } + @Override + public List queryTableUniqueColumns(String tableName) { + String schema = properties.getSchema(); + String sql = "select kcu.table_name tableName, kcu.column_name columnName,kcu.ordinal_position colIdx," + + " kcu.constraint_name indexIdentifier from information_schema.table_constraints tc " + + " left join information_schema.KEY_COLUMN_USAGE kcu on tc.table_schema =kcu.table_schema" + + " and tc.constraint_name=kcu.constraint_name and tc.table_name = kcu.table_name" + + " where tc.table_schema='" + schema + "' and tc.table_name='" + tableName + "'" + + " and tc.constraint_type='UNIQUE' ;"; + List uniqueColumns = adasQueryTableUniqueColumns(sql); + return translateUniqueToPrimaryColumns(uniqueColumns); + } + @Override public List queryTablePrimaryColumns(String tableName) { return mysqlMetaDataMapper.queryTablePrimaryColumnsByTableName(properties.getSchema(), tableName); @@ -97,11 +110,11 @@ public class MysqlDataAccessService extends AbstractDataAccessService { public List dasQueryTableMetadataList() { LowerCaseTableNames lowerCaseTableNames = 
getLowerCaseTableNames(); String colTableName = Objects.equals(LowerCaseTableNames.SENSITIVE, lowerCaseTableNames) - ? "info.table_name tableName" - : "lower(info.table_name) tableName"; + ? "info.table_name tableName" + : "lower(info.table_name) tableName"; String sql = " SELECT info.TABLE_SCHEMA tableSchema," + colTableName + ",info.table_rows tableRows , " - + "info.avg_row_length avgRowLength FROM information_schema.tables info WHERE TABLE_SCHEMA='" - + properties.getSchema() + "'"; + + "info.avg_row_length avgRowLength FROM information_schema.tables info WHERE TABLE_SCHEMA='" + + properties.getSchema() + "'"; return wrapperTableMetadata(adasQueryTableMetadataList(sql)); } @@ -130,9 +143,9 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public List queryPointList(Connection connection, DataAccessParam param) { String sql = "select s.%s from (SELECT @rowno:=@rowno+1 as rn,r.%s from %s.%s r," - + " (select @rowno := 0) t ORDER BY r.%s asc) s where mod(s.rn, %s) = 1"; + + " (select @rowno := 0) t ORDER BY r.%s asc) s where mod(s.rn, %s) = 1"; sql = String.format(sql, param.getColName(), param.getColName(), param.getSchema(), param.getName(), - param.getColName(), param.getOffset()); + param.getColName(), param.getOffset()); return adasQueryPointList(connection, sql); } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java index d910ef5..86a0843 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java @@ -24,6 +24,7 @@ import org.opengauss.datachecker.common.entry.enums.OgCompatibility; import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData; import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.extract.data.mapper.OpgsMetaDataMapper; import javax.annotation.PostConstruct; @@ -127,6 +128,18 @@ public class OpgsDataAccessService extends AbstractDataAccessService { return adasQueryTablePrimaryColumns(sql); } + @Override + public List queryTableUniqueColumns(String tableName) { + String schema = properties.getSchema(); + String sql = "SELECT c.relname AS tableName, ns.nspname, i.indexrelid indexIdentifier, " + + " a.attname AS columnName, a.attnum colIdx FROM pg_index i" + + " JOIN pg_class c ON i.indrelid = c.oid join pg_namespace ns on c.relnamespace=ns.oid" + + " JOIN pg_attribute a ON i.indrelid = a.attrelid AND a.attnum = ANY(i.indkey) " + + " where ns.nspname='" + schema + "' and c.relname='" + tableName + "' and i.indisunique = true;"; + List uniqueColumns = adasQueryTableUniqueColumns(sql); + return translateUniqueToPrimaryColumns(uniqueColumns); + } + @Override public List queryTableColumnsMetaData(String tableName) { return opgsMetaDataMapper.queryTableColumnsMetaData(properties.getSchema(), tableName); diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java index b2426c1..e29bafb 100644 --- 
a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java @@ -21,6 +21,7 @@ import org.opengauss.datachecker.common.entry.enums.LowerCaseTableNames; import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData; import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.extract.data.mapper.OracleMetaDataMapper; import java.sql.Connection; @@ -76,11 +77,22 @@ public class OracleDataAccessService extends AbstractDataAccessService { @Override public List queryTablePrimaryColumns() { String sql = "SELECT A.TABLE_NAME tableName, A.COLUMN_NAME columnName FROM ALL_CONS_COLUMNS A,ALL_CONSTRAINTS B" - + " WHERE A.constraint_name = B.constraint_name AND B.constraint_type = 'P' AND A.OWNER = '" - + properties.getSchema() + "'"; + + " WHERE A.constraint_name = B.constraint_name AND B.constraint_type = 'P' AND A.OWNER = '" + + properties.getSchema() + "'"; return adasQueryTablePrimaryColumns(sql); } + @Override + public List queryTableUniqueColumns(String tableName) { + String schema = properties.getSchema(); + String sql = "SELECT uc.table_name tableName,uc.constraint_name indexIdentifier,ucc.column_name columnName," + + " uc.constraint_type,ucc.position colIdx FROM USER_CONSTRAINTS uc " + + " JOIN USER_CONS_COLUMNS ucc ON uc.constraint_name=ucc.constraint_name " + + " WHERE uc.constraint_type='U' and uc.owner='" + schema + "'and uc.table_name='" + tableName + "'"; + List uniqueColumns = adasQueryTableUniqueColumns(sql); + return translateUniqueToPrimaryColumns(uniqueColumns); + } + @Override public List queryTablePrimaryColumns(String tableName) { return oracleMetaDataMapper.queryTablePrimaryColumnsByTableName(properties.getSchema(), tableName); @@ -91,12 +103,11 @@ public class OracleDataAccessService extends AbstractDataAccessService { String schema = properties.getSchema(); LowerCaseTableNames lowerCaseTableNames = getLowerCaseTableNames(); String colTableName = Objects.equals(LowerCaseTableNames.SENSITIVE, lowerCaseTableNames) - ? "t.table_name tableName" - : "lower(t.table_name) tableName"; + ? 
"t.table_name tableName" + : "lower(t.table_name) tableName"; String sql = "SELECT t.owner tableSchema," + colTableName + ",t.num_rows tableRows,avg_row_len avgRowLength" - + " FROM ALL_TABLES t LEFT JOIN (SELECT DISTINCT table_name from ALL_CONSTRAINTS where OWNER = '" - + schema + "' AND constraint_type='P') pc on t.table_name=pc.table_name WHERE t.OWNER = '" - + schema + "'"; + + " FROM ALL_TABLES t LEFT JOIN (SELECT DISTINCT table_name from ALL_CONSTRAINTS where OWNER = '" + schema + + "' AND constraint_type='P') pc on t.table_name=pc.table_name WHERE t.OWNER = '" + schema + "'"; return wrapperTableMetadata(adasQueryTableMetadataList(sql)); } -- Gitee From cdc6df4e6e2c7fe4dea02eeaf8ed8310e4150be4 Mon Sep 17 00:00:00 2001 From: mystarry-sky Date: Mon, 11 Nov 2024 16:12:14 +0800 Subject: [PATCH 3/9] =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E8=A1=A8=E5=94=AF?= =?UTF-8?q?=E4=B8=80=E6=80=A7=E7=B4=A2=E5=BC=95=E5=9C=BA=E6=99=AF=E8=A1=A8?= =?UTF-8?q?=E6=A0=A1=E9=AA=8C=E3=80=82=20=E4=BF=AE=E5=A4=8Dconsumer?= =?UTF-8?q?=E6=8B=89=E5=8F=96=E5=BC=82=E5=B8=B8=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit (cherry picked from commit db62822faa2f7da16b4a6f4d653e113c28f9d77c) --- .../modules/check/KafkaConsumerHandler.java | 27 ++-- .../check/slice/SliceCheckContext.java | 15 +- .../check/slice/SliceCheckWorker.java | 135 ++++++++---------- .../data/access/MysqlDataAccessService.java | 11 +- .../data/access/OracleDataAccessService.java | 10 +- 5 files changed, 98 insertions(+), 100 deletions(-) diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java index e142040..3a83c2a 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java @@ -16,6 +16,7 @@ package org.opengauss.datachecker.check.modules.check; import com.alibaba.fastjson.JSON; + import org.apache.commons.lang3.StringUtils; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -26,6 +27,7 @@ import org.opengauss.datachecker.common.entry.extract.RowDataHash; import org.opengauss.datachecker.common.entry.extract.SliceExtend; import org.opengauss.datachecker.common.exception.CheckConsumerPollEmptyException; import org.opengauss.datachecker.common.util.LogUtils; +import org.opengauss.datachecker.common.util.ThreadUtil; import java.time.Duration; import java.util.*; @@ -44,10 +46,11 @@ public class KafkaConsumerHandler { private static final int MAX_CONSUMER_POLL_TIMES = 50; private KafkaConsumer kafkaConsumer; + /** * Constructor * - * @param consumer consumer + * @param consumer consumer * @param retryTimes retryTimes */ public KafkaConsumerHandler(KafkaConsumer consumer, int retryTimes) { @@ -66,6 +69,7 @@ public class KafkaConsumerHandler { /** * 获取kafka consumer * + * @return consumer */ public KafkaConsumer getConsumer() { return kafkaConsumer; @@ -74,7 +78,7 @@ public class KafkaConsumerHandler { /** * Query the Kafka partition data corresponding to the specified table * - * @param topic Kafka topic + * @param topic Kafka topic * @param partitions Kafka partitions * @return kafka partitions data */ @@ -96,8 +100,8 @@ public class KafkaConsumerHandler { * consumer poll data from the topic partition, and 
filter bu slice extend. then add data in the data list. * * @param topicPartition topic partition - * @param sExtend slice extend - * @param attempts + * @param sExtend slice extend + * @param attempts attempts */ public void consumerAssign(TopicPartition topicPartition, SliceExtend sExtend, int attempts) { kafkaConsumer.assign(List.of(topicPartition)); @@ -109,20 +113,21 @@ public class KafkaConsumerHandler { /** * consumer poll data from the topic partition, and filter bu slice extend. then add data in the data list. * - * @param sExtend slice extend + * @param sExtend slice extend * @param dataList data list */ public synchronized void pollTpSliceData(SliceExtend sExtend, List dataList) { AtomicLong currentCount = new AtomicLong(0); int pollEmptyCount = 0; while (currentCount.get() < sExtend.getCount()) { - ConsumerRecords records = - kafkaConsumer.poll(Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); + ConsumerRecords records = kafkaConsumer.poll( + Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); if (records.count() <= 0) { pollEmptyCount++; if (pollEmptyCount > MAX_CONSUMER_POLL_TIMES) { throw new CheckConsumerPollEmptyException(sExtend.getName()); } + ThreadUtil.sleep(KAFKA_CONSUMER_POLL_DURATION); continue; } pollEmptyCount = 0; @@ -139,8 +144,8 @@ public class KafkaConsumerHandler { /** * Query the Kafka partition data corresponding to the specified table * - * @param topic Kafka topic - * @param partitions Kafka partitions + * @param topic Kafka topic + * @param partitions Kafka partitions * @param shouldChangeConsumerGroup if true change consumer Group random * @return kafka partitions data */ @@ -188,8 +193,8 @@ public class KafkaConsumerHandler { } private void getTopicRecords(List dataList, KafkaConsumer kafkaConsumer) { - ConsumerRecords consumerRecords = - kafkaConsumer.poll(Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); + ConsumerRecords consumerRecords = kafkaConsumer.poll( + Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); consumerRecords.forEach(record -> { dataList.add(JSON.parseObject(record.value(), RowDataHash.class)); }); diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java index 5b23f55..9f0bc4f 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java @@ -81,10 +81,19 @@ public class SliceCheckContext { kafkaConsumerService.getRetryFetchRecordTimes()); } + /** + * get consumer retry fetch record times + * + * @return duration times + */ + public int getRetryFetchRecordTimes() { + return kafkaConsumerService.getRetryFetchRecordTimes(); + } + /** * get source or sink table topic * - * @param table table + * @param table table * @param endpoint source or sink * @return topic name */ @@ -97,7 +106,7 @@ public class SliceCheckContext { /** * refresh slice check progress * - * @param slice slice + * @param slice slice * @param rowCount slice of row count */ public void refreshSliceCheckProgress(SliceVo slice, long rowCount) { @@ -107,7 +116,7 @@ public class SliceCheckContext { /** * add slice check Result * - * @param slice slice + * @param slice slice * @param result check result */ public void addCheckResult(SliceVo slice, CheckDiffResult result) { diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java 
b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java index 29c6bd0..26b42af 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java @@ -17,6 +17,7 @@ package org.opengauss.datachecker.check.slice; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; + import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.MapUtils; import org.apache.kafka.common.TopicPartition; @@ -47,6 +48,7 @@ import org.opengauss.datachecker.common.exception.BucketNumberInconsistentExcept import org.opengauss.datachecker.common.exception.CheckConsumerPollEmptyException; import org.opengauss.datachecker.common.exception.MerkleTreeDepthException; import org.opengauss.datachecker.common.util.LogUtils; +import org.opengauss.datachecker.common.util.ThreadUtil; import org.opengauss.datachecker.common.util.TopicUtil; import org.springframework.lang.NonNull; @@ -72,8 +74,6 @@ import java.util.concurrent.CountDownLatch; public class SliceCheckWorker implements Runnable { private static final Logger LOGGER = LogUtils.getLogger(SliceCheckWorker.class); private static final int THRESHOLD_MIN_BUCKET_SIZE = 2; - // 设置最大尝试次数 - private static final int MAX_ATTEMPTS=5; private final SliceVo slice; @@ -81,17 +81,19 @@ public class SliceCheckWorker implements Runnable { private final SliceCheckEvent checkEvent; private final SliceCheckContext checkContext; private final TaskRegisterCenter registerCenter; - private final DifferencePair, List, List> difference = - DifferencePair.of(new LinkedList<>(), new LinkedList<>(), new LinkedList<>()); + private final DifferencePair, List, List> difference = DifferencePair.of( + new LinkedList<>(), new LinkedList<>(), new LinkedList<>()); private final LocalDateTime startTime; private long sliceRowCount; + // 设置最大尝试次数 + private int maxAttemptsTimes; private Topic topic = new Topic(); /** * slice check worker construct * - * @param checkEvent check event + * @param checkEvent check event * @param sliceCheckContext slice check context */ public SliceCheckWorker(SliceCheckEvent checkEvent, SliceCheckContext sliceCheckContext, @@ -102,6 +104,7 @@ public class SliceCheckWorker implements Runnable { this.slice = checkEvent.getSlice(); this.registerCenter = registerCenter; this.processNo = ConfigCache.getValue(ConfigConstants.PROCESS_NO); + this.maxAttemptsTimes = sliceCheckContext.getRetryFetchRecordTimes(); } @Override @@ -168,22 +171,17 @@ public class SliceCheckWorker implements Runnable { LogUtils.debug(LOGGER, "slice {} fetch empty", slice.getName()); } else { // sourceSize is less than thresholdMinBucketSize, that is, there is only one bucket. 
Compare - DifferencePair, List, List> subDifference = - compareBucketCommon(sourceTuple.getBuckets() - .get(0), sinkTuple.getBuckets() - .get(0)); - difference.getDiffering() - .addAll(subDifference.getDiffering()); - difference.getOnlyOnLeft() - .addAll(subDifference.getOnlyOnLeft()); - difference.getOnlyOnRight() - .addAll(subDifference.getOnlyOnRight()); + DifferencePair, List, List> subDifference = null; + subDifference = compareBucketCommon(sourceTuple.getBuckets().get(0), sinkTuple.getBuckets().get(0)); + difference.getDiffering().addAll(subDifference.getDiffering()); + difference.getOnlyOnLeft().addAll(subDifference.getOnlyOnLeft()); + difference.getOnlyOnRight().addAll(subDifference.getOnlyOnRight()); } } else { throw new BucketNumberInconsistentException(String.format( - "table[%s] slice[%s] build the bucket number is inconsistent, source-bucket-count=[%s] sink-bucket-count=[%s]" - + " Please synchronize data again! ", slice.getTable(), slice.getNo(), sourceTuple.getBucketSize(), - sinkTuple.getBucketSize())); + "table[%s] slice[%s] build the bucket number is inconsistent, source-bucket-count=[%s] " + + "sink-bucket-count=[%s] Please synchronize data again! ", slice.getTable(), slice.getNo(), + sourceTuple.getBucketSize(), sinkTuple.getBucketSize())); } } @@ -193,24 +191,23 @@ public class SliceCheckWorker implements Runnable { private void checkResult(String resultMsg) { CheckDiffResultBuilder builder = CheckDiffResultBuilder.builder(); - builder.process(ConfigCache.getValue(ConfigConstants.PROCESS_NO)) - .table(slice.getTable()) - .sno(slice.getNo()) - .error(resultMsg) - .topic(getConcatTableTopics()) - .schema(slice.getSchema()) - .fileName(slice.getName()) - .conditionLimit(getConditionLimit()) - .partitions(slice.getPtnNum()) - .isTableStructureEquals(true) - .startTime(startTime) - .endTime(LocalDateTime.now()) - .isExistTableMiss(false, null) - .rowCount((int) sliceRowCount) - .errorRate(20) - .checkMode(ConfigCache.getValue(ConfigConstants.CHECK_MODE, CheckMode.class)) - .keyDiff(difference.getOnlyOnLeft(), difference.getDiffering(), difference.getOnlyOnRight()); + .table(slice.getTable()) + .sno(slice.getNo()) + .error(resultMsg) + .topic(getConcatTableTopics()) + .schema(slice.getSchema()) + .fileName(slice.getName()) + .conditionLimit(getConditionLimit()) + .partitions(slice.getPtnNum()) + .isTableStructureEquals(true) + .startTime(startTime) + .endTime(LocalDateTime.now()) + .isExistTableMiss(false, null) + .rowCount((int) sliceRowCount) + .errorRate(20) + .checkMode(ConfigCache.getValue(ConfigConstants.CHECK_MODE, CheckMode.class)) + .keyDiff(difference.getOnlyOnLeft(), difference.getDiffering(), difference.getOnlyOnRight()); CheckDiffResult result = builder.build(); LogUtils.debug(LOGGER, "result {}", result); checkContext.addCheckResult(slice, result); @@ -233,18 +230,13 @@ public class SliceCheckWorker implements Runnable { return; } diffNodeList.forEach(diffNode -> { - Bucket sourceBucket = diffNode.getSource() - .getBucket(); - Bucket sinkBucket = diffNode.getSink() - .getBucket(); - DifferencePair, List, List> subDifference = - compareBucketCommon(sourceBucket, sinkBucket); - difference.getDiffering() - .addAll(subDifference.getDiffering()); - difference.getOnlyOnLeft() - .addAll(subDifference.getOnlyOnLeft()); - difference.getOnlyOnRight() - .addAll(subDifference.getOnlyOnRight()); + Bucket sourceBucket = diffNode.getSource().getBucket(); + Bucket sinkBucket = diffNode.getSink().getBucket(); + DifferencePair, List, List> subDifference = compareBucketCommon( + 
sourceBucket, sinkBucket); + difference.getDiffering().addAll(subDifference.getDiffering()); + difference.getOnlyOnLeft().addAll(subDifference.getOnlyOnLeft()); + difference.getOnlyOnRight().addAll(subDifference.getOnlyOnRight()); }); diffNodeList.clear(); } @@ -257,13 +249,10 @@ public class SliceCheckWorker implements Runnable { List entriesOnlyOnLeft = collectorDeleteOrInsert(bucketDifference.entriesOnlyOnLeft()); List entriesOnlyOnRight = collectorDeleteOrInsert(bucketDifference.entriesOnlyOnRight()); List differing = collectorUpdate(bucketDifference.entriesDiffering()); - - LogUtils.debug(LOGGER, "diff slice {} insert {}", slice.getName(), bucketDifference.entriesOnlyOnLeft() - .size()); - LogUtils.debug(LOGGER, "diff slice {} delete {}", slice.getName(), bucketDifference.entriesOnlyOnRight() - .size()); - LogUtils.debug(LOGGER, "diff slice {} update {}", slice.getName(), bucketDifference.entriesDiffering() - .size()); + LogUtils.debug(LOGGER, "diff slice {} insert {}", slice.getName(), bucketDifference.entriesOnlyOnLeft().size()); + LogUtils.debug(LOGGER, "diff slice {} delete {}", slice.getName(), + bucketDifference.entriesOnlyOnRight().size()); + LogUtils.debug(LOGGER, "diff slice {} update {}", slice.getName(), bucketDifference.entriesDiffering().size()); return DifferencePair.of(entriesOnlyOnLeft, entriesOnlyOnRight, differing); } @@ -313,12 +302,11 @@ public class SliceCheckWorker implements Runnable { // Initialize source bucket column list data long startFetch = System.currentTimeMillis(); CountDownLatch countDownLatch = new CountDownLatch(checkTupleList.size()); - int avgSliceCount = (int) (sourceTuple.getSlice() - .getCount() + sinkTuple.getSlice() - .getCount()) / 2; + int avgSliceCount = (int) (sourceTuple.getSlice().getCount() + sinkTuple.getSlice().getCount()) / 2; KafkaConsumerHandler consumer = checkContext.createKafkaHandler(); checkTupleList.forEach(check -> { - initBucketList(check.getEndpoint(), check.getSlice(), check.getBuckets(), bucketDiff, avgSliceCount, consumer); + initBucketList(check.getEndpoint(), check.getSlice(), check.getBuckets(), bucketDiff, avgSliceCount, + consumer); countDownLatch.countDown(); }); countDownLatch.await(); @@ -336,30 +324,33 @@ public class SliceCheckWorker implements Runnable { } private void initBucketList(Endpoint endpoint, SliceExtend sliceExtend, List bucketList, - Map> bucketDiff, int avgSliceCount, KafkaConsumerHandler consumer) { + Map> bucketDiff, int avgSliceCount, KafkaConsumerHandler consumer) { // Use feign client to pull Kafka data List dataList = new LinkedList<>(); - TopicPartition topicPartition = new TopicPartition(Objects.equals(Endpoint.SOURCE, endpoint) ? - topic.getSourceTopicName() : topic.getSinkTopicName(), topic.getPtnNum()); + TopicPartition topicPartition = new TopicPartition( + Objects.equals(Endpoint.SOURCE, endpoint) ? 
topic.getSourceTopicName() : topic.getSinkTopicName(), + topic.getPtnNum()); int attempts = 0; - while (attempts < MAX_ATTEMPTS) { + while (attempts < maxAttemptsTimes) { try { consumer.consumerAssign(topicPartition, sliceExtend, attempts); consumer.pollTpSliceData(sliceExtend, dataList); break; // 如果成功,跳出循环 } catch (CheckConsumerPollEmptyException ex) { - if (++attempts >= MAX_ATTEMPTS) { + if (++attempts >= maxAttemptsTimes) { checkContext.returnConsumer(consumer); throw ex; // 如果达到最大尝试次数,重新抛出异常 } + ThreadUtil.sleepOneSecond(); + LogUtils.warn(LOGGER, "poll slice data {} {} , retry ({})", sliceExtend.getName(), sliceExtend.getNo(), + attempts); } } if (CollectionUtils.isEmpty(dataList)) { return; } - BuilderBucketHandler bucketBuilder = - new BuilderBucketHandler(ConfigCache.getIntValue(ConfigConstants.BUCKET_CAPACITY)); - + BuilderBucketHandler bucketBuilder = new BuilderBucketHandler( + ConfigCache.getIntValue(ConfigConstants.BUCKET_CAPACITY)); Map bucketMap = new ConcurrentHashMap<>(InitialCapacity.CAPACITY_128); // Use the pulled data to build the bucket list bucketBuilder.builder(dataList, avgSliceCount, bucketMap); @@ -394,12 +385,6 @@ public class SliceCheckWorker implements Runnable { bucketList.sort(Comparator.comparingInt(Bucket::getNumber)); } - private void getSliceDataFromTopicPartition(KafkaConsumerHandler consumer, SliceExtend sExtend, - List dataList) throws CheckConsumerPollEmptyException { - - - } - /** *
      * Align the bucket list data according to the statistical results of source
@@ -411,12 +396,10 @@ public class SliceCheckWorker implements Runnable {
         if (MapUtils.isNotEmpty(bucketDiff)) {
             bucketDiff.forEach((number, pair) -> {
                 if (pair.getSource() == -1) {
-                    sourceTuple.getBuckets()
-                               .add(BuilderBucketHandler.builderEmpty(number));
+                    sourceTuple.getBuckets().add(BuilderBucketHandler.builderEmpty(number));
                 }
                 if (pair.getSink() == -1) {
-                    sinkTuple.getBuckets()
-                             .add(BuilderBucketHandler.builderEmpty(number));
+                    sinkTuple.getBuckets().add(BuilderBucketHandler.builderEmpty(number));
                 }
             });
         }
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
index d388461..afee709 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
@@ -91,12 +91,11 @@ public class MysqlDataAccessService extends AbstractDataAccessService {
     @Override
     public List<PrimaryColumnBean> queryTableUniqueColumns(String tableName) {
         String schema = properties.getSchema();
-        String sql = "select kcu.table_name tableName, kcu.column_name columnName,kcu.ordinal_position colIdx,"
-            + " kcu.constraint_name indexIdentifier from  information_schema.table_constraints tc "
-            + " left join information_schema.KEY_COLUMN_USAGE kcu on tc.table_schema =kcu.table_schema"
-            + " and tc.constraint_name=kcu.constraint_name and tc.table_name = kcu.table_name"
-            + " where tc.table_schema='" + schema + "' and tc.table_name='" + tableName + "'"
-            + " and tc.constraint_type='UNIQUE' ;";
+        String sql = "select s.table_schema,s.table_name tableName,s.column_name columnName,c.ordinal_position colIdx,"
+            + " s.index_name indexIdentifier from information_schema.statistics s "
+            + " left join information_schema.columns c on s.table_schema=c.table_schema  "
+            + " and s.table_schema=c.table_schema and s.table_name=c.table_name and s.column_name=c.column_name "
+            + " where s.table_schema='" + schema + "' and s.table_name='" + tableName + "'" + " and s.non_unique=0;";
         List<UniqueColumnBean> uniqueColumns = adasQueryTableUniqueColumns(sql);
         return translateUniqueToPrimaryColumns(uniqueColumns);
     }
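
The rewritten lookup above pulls unique-index columns from information_schema.statistics (non_unique = 0), joined to information_schema.columns for the column position, rather than walking table_constraints/KEY_COLUMN_USAGE. A standalone sketch of the same kind of query that can be pointed at a test schema; the JDBC URL, credentials, schema and table name are placeholders, and it binds them as parameters instead of concatenating strings the way the service code does:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class UniqueIndexQuerySketch {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://localhost:3306/test"; // placeholder connection
        String schema = "test";
        String tableName = "t_order";
        String sql = "select s.table_name tableName, s.column_name columnName,"
            + " c.ordinal_position colIdx, s.index_name indexIdentifier"
            + " from information_schema.statistics s"
            + " left join information_schema.columns c on s.table_schema = c.table_schema"
            + " and s.table_name = c.table_name and s.column_name = c.column_name"
            + " where s.table_schema = ? and s.table_name = ? and s.non_unique = 0";
        try (Connection conn = DriverManager.getConnection(url, "user", "password");
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, schema);
            ps.setString(2, tableName);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.printf("%s.%s index=%s colIdx=%d%n",
                        rs.getString("tableName"), rs.getString("columnName"),
                        rs.getString("indexIdentifier"), rs.getInt("colIdx"));
                }
            }
        }
    }
}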
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java
index e29bafb..bfaef45 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java
@@ -85,10 +85,12 @@ public class OracleDataAccessService extends AbstractDataAccessService {
     @Override
     public List<PrimaryColumnBean> queryTableUniqueColumns(String tableName) {
         String schema = properties.getSchema();
-        String sql = "SELECT uc.table_name tableName,uc.constraint_name indexIdentifier,ucc.column_name columnName,"
-            + " uc.constraint_type,ucc.position colIdx FROM USER_CONSTRAINTS uc "
-            + " JOIN USER_CONS_COLUMNS ucc ON uc.constraint_name=ucc.constraint_name "
-            + " WHERE uc.constraint_type='U' and uc.owner='" + schema + "'and uc.table_name='" + tableName + "'";
+        String sql = " SELECT ui.index_name indexIdentifier,ui.table_owner,ui.table_name tableName,"
+            + " utc.column_name columnName, utc.column_id colIdx"
+            + " from user_indexes ui left join user_ind_columns uic on ui.index_name=uic.index_name "
+            + " and ui.table_name=uic.table_name  "
+            + " left join user_tab_columns utc on ui.table_name =utc.table_name and uic.column_name=utc.column_name"
+            + " where ui.uniqueness='UNIQUE' and ui.table_owner='" + schema + "' and ui.table_name='" + tableName + "'";
         List<UniqueColumnBean> uniqueColumns = adasQueryTableUniqueColumns(sql);
         return translateUniqueToPrimaryColumns(uniqueColumns);
     }
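
Both overrides above hand the raw index rows to translateUniqueToPrimaryColumns: a composite unique index produces one row per participating column, and the same column can appear again through another unique index, so the mapping reduces rows to (table, column) pairs and de-duplicates them. A simplified, self-contained sketch of that step; the records stand in for UniqueColumnBean/PrimaryColumnBean, whose value-based equals/hashCode come from Lombok @Data in the real beans:

import java.util.List;
import java.util.stream.Collectors;

public class UniqueToPrimarySketch {
    record UniqueColumn(String tableName, String columnName, String indexIdentifier) {}

    record PrimaryColumn(String tableName, String columnName) {}

    public static void main(String[] args) {
        // One row per index column; order_no participates in two unique indexes.
        List<UniqueColumn> uniqueColumns = List.of(
            new UniqueColumn("t_order", "order_no", "uk_order_no"),
            new UniqueColumn("t_order", "order_no", "uk_order_no_customer"),
            new UniqueColumn("t_order", "customer_id", "uk_order_no_customer"));
        List<PrimaryColumn> primaryColumns = uniqueColumns.stream()
            .map(u -> new PrimaryColumn(u.tableName(), u.columnName()))
            .distinct()
            .collect(Collectors.toList());
        // Two entries remain: (t_order, order_no) and (t_order, customer_id).
        primaryColumns.forEach(System.out::println);
    }
}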
-- 
Gitee


From 704e15922e48e5700debb20e9b98ceab05b664ae Mon Sep 17 00:00:00 2001
From: mystarry-sky 
Date: Thu, 7 Nov 2024 11:35:34 +0800
Subject: [PATCH 4/9] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E6=A0=A1=E9=AA=8C?=
 =?UTF-8?q?=E5=A4=B1=E8=B4=A5=E5=A4=84=E7=90=86=E6=B5=81=E7=A8=8B=20?=
 =?UTF-8?q?=E6=97=A5=E5=BF=97=E6=89=93=E5=8D=B0=E5=BC=82=E5=B8=B8=E9=97=AE?=
 =?UTF-8?q?=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../check/slice/SliceCheckEventHandler.java   |  4 +-
 .../data/access/MysqlDataAccessService.java   |  4 +-
 .../data/access/OpgsDataAccessService.java    | 60 +++++++++++++------
 3 files changed, 44 insertions(+), 24 deletions(-)

diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java
index 0e7771b..36d7218 100644
--- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java
+++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java
@@ -93,9 +93,7 @@ public class SliceCheckEventHandler {
      */
     public void handleFailed(SliceCheckEvent checkEvent) {
         LogUtils.warn(log, "slice check event , table slice has unknown error [{}][{} : {}]", checkEvent.getCheckName(),
-                checkEvent.getSource()
-                        .getTableHash(), checkEvent.getSink()
-                        .getTableHash());
+                checkEvent.getSource(), checkEvent.getSink());
         long count = getCheckSliceCount(checkEvent);
         sliceCheckContext.refreshSliceCheckProgress(checkEvent.getSlice(), count);
         CheckDiffResult result = buildSliceDiffResult(checkEvent.getSlice(), (int) count, true, "slice has unknown error");
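
The removed lines formatted the warning with checkEvent.getSource().getTableHash(); on this failure path the source or sink extend is presumably not always populated, so the chained getter could throw and the log call itself became the exception, hiding the original error. Logging the objects directly lets the {} placeholder render a missing value as "null". A small, self-contained illustration with hypothetical names (not the project classes):

public class NullSafeLoggingSketch {
    static class SliceExtend {
        long getTableHash() {
            return 42L;
        }
    }

    // Stand-in for placeholder-style logging: a null argument is printed as "null".
    static void warn(String template, Object arg) {
        System.out.println(template.replace("{}", String.valueOf(arg)));
    }

    public static void main(String[] args) {
        SliceExtend source = null; // e.g. the extract side never reported a slice extend
        warn("slice source [{}]", source); // prints: slice source [null]
        try {
            warn("slice source [{}]", source.getTableHash()); // throws before anything is logged
        } catch (NullPointerException ex) {
            System.out.println("the real check failure would be masked by: " + ex);
        }
    }
}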
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
index 40319a2..8383c69 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
@@ -65,8 +65,8 @@ public class MysqlDataAccessService extends AbstractDataAccessService {
     @Override
     public List<String> dasQueryTableNameList() {
         String schema = properties.getSchema();
-        String sql = "SELECT info.table_name tableName FROM information_schema.tables info WHERE table_schema='"
-                + schema + "'";
+        String sql = "select info.table_name tableName from information_schema.tables info where table_schema='"
+                + schema + "'  and table_type='BASE TABLE'";
         return adasQueryTableNameList(sql);
     }
 
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java
index 6b5dca7..d910ef5 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java
@@ -27,6 +27,7 @@ import org.opengauss.datachecker.common.entry.extract.TableMetadata;
 import org.opengauss.datachecker.extract.data.mapper.OpgsMetaDataMapper;
 
 import javax.annotation.PostConstruct;
+
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -75,22 +76,43 @@ public class OpgsDataAccessService extends AbstractDataAccessService {
         return health(schema, sql);
     }
 
+    /**
+     * <pre>
+     * DAS query of the table name list:
+     *  select c.relname tableName from pg_class c  LEFT JOIN pg_namespace n on n.oid = c.relnamespace
+     *  where n.nspname=? and c.relkind ='r';
+     *  </pre>
+ * + * @return tableNameList + */ @Override public List dasQueryTableNameList() { String schema = properties.getSchema(); String sql = "select c.relname tableName from pg_class c LEFT JOIN pg_namespace n on n.oid = c.relnamespace " - + " where n.nspname='" + schema + "' and c.relkind ='r';"; + + " where n.nspname='" + schema + "' and c.relkind ='r';"; return adasQueryTableNameList(sql); } + /** + *
+     *     Query the primary key column information of tables:
+     *      select c.relname tableName,ns.nspname,ns.oid,a.attname columnName from pg_class c
+     *      left join pg_namespace ns on c.relnamespace=ns.oid
+     *      left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped
+     *      inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey)
+     *      where ns.nspname='test' and cs.contype='p';
+     * </pre>
+ * + * @return primaryColumnList 主键列信息列表 + */ @Override public List queryTablePrimaryColumns() { String schema = properties.getSchema(); String sql = "select c.relname tableName,ns.nspname,ns.oid,a.attname columnName from pg_class c " - + "left join pg_namespace ns on c.relnamespace=ns.oid " - + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " - + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " - + "where ns.nspname='" + schema + "' and cs.contype='p';"; + + "left join pg_namespace ns on c.relnamespace=ns.oid " + + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " + + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " + + "where ns.nspname='" + schema + "' and cs.contype='p';"; return adasQueryTablePrimaryColumns(sql); } @@ -98,10 +120,10 @@ public class OpgsDataAccessService extends AbstractDataAccessService { public List queryTablePrimaryColumns(String tableName) { String schema = properties.getSchema(); String sql = "select c.relname tableName,ns.nspname,ns.oid,a.attname columnName from pg_class c " - + "left join pg_namespace ns on c.relnamespace=ns.oid " - + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " - + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " - + "where ns.nspname='" + schema + "' and c.relname='" + tableName + "' and cs.contype='p';"; + + "left join pg_namespace ns on c.relnamespace=ns.oid " + + "left join pg_attribute a on c.oid=a.attrelid and a.attnum>0 and not a.attisdropped " + + "inner join pg_constraint cs on a.attrelid=cs.conrelid and a.attnum=any(cs.conkey) " + + "where ns.nspname='" + schema + "' and c.relname='" + tableName + "' and cs.contype='p';"; return adasQueryTablePrimaryColumns(sql); } @@ -119,12 +141,12 @@ public class OpgsDataAccessService extends AbstractDataAccessService { public List dasQueryTableMetadataList() { LowerCaseTableNames lowerCaseTableNames = getLowerCaseTableNames(); String colTableName = Objects.equals(LowerCaseTableNames.SENSITIVE, lowerCaseTableNames) - ? "c.relname tableName" - : "lower(c.relname) tableName"; + ? 
"c.relname tableName" + : "lower(c.relname) tableName"; String sql = " select n.nspname tableSchema, " + colTableName + ",c.reltuples tableRows, " - + "case when c.reltuples>0 then pg_table_size(c.oid)/c.reltuples else 0 end as avgRowLength " - + "from pg_class c LEFT JOIN pg_namespace n on n.oid = c.relnamespace " + "where n.nspname='" - + properties.getSchema() + "' and c.relkind ='r';"; + + "case when c.reltuples>0 then pg_table_size(c.oid)/c.reltuples else 0 end as avgRowLength " + + "from pg_class c LEFT JOIN pg_namespace n on n.oid = c.relnamespace " + "where n.nspname='" + + properties.getSchema() + "' and c.relkind ='r';"; return wrapperTableMetadata(adasQueryTableMetadataList(sql)); } @@ -153,9 +175,9 @@ public class OpgsDataAccessService extends AbstractDataAccessService { @Override public List queryPointList(Connection connection, DataAccessParam param) { String sql = "select s.%s from ( select row_number() over(order by r.%s asc) as rn,r.%s from %s.%s r) s" - + " where mod(s.rn, %s ) = 1;"; + + " where mod(s.rn, %s ) = 1;"; sql = String.format(sql, param.getColName(), param.getColName(), param.getColName(), param.getSchema(), - param.getName(), param.getOffset()); + param.getName(), param.getOffset()); return adasQueryPointList(connection, sql); } @@ -164,7 +186,6 @@ public class OpgsDataAccessService extends AbstractDataAccessService { return opgsMetaDataMapper.checkDatabaseNotEmpty(properties.getSchema()); } - @Override public LowerCaseTableNames queryLowerCaseTableNames() { String sql = "SHOW VARIABLES LIKE \"lower_case_table_names\";"; @@ -181,7 +202,8 @@ public class OpgsDataAccessService extends AbstractDataAccessService { } finally { closeConnection(connection); } - return isOgCompatibilityB() ? result.getOrDefault(DOLPHIN_LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN) - : result.getOrDefault(LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN); + return isOgCompatibilityB() + ? 
result.getOrDefault(DOLPHIN_LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN) + : result.getOrDefault(LOWER_CASE_TABLE_NAMES, LowerCaseTableNames.UNKNOWN); } } -- Gitee From 09f1869899b5dc72ef9fd5e489d149b7ad1fa4fd Mon Sep 17 00:00:00 2001 From: mystarry-sky Date: Sat, 9 Nov 2024 16:09:19 +0800 Subject: [PATCH 5/9] =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E8=A1=A8=E5=94=AF?= =?UTF-8?q?=E4=B8=80=E6=80=A7=E7=BA=A6=E6=9D=9F=E5=9C=BA=E6=99=AF=E8=A1=A8?= =?UTF-8?q?=E6=A0=A1=E9=AA=8C=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- config/log4j2.xml | 10 +-- .../entry/extract/PrimaryColumnBean.java | 4 + .../entry/extract/UniqueColumnBean.java | 48 +++++++++++ .../extract/data/BaseDataService.java | 61 +++++++------- .../access/AbstractDataAccessService.java | 80 +++++++++++++++---- .../data/access/CsvDataAccessService.java | 5 ++ .../data/access/DataAccessService.java | 30 ++++--- .../data/access/MysqlDataAccessService.java | 35 +++++--- .../data/access/OpgsDataAccessService.java | 13 +++ .../data/access/OracleDataAccessService.java | 25 ++++-- 10 files changed, 231 insertions(+), 80 deletions(-) create mode 100644 datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java diff --git a/config/log4j2.xml b/config/log4j2.xml index e22d912..f8cd663 100644 --- a/config/log4j2.xml +++ b/config/log4j2.xml @@ -30,19 +30,13 @@ - + - - - - - - + diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java index 651e26e..5f85c51 100644 --- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java +++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/PrimaryColumnBean.java @@ -15,7 +15,9 @@ package org.opengauss.datachecker.common.entry.extract; +import lombok.AllArgsConstructor; import lombok.Data; +import lombok.NoArgsConstructor; /** * PrimaryColumnBean @@ -25,6 +27,8 @@ import lombok.Data; * @since :11 */ @Data +@NoArgsConstructor +@AllArgsConstructor public class PrimaryColumnBean { /** * Table diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java new file mode 100644 index 0000000..158f147 --- /dev/null +++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/UniqueColumnBean.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved. + * + * openGauss is licensed under Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * + * http://license.coscl.org.cn/MulanPSL2 + * + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, + * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, + * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. + * See the Mulan PSL v2 for more details. 
+ */ + +package org.opengauss.datachecker.common.entry.extract; + +import lombok.Data; + +/** + * UniqueColumnBean + * + * @author :wangchao + * @date :Created in 2023/12/23 + * @since :11 + */ +@Data +public class UniqueColumnBean { + /** + * Table + */ + private String tableName; + + /** + * Primary key column name + */ + private String columnName; + + /** + * Index identifier + */ + private String indexIdentifier; + + /** + * Column index + */ + private Integer colIdx; +} \ No newline at end of file diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java index 45d3c7c..54e9451 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/BaseDataService.java @@ -16,6 +16,7 @@ package org.opengauss.datachecker.extract.data; import com.alibaba.druid.pool.DruidDataSource; + import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.MapUtils; import org.apache.logging.log4j.Logger; @@ -33,6 +34,7 @@ import org.opengauss.datachecker.extract.service.RuleAdapterService; import org.springframework.stereotype.Service; import javax.annotation.Resource; + import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; @@ -103,15 +105,13 @@ public class BaseDataService { */ public List bdsQueryTableMetadataList() { List metadataList = dataAccessService.dasQueryTableMetadataList(); - return metadataList.stream() - .filter(meta -> { - boolean isChecking = ruleAdapterService.filterTableByRule(meta.getTableName()); - if (isChecking) { - tableNameList.add(meta.getTableName()); - } - return isChecking; - }) - .collect(Collectors.toList()); + return metadataList.stream().filter(meta -> { + boolean isChecking = ruleAdapterService.filterTableByRule(meta.getTableName()); + if (isChecking) { + tableNameList.add(meta.getTableName()); + } + return isChecking; + }).collect(Collectors.toList()); } /** @@ -124,8 +124,7 @@ public class BaseDataService { if (CollectionUtils.isEmpty(columnBeanList)) { return new HashMap<>(); } - return columnBeanList.stream() - .collect(Collectors.groupingBy(PrimaryColumnBean::getTableName)); + return columnBeanList.stream().collect(Collectors.groupingBy(PrimaryColumnBean::getTableName)); } private List filterByTableRules(List tableNameList) { @@ -189,28 +188,31 @@ public class BaseDataService { /** * update table metadata, and filter column rules * - * @param tableMetadata table metadata + * @param tableMetadata table metadata * @param primaryColumnBeans primary column */ public void updateTableColumnMetaData(TableMetadata tableMetadata, List primaryColumnBeans) { String tableName = tableMetadata.getTableName(); final List columns = dataAccessService.queryTableColumnsMetaData(tableName); - if (Objects.isNull(columns)) { + if (CollectionUtils.isEmpty(columns)) { LogUtils.error(log, "table columns metadata is null ,{}", tableName); return; } - if (Objects.isNull(primaryColumnBeans)) { - primaryColumnBeans = dataAccessService.queryTablePrimaryColumns(tableName); + List tempPrimaryColumnBeans = primaryColumnBeans; + if (CollectionUtils.isEmpty(primaryColumnBeans)) { + tempPrimaryColumnBeans = dataAccessService.queryTablePrimaryColumns(tableName); } - if (Objects.nonNull(primaryColumnBeans)) { - List primaryColumnNameList = getPrimaryColumnNames(primaryColumnBeans); + if 
(CollectionUtils.isEmpty(tempPrimaryColumnBeans)) { + tempPrimaryColumnBeans = dataAccessService.queryTableUniqueColumns(tableName); + } + if (CollectionUtils.isNotEmpty(tempPrimaryColumnBeans)) { + List primaryColumnNameList = getPrimaryColumnNames(tempPrimaryColumnBeans); for (ColumnsMetaData column : columns) { if (primaryColumnNameList.contains(column.getLowerCaseColumnName())) { column.setColumnKey(ColumnKey.PRI); } } } - tableMetadata.setColumnsMetas(ruleAdapterService.executeColumnRule(columns)); tableMetadata.setPrimaryMetas(getTablePrimaryColumn(columns)); tableMetadata.setTableHash(calcTableHash(columns)); @@ -218,16 +220,17 @@ public class BaseDataService { private List getPrimaryColumnNames(List primaryColumnBeans) { return primaryColumnBeans.stream() - .map(PrimaryColumnBean::getColumnName) - .map(String::toLowerCase) - .collect(Collectors.toList()); + .map(PrimaryColumnBean::getColumnName) + .map(String::toLowerCase) + .distinct() + .collect(Collectors.toList()); } private List getTablePrimaryColumn(List columnsMetaData) { return columnsMetaData.stream() - .filter(meta -> ColumnKey.PRI.equals(meta.getColumnKey())) - .sorted(Comparator.comparing(ColumnsMetaData::getOrdinalPosition)) - .collect(Collectors.toList()); + .filter(meta -> ColumnKey.PRI.equals(meta.getColumnKey())) + .sorted(Comparator.comparing(ColumnsMetaData::getOrdinalPosition)) + .collect(Collectors.toList()); } /** @@ -255,9 +258,8 @@ public class BaseDataService { private long calcTableHash(List columnsMetas) { StringBuilder buffer = new StringBuilder(); columnsMetas.sort(Comparator.comparing(ColumnsMetaData::getOrdinalPosition)); - columnsMetas.forEach(column -> buffer.append(column.getColumnName() - .toLowerCase(Locale.ENGLISH)) - .append(column.getOrdinalPosition())); + columnsMetas.forEach(column -> buffer.append(column.getColumnName().toLowerCase(Locale.ENGLISH)) + .append(column.getOrdinalPosition())); return HASH_UTIL.hashBytes(buffer.toString()); } @@ -289,9 +291,8 @@ public class BaseDataService { } else { String[] sqlModeArray = sqlMode.split(","); String newSqlMode = Arrays.stream(sqlModeArray) - .filter(mode -> !mode.equalsIgnoreCase( - ConfigConstants.SQL_MODE_NAME_PAD_CHAR_TO_FULL_LENGTH)) - .collect(Collectors.joining(",")); + .filter(mode -> !mode.equalsIgnoreCase(ConfigConstants.SQL_MODE_NAME_PAD_CHAR_TO_FULL_LENGTH)) + .collect(Collectors.joining(",")); boolean isPadCharFull = ConfigCache.getBooleanValue(ConfigConstants.SQL_MODE_PAD_CHAR_TO_FULL_LENGTH); if (isPadCharFull) { newSqlMode += ConfigConstants.SQL_MODE_NAME_PAD_CHAR_TO_FULL_LENGTH; diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java index a83d498..93b19b8 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/AbstractDataAccessService.java @@ -16,6 +16,9 @@ package org.opengauss.datachecker.extract.data.access; import com.alibaba.druid.pool.DruidDataSource; + +import cn.hutool.core.collection.CollUtil; + import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.opengauss.datachecker.common.config.ConfigCache; @@ -25,6 +28,7 @@ import org.opengauss.datachecker.common.entry.common.Health; import org.opengauss.datachecker.common.entry.enums.LowerCaseTableNames; 
import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.common.exception.ExtractDataAccessException; import org.opengauss.datachecker.common.util.DurationUtils; import org.opengauss.datachecker.common.util.LogUtils; @@ -36,16 +40,19 @@ import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import javax.annotation.Resource; import javax.sql.DataSource; + import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.time.Duration; import java.time.LocalDateTime; +import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; /** * AbstractDataAccessService @@ -113,7 +120,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { public String adasQuerySchema(Connection connection, String executeQueryStatement) { String schema = ""; try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { if (resultSet.next()) { schema = resultSet.getString(RS_COL_SCHEMA); } @@ -129,7 +136,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { * 数据库schema是否合法 * * @param schema schema - * @param sql sql + * @param sql sql * @return result */ public Health health(String schema, String sql) { @@ -160,7 +167,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { Connection connection = getConnection(); List list = new LinkedList<>(); try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { while (resultSet.next()) { list.add(resultSet.getString(RS_COL_TABLE_NAME)); } @@ -185,7 +192,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { Connection connection = getConnection(); List list = new LinkedList<>(); try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { PrimaryColumnBean metadata; while (resultSet.next()) { metadata = new PrimaryColumnBean(); @@ -203,6 +210,50 @@ public abstract class AbstractDataAccessService implements DataAccessService { return list; } + /** + * adas查询表的唯一性约束列信息 + * + * @param executeQueryStatement executeQueryStatement + * @return List + */ + public List adasQueryTableUniqueColumns(String executeQueryStatement) { + Connection connection = getConnection(); + List list = new LinkedList<>(); + try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); + ResultSet resultSet = ps.executeQuery()) { + UniqueColumnBean metadata; + while (resultSet.next()) { + metadata = new UniqueColumnBean(); + metadata.setTableName(resultSet.getString("tableName")); + metadata.setColumnName(resultSet.getString("columnName")); + metadata.setIndexIdentifier(resultSet.getString("indexIdentifier")); + metadata.setColIdx(resultSet.getInt("colIdx")); + list.add(metadata); + } + } catch (SQLException esql) { + LogUtils.error(log, "adasQueryTablePrimaryColumns error:", esql); + } finally { + closeConnection(connection); + } + return list; + } + + /** + * 
将UniqueColumnBean列表转换为PrimaryColumnBean列表 + * + * @param uniqueColumns 输入的UniqueColumnBean列表,可能为空 + * @return PrimaryColumnBean列表,永远不会为null,其中的元素是唯一的 + */ + public List translateUniqueToPrimaryColumns(List uniqueColumns) { + if (CollUtil.isEmpty(uniqueColumns)) { + return new ArrayList<>(); + } + return uniqueColumns.stream() + .map(u -> new PrimaryColumnBean(u.getTableName(), u.getColumnName())) + .distinct() + .collect(Collectors.toList()); + } + /** * adasQueryTableMetadataList * @@ -214,7 +265,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { Connection connection = getConnection(); List list = new LinkedList<>(); try (PreparedStatement ps = connection.prepareStatement(executeQueryStatement); - ResultSet resultSet = ps.executeQuery()) { + ResultSet resultSet = ps.executeQuery()) { TableMetadata metadata; while (resultSet.next()) { metadata = new TableMetadata(); @@ -238,7 +289,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { * 查询表数据抽样检查点清单 * * @param connection connection - * @param sql 检查点查询SQL + * @param sql 检查点查询SQL * @return 检查点列表 */ protected List adasQueryPointList(Connection connection, String sql) { @@ -259,7 +310,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { * 查询表数据抽样检查点清单 * * @param connection connection - * @param sql 检查点查询SQL + * @param sql 检查点查询SQL * @return 检查点列表 */ protected String adasQueryOnePoint(Connection connection, String sql) { @@ -277,8 +328,7 @@ public abstract class AbstractDataAccessService implements DataAccessService { } private long durationBetweenToMillis(LocalDateTime start, LocalDateTime end) { - return Duration.between(start, end) - .toMillis(); + return Duration.between(start, end).toMillis(); } /** @@ -292,15 +342,15 @@ public abstract class AbstractDataAccessService implements DataAccessService { return null; } return tableMetadata.setDataBaseType(properties.getDatabaseType()) - .setEndpoint(properties.getEndpoint()) - .setOgCompatibilityB(isOgCompatibilityB); + .setEndpoint(properties.getEndpoint()) + .setOgCompatibilityB(isOgCompatibilityB); } /** * jdbc mode does not use it * - * @param table table - * @param fileName fileName + * @param table table + * @param fileName fileName * @param differenceList differenceList * @return result */ @@ -317,8 +367,8 @@ public abstract class AbstractDataAccessService implements DataAccessService { */ protected List wrapperTableMetadata(List list) { list.forEach(meta -> meta.setDataBaseType(properties.getDatabaseType()) - .setEndpoint(properties.getEndpoint()) - .setOgCompatibilityB(isOgCompatibilityB)); + .setEndpoint(properties.getEndpoint()) + .setOgCompatibilityB(isOgCompatibilityB)); return list; } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java index f8eb60a..322796b 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/CsvDataAccessService.java @@ -269,4 +269,9 @@ public class CsvDataAccessService implements DataAccessService { public LowerCaseTableNames queryLowerCaseTableNames() { return LowerCaseTableNames.INSENSITIVE; } + + @Override + public List queryTableUniqueColumns(String tableName) { + return null; + } } diff --git 
a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java index 5d2e84d..6eaef38 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/DataAccessService.java @@ -25,6 +25,7 @@ import org.opengauss.datachecker.common.entry.extract.TableMetadata; import org.springframework.jdbc.core.RowMapper; import javax.sql.DataSource; + import java.sql.Connection; import java.util.List; import java.util.Map; @@ -111,7 +112,7 @@ public interface DataAccessService { * query table column min value * * @param connection connection - * @param param param + * @param param param * @return min value of string */ String min(Connection connection, DataAccessParam param); @@ -120,7 +121,7 @@ public interface DataAccessService { * query table column max value * * @param connection connection - * @param param param + * @param param param * @return max value of string */ String max(Connection connection, DataAccessParam param); @@ -136,10 +137,10 @@ public interface DataAccessService { /** * query row data by sql * - * @param sql sql - * @param param sql param + * @param sql sql + * @param param sql param * @param rowMapper row mapper - * @param data type + * @param data type * @return data */ List query(String sql, Map param, RowMapper rowMapper); @@ -147,10 +148,10 @@ public interface DataAccessService { /** * query data from csv file * - * @param table table - * @param fileName fileName + * @param table table + * @param fileName fileName * @param differenceList differenceList - * @return + * @return data */ List> query(String table, String fileName, List differenceList); @@ -165,7 +166,7 @@ public interface DataAccessService { * query table check point list * * @param connection connection - * @param param param + * @param param param * @return point list */ List queryPointList(Connection connection, DataAccessParam param); @@ -187,4 +188,15 @@ public interface DataAccessService { * @return value */ LowerCaseTableNames queryLowerCaseTableNames(); + + /** + * query table unique columns + *
+     *     Unique constraints and unique indexes
+     * </pre>
+ * + * @param tableName table + * @return unique columns + */ + List queryTableUniqueColumns(String tableName); } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java index 8383c69..d388461 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java @@ -21,6 +21,7 @@ import org.opengauss.datachecker.common.entry.enums.LowerCaseTableNames; import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData; import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.extract.data.mapper.MysqlMetaDataMapper; import java.sql.Connection; @@ -52,8 +53,8 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public Health health() { String schema = properties.getSchema(); - String sql = "SELECT SCHEMA_NAME tableSchema FROM information_schema.SCHEMATA info WHERE SCHEMA_NAME='" - + schema + "' limit 1"; + String sql = "SELECT SCHEMA_NAME tableSchema FROM information_schema.SCHEMATA info WHERE SCHEMA_NAME='" + schema + + "' limit 1"; return health(schema, sql); } @@ -66,7 +67,7 @@ public class MysqlDataAccessService extends AbstractDataAccessService { public List dasQueryTableNameList() { String schema = properties.getSchema(); String sql = "select info.table_name tableName from information_schema.tables info where table_schema='" - + schema + "' and table_type='BASE TABLE'"; + + schema + "' and table_type='BASE TABLE'"; return adasQueryTableNameList(sql); } @@ -83,11 +84,23 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public List queryTablePrimaryColumns() { String sql = "select table_name tableName ,lower(column_name) columnName from information_schema.columns " - + "where table_schema='" + properties.getSchema() - + "' and column_key='PRI' order by ordinal_position asc "; + + "where table_schema='" + properties.getSchema() + "' and column_key='PRI' order by ordinal_position asc "; return adasQueryTablePrimaryColumns(sql); } + @Override + public List queryTableUniqueColumns(String tableName) { + String schema = properties.getSchema(); + String sql = "select kcu.table_name tableName, kcu.column_name columnName,kcu.ordinal_position colIdx," + + " kcu.constraint_name indexIdentifier from information_schema.table_constraints tc " + + " left join information_schema.KEY_COLUMN_USAGE kcu on tc.table_schema =kcu.table_schema" + + " and tc.constraint_name=kcu.constraint_name and tc.table_name = kcu.table_name" + + " where tc.table_schema='" + schema + "' and tc.table_name='" + tableName + "'" + + " and tc.constraint_type='UNIQUE' ;"; + List uniqueColumns = adasQueryTableUniqueColumns(sql); + return translateUniqueToPrimaryColumns(uniqueColumns); + } + @Override public List queryTablePrimaryColumns(String tableName) { return mysqlMetaDataMapper.queryTablePrimaryColumnsByTableName(properties.getSchema(), tableName); @@ -97,11 +110,11 @@ public class MysqlDataAccessService extends AbstractDataAccessService { public List dasQueryTableMetadataList() { LowerCaseTableNames lowerCaseTableNames = 
getLowerCaseTableNames(); String colTableName = Objects.equals(LowerCaseTableNames.SENSITIVE, lowerCaseTableNames) - ? "info.table_name tableName" - : "lower(info.table_name) tableName"; + ? "info.table_name tableName" + : "lower(info.table_name) tableName"; String sql = " SELECT info.TABLE_SCHEMA tableSchema," + colTableName + ",info.table_rows tableRows , " - + "info.avg_row_length avgRowLength FROM information_schema.tables info WHERE TABLE_SCHEMA='" - + properties.getSchema() + "'"; + + "info.avg_row_length avgRowLength FROM information_schema.tables info WHERE TABLE_SCHEMA='" + + properties.getSchema() + "'"; return wrapperTableMetadata(adasQueryTableMetadataList(sql)); } @@ -130,9 +143,9 @@ public class MysqlDataAccessService extends AbstractDataAccessService { @Override public List queryPointList(Connection connection, DataAccessParam param) { String sql = "select s.%s from (SELECT @rowno:=@rowno+1 as rn,r.%s from %s.%s r," - + " (select @rowno := 0) t ORDER BY r.%s asc) s where mod(s.rn, %s) = 1"; + + " (select @rowno := 0) t ORDER BY r.%s asc) s where mod(s.rn, %s) = 1"; sql = String.format(sql, param.getColName(), param.getColName(), param.getSchema(), param.getName(), - param.getColName(), param.getOffset()); + param.getColName(), param.getOffset()); return adasQueryPointList(connection, sql); } diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java index d910ef5..86a0843 100644 --- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OpgsDataAccessService.java @@ -24,6 +24,7 @@ import org.opengauss.datachecker.common.entry.enums.OgCompatibility; import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData; import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.extract.data.mapper.OpgsMetaDataMapper; import javax.annotation.PostConstruct; @@ -127,6 +128,18 @@ public class OpgsDataAccessService extends AbstractDataAccessService { return adasQueryTablePrimaryColumns(sql); } + @Override + public List queryTableUniqueColumns(String tableName) { + String schema = properties.getSchema(); + String sql = "SELECT c.relname AS tableName, ns.nspname, i.indexrelid indexIdentifier, " + + " a.attname AS columnName, a.attnum colIdx FROM pg_index i" + + " JOIN pg_class c ON i.indrelid = c.oid join pg_namespace ns on c.relnamespace=ns.oid" + + " JOIN pg_attribute a ON i.indrelid = a.attrelid AND a.attnum = ANY(i.indkey) " + + " where ns.nspname='" + schema + "' and c.relname='" + tableName + "' and i.indisunique = true;"; + List uniqueColumns = adasQueryTableUniqueColumns(sql); + return translateUniqueToPrimaryColumns(uniqueColumns); + } + @Override public List queryTableColumnsMetaData(String tableName) { return opgsMetaDataMapper.queryTableColumnsMetaData(properties.getSchema(), tableName); diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java index b2426c1..e29bafb 100644 --- 
a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java +++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java @@ -21,6 +21,7 @@ import org.opengauss.datachecker.common.entry.enums.LowerCaseTableNames; import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData; import org.opengauss.datachecker.common.entry.extract.PrimaryColumnBean; import org.opengauss.datachecker.common.entry.extract.TableMetadata; +import org.opengauss.datachecker.common.entry.extract.UniqueColumnBean; import org.opengauss.datachecker.extract.data.mapper.OracleMetaDataMapper; import java.sql.Connection; @@ -76,11 +77,22 @@ public class OracleDataAccessService extends AbstractDataAccessService { @Override public List queryTablePrimaryColumns() { String sql = "SELECT A.TABLE_NAME tableName, A.COLUMN_NAME columnName FROM ALL_CONS_COLUMNS A,ALL_CONSTRAINTS B" - + " WHERE A.constraint_name = B.constraint_name AND B.constraint_type = 'P' AND A.OWNER = '" - + properties.getSchema() + "'"; + + " WHERE A.constraint_name = B.constraint_name AND B.constraint_type = 'P' AND A.OWNER = '" + + properties.getSchema() + "'"; return adasQueryTablePrimaryColumns(sql); } + @Override + public List queryTableUniqueColumns(String tableName) { + String schema = properties.getSchema(); + String sql = "SELECT uc.table_name tableName,uc.constraint_name indexIdentifier,ucc.column_name columnName," + + " uc.constraint_type,ucc.position colIdx FROM USER_CONSTRAINTS uc " + + " JOIN USER_CONS_COLUMNS ucc ON uc.constraint_name=ucc.constraint_name " + + " WHERE uc.constraint_type='U' and uc.owner='" + schema + "'and uc.table_name='" + tableName + "'"; + List uniqueColumns = adasQueryTableUniqueColumns(sql); + return translateUniqueToPrimaryColumns(uniqueColumns); + } + @Override public List queryTablePrimaryColumns(String tableName) { return oracleMetaDataMapper.queryTablePrimaryColumnsByTableName(properties.getSchema(), tableName); @@ -91,12 +103,11 @@ public class OracleDataAccessService extends AbstractDataAccessService { String schema = properties.getSchema(); LowerCaseTableNames lowerCaseTableNames = getLowerCaseTableNames(); String colTableName = Objects.equals(LowerCaseTableNames.SENSITIVE, lowerCaseTableNames) - ? "t.table_name tableName" - : "lower(t.table_name) tableName"; + ? 
"t.table_name tableName" + : "lower(t.table_name) tableName"; String sql = "SELECT t.owner tableSchema," + colTableName + ",t.num_rows tableRows,avg_row_len avgRowLength" - + " FROM ALL_TABLES t LEFT JOIN (SELECT DISTINCT table_name from ALL_CONSTRAINTS where OWNER = '" - + schema + "' AND constraint_type='P') pc on t.table_name=pc.table_name WHERE t.OWNER = '" - + schema + "'"; + + " FROM ALL_TABLES t LEFT JOIN (SELECT DISTINCT table_name from ALL_CONSTRAINTS where OWNER = '" + schema + + "' AND constraint_type='P') pc on t.table_name=pc.table_name WHERE t.OWNER = '" + schema + "'"; return wrapperTableMetadata(adasQueryTableMetadataList(sql)); } -- Gitee From 3b7aac4b0b55ed56927be1e5f39737c1a1cdcdea Mon Sep 17 00:00:00 2001 From: mystarry-sky Date: Mon, 11 Nov 2024 16:12:14 +0800 Subject: [PATCH 6/9] =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E8=A1=A8=E5=94=AF?= =?UTF-8?q?=E4=B8=80=E6=80=A7=E7=B4=A2=E5=BC=95=E5=9C=BA=E6=99=AF=E8=A1=A8?= =?UTF-8?q?=E6=A0=A1=E9=AA=8C=E3=80=82=20=E4=BF=AE=E5=A4=8Dconsumer?= =?UTF-8?q?=E6=8B=89=E5=8F=96=E5=BC=82=E5=B8=B8=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../modules/check/KafkaConsumerHandler.java | 27 ++-- .../check/slice/SliceCheckContext.java | 15 +- .../check/slice/SliceCheckWorker.java | 135 ++++++++---------- .../data/access/MysqlDataAccessService.java | 11 +- .../data/access/OracleDataAccessService.java | 10 +- 5 files changed, 98 insertions(+), 100 deletions(-) diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java index e142040..3a83c2a 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/KafkaConsumerHandler.java @@ -16,6 +16,7 @@ package org.opengauss.datachecker.check.modules.check; import com.alibaba.fastjson.JSON; + import org.apache.commons.lang3.StringUtils; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -26,6 +27,7 @@ import org.opengauss.datachecker.common.entry.extract.RowDataHash; import org.opengauss.datachecker.common.entry.extract.SliceExtend; import org.opengauss.datachecker.common.exception.CheckConsumerPollEmptyException; import org.opengauss.datachecker.common.util.LogUtils; +import org.opengauss.datachecker.common.util.ThreadUtil; import java.time.Duration; import java.util.*; @@ -44,10 +46,11 @@ public class KafkaConsumerHandler { private static final int MAX_CONSUMER_POLL_TIMES = 50; private KafkaConsumer kafkaConsumer; + /** * Constructor * - * @param consumer consumer + * @param consumer consumer * @param retryTimes retryTimes */ public KafkaConsumerHandler(KafkaConsumer consumer, int retryTimes) { @@ -66,6 +69,7 @@ public class KafkaConsumerHandler { /** * 获取kafka consumer * + * @return consumer */ public KafkaConsumer getConsumer() { return kafkaConsumer; @@ -74,7 +78,7 @@ public class KafkaConsumerHandler { /** * Query the Kafka partition data corresponding to the specified table * - * @param topic Kafka topic + * @param topic Kafka topic * @param partitions Kafka partitions * @return kafka partitions data */ @@ -96,8 +100,8 @@ public class KafkaConsumerHandler { * consumer poll data from the topic partition, and filter bu slice extend. then add data in the data list. 
* * @param topicPartition topic partition - * @param sExtend slice extend - * @param attempts + * @param sExtend slice extend + * @param attempts attempts */ public void consumerAssign(TopicPartition topicPartition, SliceExtend sExtend, int attempts) { kafkaConsumer.assign(List.of(topicPartition)); @@ -109,20 +113,21 @@ public class KafkaConsumerHandler { /** * consumer poll data from the topic partition, and filter bu slice extend. then add data in the data list. * - * @param sExtend slice extend + * @param sExtend slice extend * @param dataList data list */ public synchronized void pollTpSliceData(SliceExtend sExtend, List dataList) { AtomicLong currentCount = new AtomicLong(0); int pollEmptyCount = 0; while (currentCount.get() < sExtend.getCount()) { - ConsumerRecords records = - kafkaConsumer.poll(Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); + ConsumerRecords records = kafkaConsumer.poll( + Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); if (records.count() <= 0) { pollEmptyCount++; if (pollEmptyCount > MAX_CONSUMER_POLL_TIMES) { throw new CheckConsumerPollEmptyException(sExtend.getName()); } + ThreadUtil.sleep(KAFKA_CONSUMER_POLL_DURATION); continue; } pollEmptyCount = 0; @@ -139,8 +144,8 @@ public class KafkaConsumerHandler { /** * Query the Kafka partition data corresponding to the specified table * - * @param topic Kafka topic - * @param partitions Kafka partitions + * @param topic Kafka topic + * @param partitions Kafka partitions * @param shouldChangeConsumerGroup if true change consumer Group random * @return kafka partitions data */ @@ -188,8 +193,8 @@ public class KafkaConsumerHandler { } private void getTopicRecords(List dataList, KafkaConsumer kafkaConsumer) { - ConsumerRecords consumerRecords = - kafkaConsumer.poll(Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); + ConsumerRecords consumerRecords = kafkaConsumer.poll( + Duration.ofMillis(KAFKA_CONSUMER_POLL_DURATION)); consumerRecords.forEach(record -> { dataList.add(JSON.parseObject(record.value(), RowDataHash.class)); }); diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java index 5b23f55..9f0bc4f 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckContext.java @@ -81,10 +81,19 @@ public class SliceCheckContext { kafkaConsumerService.getRetryFetchRecordTimes()); } + /** + * get consumer retry fetch record times + * + * @return duration times + */ + public int getRetryFetchRecordTimes() { + return kafkaConsumerService.getRetryFetchRecordTimes(); + } + /** * get source or sink table topic * - * @param table table + * @param table table * @param endpoint source or sink * @return topic name */ @@ -97,7 +106,7 @@ public class SliceCheckContext { /** * refresh slice check progress * - * @param slice slice + * @param slice slice * @param rowCount slice of row count */ public void refreshSliceCheckProgress(SliceVo slice, long rowCount) { @@ -107,7 +116,7 @@ public class SliceCheckContext { /** * add slice check Result * - * @param slice slice + * @param slice slice * @param result check result */ public void addCheckResult(SliceVo slice, CheckDiffResult result) { diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java 
b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java index 29c6bd0..26b42af 100644 --- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java +++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckWorker.java @@ -17,6 +17,7 @@ package org.opengauss.datachecker.check.slice; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; + import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.MapUtils; import org.apache.kafka.common.TopicPartition; @@ -47,6 +48,7 @@ import org.opengauss.datachecker.common.exception.BucketNumberInconsistentExcept import org.opengauss.datachecker.common.exception.CheckConsumerPollEmptyException; import org.opengauss.datachecker.common.exception.MerkleTreeDepthException; import org.opengauss.datachecker.common.util.LogUtils; +import org.opengauss.datachecker.common.util.ThreadUtil; import org.opengauss.datachecker.common.util.TopicUtil; import org.springframework.lang.NonNull; @@ -72,8 +74,6 @@ import java.util.concurrent.CountDownLatch; public class SliceCheckWorker implements Runnable { private static final Logger LOGGER = LogUtils.getLogger(SliceCheckWorker.class); private static final int THRESHOLD_MIN_BUCKET_SIZE = 2; - // 设置最大尝试次数 - private static final int MAX_ATTEMPTS=5; private final SliceVo slice; @@ -81,17 +81,19 @@ public class SliceCheckWorker implements Runnable { private final SliceCheckEvent checkEvent; private final SliceCheckContext checkContext; private final TaskRegisterCenter registerCenter; - private final DifferencePair, List, List> difference = - DifferencePair.of(new LinkedList<>(), new LinkedList<>(), new LinkedList<>()); + private final DifferencePair, List, List> difference = DifferencePair.of( + new LinkedList<>(), new LinkedList<>(), new LinkedList<>()); private final LocalDateTime startTime; private long sliceRowCount; + // 设置最大尝试次数 + private int maxAttemptsTimes; private Topic topic = new Topic(); /** * slice check worker construct * - * @param checkEvent check event + * @param checkEvent check event * @param sliceCheckContext slice check context */ public SliceCheckWorker(SliceCheckEvent checkEvent, SliceCheckContext sliceCheckContext, @@ -102,6 +104,7 @@ public class SliceCheckWorker implements Runnable { this.slice = checkEvent.getSlice(); this.registerCenter = registerCenter; this.processNo = ConfigCache.getValue(ConfigConstants.PROCESS_NO); + this.maxAttemptsTimes = sliceCheckContext.getRetryFetchRecordTimes(); } @Override @@ -168,22 +171,17 @@ public class SliceCheckWorker implements Runnable { LogUtils.debug(LOGGER, "slice {} fetch empty", slice.getName()); } else { // sourceSize is less than thresholdMinBucketSize, that is, there is only one bucket. 
Compare
-            DifferencePair, List, List> subDifference =
-                compareBucketCommon(sourceTuple.getBuckets()
-                                               .get(0), sinkTuple.getBuckets()
-                                                                 .get(0));
-            difference.getDiffering()
-                      .addAll(subDifference.getDiffering());
-            difference.getOnlyOnLeft()
-                      .addAll(subDifference.getOnlyOnLeft());
-            difference.getOnlyOnRight()
-                      .addAll(subDifference.getOnlyOnRight());
+            DifferencePair, List, List> subDifference = null;
+            subDifference = compareBucketCommon(sourceTuple.getBuckets().get(0), sinkTuple.getBuckets().get(0));
+            difference.getDiffering().addAll(subDifference.getDiffering());
+            difference.getOnlyOnLeft().addAll(subDifference.getOnlyOnLeft());
+            difference.getOnlyOnRight().addAll(subDifference.getOnlyOnRight());
             }
         } else {
             throw new BucketNumberInconsistentException(String.format(
-                "table[%s] slice[%s] build the bucket number is inconsistent, source-bucket-count=[%s] sink-bucket-count=[%s]"
-                    + " Please synchronize data again! ", slice.getTable(), slice.getNo(), sourceTuple.getBucketSize(),
-                sinkTuple.getBucketSize()));
+                "table[%s] slice[%s] build the bucket number is inconsistent, source-bucket-count=[%s] "
+                    + "sink-bucket-count=[%s] Please synchronize data again! ", slice.getTable(), slice.getNo(),
+                sourceTuple.getBucketSize(), sinkTuple.getBucketSize()));
         }
     }
@@ -193,24 +191,23 @@ public class SliceCheckWorker implements Runnable {
     private void checkResult(String resultMsg) {
         CheckDiffResultBuilder builder = CheckDiffResultBuilder.builder();
-        builder.process(ConfigCache.getValue(ConfigConstants.PROCESS_NO))
-               .table(slice.getTable())
-               .sno(slice.getNo())
-               .error(resultMsg)
-               .topic(getConcatTableTopics())
-               .schema(slice.getSchema())
-               .fileName(slice.getName())
-               .conditionLimit(getConditionLimit())
-               .partitions(slice.getPtnNum())
-               .isTableStructureEquals(true)
-               .startTime(startTime)
-               .endTime(LocalDateTime.now())
-               .isExistTableMiss(false, null)
-               .rowCount((int) sliceRowCount)
-               .errorRate(20)
-               .checkMode(ConfigCache.getValue(ConfigConstants.CHECK_MODE, CheckMode.class))
-               .keyDiff(difference.getOnlyOnLeft(), difference.getDiffering(), difference.getOnlyOnRight());
+            .table(slice.getTable())
+            .sno(slice.getNo())
+            .error(resultMsg)
+            .topic(getConcatTableTopics())
+            .schema(slice.getSchema())
+            .fileName(slice.getName())
+            .conditionLimit(getConditionLimit())
+            .partitions(slice.getPtnNum())
+            .isTableStructureEquals(true)
+            .startTime(startTime)
+            .endTime(LocalDateTime.now())
+            .isExistTableMiss(false, null)
+            .rowCount((int) sliceRowCount)
+            .errorRate(20)
+            .checkMode(ConfigCache.getValue(ConfigConstants.CHECK_MODE, CheckMode.class))
+            .keyDiff(difference.getOnlyOnLeft(), difference.getDiffering(), difference.getOnlyOnRight());
         CheckDiffResult result = builder.build();
         LogUtils.debug(LOGGER, "result {}", result);
         checkContext.addCheckResult(slice, result);
@@ -233,18 +230,13 @@ public class SliceCheckWorker implements Runnable {
             return;
         }
         diffNodeList.forEach(diffNode -> {
-            Bucket sourceBucket = diffNode.getSource()
-                                          .getBucket();
-            Bucket sinkBucket = diffNode.getSink()
-                                        .getBucket();
-            DifferencePair, List, List> subDifference =
-                compareBucketCommon(sourceBucket, sinkBucket);
-            difference.getDiffering()
-                      .addAll(subDifference.getDiffering());
-            difference.getOnlyOnLeft()
-                      .addAll(subDifference.getOnlyOnLeft());
-            difference.getOnlyOnRight()
-                      .addAll(subDifference.getOnlyOnRight());
+            Bucket sourceBucket = diffNode.getSource().getBucket();
+            Bucket sinkBucket = diffNode.getSink().getBucket();
+            DifferencePair, List, List> subDifference = compareBucketCommon(
+                sourceBucket, sinkBucket);
+            difference.getDiffering().addAll(subDifference.getDiffering());
+            difference.getOnlyOnLeft().addAll(subDifference.getOnlyOnLeft());
+            difference.getOnlyOnRight().addAll(subDifference.getOnlyOnRight());
         });
         diffNodeList.clear();
     }
@@ -257,13 +249,10 @@ public class SliceCheckWorker implements Runnable {
         List entriesOnlyOnLeft = collectorDeleteOrInsert(bucketDifference.entriesOnlyOnLeft());
         List entriesOnlyOnRight = collectorDeleteOrInsert(bucketDifference.entriesOnlyOnRight());
         List differing = collectorUpdate(bucketDifference.entriesDiffering());
-
-        LogUtils.debug(LOGGER, "diff slice {} insert {}", slice.getName(), bucketDifference.entriesOnlyOnLeft()
-            .size());
-        LogUtils.debug(LOGGER, "diff slice {} delete {}", slice.getName(), bucketDifference.entriesOnlyOnRight()
-            .size());
-        LogUtils.debug(LOGGER, "diff slice {} update {}", slice.getName(), bucketDifference.entriesDiffering()
-            .size());
+        LogUtils.debug(LOGGER, "diff slice {} insert {}", slice.getName(), bucketDifference.entriesOnlyOnLeft().size());
+        LogUtils.debug(LOGGER, "diff slice {} delete {}", slice.getName(),
+            bucketDifference.entriesOnlyOnRight().size());
+        LogUtils.debug(LOGGER, "diff slice {} update {}", slice.getName(), bucketDifference.entriesDiffering().size());
         return DifferencePair.of(entriesOnlyOnLeft, entriesOnlyOnRight, differing);
     }
@@ -313,12 +302,11 @@ public class SliceCheckWorker implements Runnable {
         // Initialize source bucket column list data
         long startFetch = System.currentTimeMillis();
         CountDownLatch countDownLatch = new CountDownLatch(checkTupleList.size());
-        int avgSliceCount = (int) (sourceTuple.getSlice()
-            .getCount() + sinkTuple.getSlice()
-            .getCount()) / 2;
+        int avgSliceCount = (int) (sourceTuple.getSlice().getCount() + sinkTuple.getSlice().getCount()) / 2;
         KafkaConsumerHandler consumer = checkContext.createKafkaHandler();
         checkTupleList.forEach(check -> {
-            initBucketList(check.getEndpoint(), check.getSlice(), check.getBuckets(), bucketDiff, avgSliceCount, consumer);
+            initBucketList(check.getEndpoint(), check.getSlice(), check.getBuckets(), bucketDiff, avgSliceCount,
+                consumer);
             countDownLatch.countDown();
         });
         countDownLatch.await();
@@ -336,30 +324,33 @@ public class SliceCheckWorker implements Runnable {
     }

     private void initBucketList(Endpoint endpoint, SliceExtend sliceExtend, List bucketList,
-            Map> bucketDiff, int avgSliceCount, KafkaConsumerHandler consumer) {
+        Map> bucketDiff, int avgSliceCount, KafkaConsumerHandler consumer) {
         // Use feign client to pull Kafka data
         List dataList = new LinkedList<>();
-        TopicPartition topicPartition = new TopicPartition(Objects.equals(Endpoint.SOURCE, endpoint) ?
-            topic.getSourceTopicName() : topic.getSinkTopicName(), topic.getPtnNum());
+        TopicPartition topicPartition = new TopicPartition(
+            Objects.equals(Endpoint.SOURCE, endpoint) ? topic.getSourceTopicName() : topic.getSinkTopicName(),
+            topic.getPtnNum());
         int attempts = 0;
-        while (attempts < MAX_ATTEMPTS) {
+        while (attempts < maxAttemptsTimes) {
             try {
                 consumer.consumerAssign(topicPartition, sliceExtend, attempts);
                 consumer.pollTpSliceData(sliceExtend, dataList);
                 break; // 如果成功,跳出循环
             } catch (CheckConsumerPollEmptyException ex) {
-                if (++attempts >= MAX_ATTEMPTS) {
+                if (++attempts >= maxAttemptsTimes) {
                     checkContext.returnConsumer(consumer);
                     throw ex; // 如果达到最大尝试次数,重新抛出异常
                 }
+                ThreadUtil.sleepOneSecond();
+                LogUtils.warn(LOGGER, "poll slice data {} {} , retry ({})", sliceExtend.getName(), sliceExtend.getNo(),
+                    attempts);
             }
         }
         if (CollectionUtils.isEmpty(dataList)) {
             return;
         }
-        BuilderBucketHandler bucketBuilder =
-            new BuilderBucketHandler(ConfigCache.getIntValue(ConfigConstants.BUCKET_CAPACITY));
-
+        BuilderBucketHandler bucketBuilder = new BuilderBucketHandler(
+            ConfigCache.getIntValue(ConfigConstants.BUCKET_CAPACITY));
         Map bucketMap = new ConcurrentHashMap<>(InitialCapacity.CAPACITY_128);
         // Use the pulled data to build the bucket list
         bucketBuilder.builder(dataList, avgSliceCount, bucketMap);
@@ -394,12 +385,6 @@ public class SliceCheckWorker implements Runnable {
         bucketList.sort(Comparator.comparingInt(Bucket::getNumber));
     }

-    private void getSliceDataFromTopicPartition(KafkaConsumerHandler consumer, SliceExtend sExtend,
-        List dataList) throws CheckConsumerPollEmptyException {
-
-
-    }
-
     /**
      *
      * Align the bucket list data according to the statistical results of source
@@ -411,12 +396,10 @@ public class SliceCheckWorker implements Runnable {
         if (MapUtils.isNotEmpty(bucketDiff)) {
             bucketDiff.forEach((number, pair) -> {
                 if (pair.getSource() == -1) {
-                    sourceTuple.getBuckets()
-                               .add(BuilderBucketHandler.builderEmpty(number));
+                    sourceTuple.getBuckets().add(BuilderBucketHandler.builderEmpty(number));
                 }
                 if (pair.getSink() == -1) {
-                    sinkTuple.getBuckets()
-                             .add(BuilderBucketHandler.builderEmpty(number));
+                    sinkTuple.getBuckets().add(BuilderBucketHandler.builderEmpty(number));
                 }
             });
         }
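
Earlier in this file's diff, the poll loop in initBucketList replaces the fixed MAX_ATTEMPTS constant with the configurable maxAttemptsTimes and now sleeps for a second and logs a warning between attempts. A minimal, self-contained sketch of that bounded-retry shape follows; PollEmptyException, pollOnce and MAX_ATTEMPTS are illustrative stand-ins, not the project's classes.

    import java.util.List;
    import java.util.concurrent.TimeUnit;

    public final class BoundedRetryDemo {
        private static final int MAX_ATTEMPTS = 3;

        static class PollEmptyException extends RuntimeException {
            PollEmptyException(String msg) {
                super(msg);
            }
        }

        // Poll until data arrives or the attempt budget is used up, sleeping one second between attempts.
        static List<String> pollWithRetry() throws InterruptedException {
            int attempts = 0;
            while (attempts < MAX_ATTEMPTS) {
                try {
                    return pollOnce();            // success: leave the loop immediately
                } catch (PollEmptyException ex) {
                    if (++attempts >= MAX_ATTEMPTS) {
                        throw ex;                 // budget exhausted: propagate to the caller
                    }
                    TimeUnit.SECONDS.sleep(1);    // back off before the next attempt
                }
            }
            return List.of();
        }

        // Stand-in for the real consumer poll; always empty here so the retry path is exercised.
        private static List<String> pollOnce() {
            throw new PollEmptyException("no records in this poll");
        }

        public static void main(String[] args) throws InterruptedException {
            try {
                pollWithRetry();
            } catch (PollEmptyException ex) {
                System.out.println("gave up after " + MAX_ATTEMPTS + " attempts: " + ex.getMessage());
            }
        }
    }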
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
index d388461..afee709 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/MysqlDataAccessService.java
@@ -91,12 +91,11 @@ public class MysqlDataAccessService extends AbstractDataAccessService {
     @Override
     public List queryTableUniqueColumns(String tableName) {
         String schema = properties.getSchema();
-        String sql = "select kcu.table_name tableName, kcu.column_name columnName,kcu.ordinal_position colIdx,"
-            + " kcu.constraint_name indexIdentifier from  information_schema.table_constraints tc "
-            + " left join information_schema.KEY_COLUMN_USAGE kcu on tc.table_schema =kcu.table_schema"
-            + " and tc.constraint_name=kcu.constraint_name and tc.table_name = kcu.table_name"
-            + " where tc.table_schema='" + schema + "' and tc.table_name='" + tableName + "'"
-            + " and tc.constraint_type='UNIQUE' ;";
+        String sql = "select s.table_schema,s.table_name tableName,s.column_name columnName,c.ordinal_position colIdx,"
+            + " s.index_name indexIdentifier from information_schema.statistics s "
+            + " left join information_schema.columns c on s.table_schema=c.table_schema  "
+            + " and s.table_schema=c.table_schema and s.table_name=c.table_name and s.column_name=c.column_name "
+            + " where s.table_schema='" + schema + "' and s.table_name='" + tableName + "'" + " and s.non_unique=0;";
         List uniqueColumns = adasQueryTableUniqueColumns(sql);
         return translateUniqueToPrimaryColumns(uniqueColumns);
     }
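
For reference, the rewritten lookup above reads information_schema.statistics with non_unique=0 instead of information_schema.table_constraints, so it also covers unique indexes that were not declared as constraints. The sketch below runs an equivalent lookup with bind parameters; it is illustrative only (plain JDBC, made-up schema and table names), not the project's data-access code.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    public final class UniqueIndexColumnsDemo {
        public static void main(String[] args) throws Exception {
            // Bind schema and table as parameters rather than concatenating them into the SQL text.
            String sql = "select s.index_name, s.column_name, s.seq_in_index"
                + " from information_schema.statistics s"
                + " where s.table_schema = ? and s.table_name = ? and s.non_unique = 0"
                + " order by s.index_name, s.seq_in_index";
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:mysql://127.0.0.1:3306/test", "user", "password");
                 PreparedStatement ps = conn.prepareStatement(sql)) {
                ps.setString(1, "test");    // schema (illustrative)
                ps.setString(2, "t_demo");  // table (illustrative)
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        System.out.printf("%s %s #%d%n",
                            rs.getString("index_name"), rs.getString("column_name"), rs.getInt("seq_in_index"));
                    }
                }
            }
        }
    }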
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java
index e29bafb..bfaef45 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/data/access/OracleDataAccessService.java
@@ -85,10 +85,12 @@ public class OracleDataAccessService extends AbstractDataAccessService {
     @Override
     public List queryTableUniqueColumns(String tableName) {
         String schema = properties.getSchema();
-        String sql = "SELECT uc.table_name tableName,uc.constraint_name indexIdentifier,ucc.column_name columnName,"
-            + " uc.constraint_type,ucc.position colIdx FROM USER_CONSTRAINTS uc "
-            + " JOIN USER_CONS_COLUMNS ucc ON uc.constraint_name=ucc.constraint_name "
-            + " WHERE uc.constraint_type='U' and uc.owner='" + schema + "'and uc.table_name='" + tableName + "'";
+        String sql = " SELECT ui.index_name indexIdentifier,ui.table_owner,ui.table_name tableName,"
+            + " utc.column_name columnName, utc.column_id colIdx"
+            + " from user_indexes ui left join user_ind_columns uic on ui.index_name=uic.index_name "
+            + " and ui.table_name=uic.table_name  "
+            + " left join user_tab_columns utc on ui.table_name =utc.table_name and uic.column_name=utc.column_name"
+            + " where ui.uniqueness='UNIQUE' and ui.table_owner='" + schema + "' and ui.table_name='" + tableName + "'";
         List uniqueColumns = adasQueryTableUniqueColumns(sql);
         return translateUniqueToPrimaryColumns(uniqueColumns);
     }
-- 
Gitee


From 9f135d2c584bda2a204f6733c4f72d8a5088a81e Mon Sep 17 00:00:00 2001
From: mystarry-sky 
Date: Wed, 20 Nov 2024 15:09:10 +0800
Subject: [PATCH 7/9] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E6=A0=A1=E9=AA=8C?=
 =?UTF-8?q?=E6=97=A5=E5=BF=97=E6=BB=9A=E5=8A=A8=E8=A7=84=E5=88=99=EF=BC=8C?=
 =?UTF-8?q?=E5=8F=96=E6=B6=88=E6=97=B6=E9=97=B4=E6=BB=9A=E5=8A=A8=E7=AD=96?=
 =?UTF-8?q?=E7=95=A5=EF=BC=8C=E5=A2=9E=E5=8A=A0=E5=90=AF=E5=8A=A8=E6=BB=9A?=
 =?UTF-8?q?=E5=8A=A8=E8=A7=84=E5=88=99?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config/log4j2.xml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/config/log4j2.xml b/config/log4j2.xml
index f8cd663..5f2e854 100644
--- a/config/log4j2.xml
+++ b/config/log4j2.xml
@@ -30,13 +30,13 @@
             
         
 
-        
+        
             
             
+                
                 
-                
             
-
             
                 
             
-- 
Gitee


From 2cfba537b7124d8dba5bac58ec7804781d3ea2c6 Mon Sep 17 00:00:00 2001
From: mystarry-sky 
Date: Wed, 20 Nov 2024 15:12:54 +0800
Subject: [PATCH 8/9] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E6=A0=A1=E9=AA=8C?=
 =?UTF-8?q?=E6=97=A5=E5=BF=97=E6=BB=9A=E5=8A=A8=E8=A7=84=E5=88=99=EF=BC=8C?=
 =?UTF-8?q?=E5=8F=96=E6=B6=88=E6=97=B6=E9=97=B4=E6=BB=9A=E5=8A=A8=E7=AD=96?=
 =?UTF-8?q?=E7=95=A5=EF=BC=8C=E5=A2=9E=E5=8A=A0=E5=90=AF=E5=8A=A8=E6=BB=9A?=
 =?UTF-8?q?=E5=8A=A8=E8=A7=84=E5=88=99?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config/log4j2.xml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/config/log4j2.xml b/config/log4j2.xml
index f8cd663..5f2e854 100644
--- a/config/log4j2.xml
+++ b/config/log4j2.xml
@@ -30,13 +30,13 @@
             
         
 
-        
+        
             
             
+                
                 
-                
             
-
             
                 
             
-- 
Gitee


From 5f0188ab3be45a8e275591ae8c38ab94bb616b83 Mon Sep 17 00:00:00 2001
From: mystarry-sky 
Date: Mon, 2 Dec 2024 15:48:44 +0800
Subject: [PATCH 9/9] =?UTF-8?q?=E6=A0=A1=E9=AA=8C=E5=B7=A5=E5=85=B7?=
 =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E8=81=94=E5=90=88=E4=B8=BB=E9=94=AE=E5=88=86?=
 =?UTF-8?q?=E7=89=87=E6=8A=BD=E5=8F=96=E5=86=85=E5=AD=98=E6=BA=A2=E5=87=BA?=
 =?UTF-8?q?=E9=97=AE=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config/application-sink.yml                   |  10 +-
 config/application-source.yml                 |  10 +-
 config/application.yml                        |   4 +-
 .../check/AbstractCheckDiffResultBuilder.java |  24 +-
 .../check/slice/SliceCheckEventHandler.java   |  71 ++---
 .../common/entry/common/CheckPointData.java   |   7 +
 .../common/entry/extract/SliceVo.java         |   2 +-
 .../common/entry/extract/TableMetadata.java   |  22 +-
 .../common/service/ShutdownService.java       |   3 +
 .../datachecker/common/util/ThreadUtil.java   |  60 ++--
 .../extract/config/AsyncConfig.java           |  30 +-
 .../service/DataExtractServiceImpl.java       |  84 +++---
 .../slice/ExtractPointSwapManager.java        |  19 +-
 .../extract/slice/SliceProcessorContext.java  |  26 +-
 .../slice/common/SliceResultSetSender.java    |  69 +++--
 .../slice/process/AbstractProcessor.java      |   2 +-
 .../slice/process/JdbcSliceProcessor.java     | 260 +++++++++++++++---
 .../slice/process/JdbcTableProcessor.java     |  12 +-
 .../datachecker/extract/task/CheckPoint.java  |  26 +-
 .../extract/task/ExtractTaskRunnable.java     | 110 ++++----
 .../extract/task/ResultSetHandler.java        |  33 ++-
 .../task/sql/QueryStatementFactory.java       |  18 +-
 .../extract/task/sql/SelectSqlBuilder.java    | 149 +++++-----
 .../sql/SinglePrimarySliceQueryStatement.java |   2 +-
 .../sql/UnionPrimarySliceQueryStatement.java  | 108 ++++++++
 .../datachecker/extract/util/HashHandler.java |  45 ++-
 .../src/main/resources/application-sink.yml   |   1 +
 .../src/main/resources/application-source.yml |   1 +
 .../dao/BaseDataResultSetHandlerTest.java     |   5 +-
 run.sh                                        | 100 +++++++
 30 files changed, 888 insertions(+), 425 deletions(-)
 create mode 100644 datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/UnionPrimarySliceQueryStatement.java
 create mode 100644 run.sh

diff --git a/config/application-sink.yml b/config/application-sink.yml
index e201312..3003894 100644
--- a/config/application-sink.yml
+++ b/config/application-sink.yml
@@ -5,11 +5,11 @@ logging:
 spring:
   check:
     server-uri: http://127.0.0.1:9000
-    core-pool-size: 5
-    maximum-pool-size: 10
+    core-pool-size: 3
+    maximum-pool-size: 3
     maximum-topic-size: 1
-    maximum-table-slice-size: 100000
-    extend-maximum-pool-size: 5
+    maximum-table-slice-size: 10000
+    extend-maximum-pool-size: 3
   extract:
     schema: test
     databaseType: OG
@@ -27,7 +27,7 @@ spring:
     # driver-class-name: com.mysql.cj.jdbc.Driver
     # url: jdbc:mysql://127.0.0.1:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=UTC&allowPublicKeyRetrieval=true
     driver-class-name: org.opengauss.Driver
-    url: jdbc:opengauss://127.0.0.1:5432/postgres?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=UTC&bitToString=true
+    url: jdbc:opengauss://127.0.0.1:5432/postgres?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=UTC&bitToString=true&loggerLevel=OFF&autocommit=false
     # driver-class-name: oracle.jdbc.OracleDriver
     # url: jdbc:oracle:thin:@127.0.0.1:1521/TEST
     username:
diff --git a/config/application-source.yml b/config/application-source.yml
index a6ea15b..2c3d136 100644
--- a/config/application-source.yml
+++ b/config/application-source.yml
@@ -6,11 +6,11 @@ logging:
 spring:
   check:
     server-uri: http://127.0.0.1:9000
-    core-pool-size: 5
-    maximum-pool-size: 10
+    core-pool-size: 3
+    maximum-pool-size: 3
     maximum-topic-size: 1
-    maximum-table-slice-size: 100000
-    extend-maximum-pool-size: 5
+    maximum-table-slice-size: 10000
+    extend-maximum-pool-size: 3
   extract:
     schema: test
     databaseType: MS # For MySQL
@@ -32,7 +32,7 @@ spring:
     bootstrap-servers: localhost:9092
   datasource:
     driver-class-name: com.mysql.cj.jdbc.Driver
-    url: jdbc:mysql://127.0.0.1:3306/mysql?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=UTC&allowPublicKeyRetrieval=true
+    url: jdbc:mysql://127.0.0.1:3306/mysql?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=UTC&allowPublicKeyRetrieval=true&dontAutoCommit=true
     # driver-class-name: org.opengauss.Driver # For openGauss
     # url: # jdbc:opengauss://127.0.0.1:5432/postgres?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=UTC&bitToString=true # For openGauss
     # driver-class-name: oracle.jdbc.OracleDriver
diff --git a/config/application.yml b/config/application.yml
index da778ee..eb627d0 100644
--- a/config/application.yml
+++ b/config/application.yml
@@ -6,8 +6,8 @@ spring:
   kafka:
     bootstrap-servers: localhost:9092
   check:
-    core-pool-size: 5
-    maximum-pool-size: 10
+    core-pool-size: 3
+    maximum-pool-size: 3
     maximum-topic-size: 1
 data:
   check:
diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/AbstractCheckDiffResultBuilder.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/AbstractCheckDiffResultBuilder.java
index 024f503..18cced2 100644
--- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/AbstractCheckDiffResultBuilder.java
+++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/modules/check/AbstractCheckDiffResultBuilder.java
@@ -16,6 +16,7 @@
 package org.opengauss.datachecker.check.modules.check;
 
 import lombok.Getter;
+
 import org.apache.logging.log4j.Logger;
 import org.opengauss.datachecker.common.entry.check.Difference;
 import org.opengauss.datachecker.common.entry.enums.CheckMode;
@@ -40,7 +41,8 @@ import java.util.stream.Collectors;
  * @since :11
  */
 @Getter
-public abstract class AbstractCheckDiffResultBuilder> {
+public abstract class AbstractCheckDiffResultBuilder> {
     private static final Logger log = LogUtils.getLogger();
     private static final int MAX_DIFF_REPAIR_SIZE = 5000;
 
@@ -173,7 +175,7 @@ public abstract class AbstractCheckDiffResultBuilder insert, List update, List delete) {
-        this.keyInsert.addAll(insert);
-        this.keyUpdate.addAll(update);
-        this.keyDelete.addAll(delete);
-        this.keyInsertSet.addAll(insert.stream().map(Difference::getKey).collect(Collectors.toSet()));
-        this.keyUpdateSet.addAll(update.stream().map(Difference::getKey).collect(Collectors.toSet()));
-        this.keyDeleteSet.addAll(delete.stream().map(Difference::getKey).collect(Collectors.toSet()));
+        if (Objects.nonNull(insert)) {
+            this.keyInsert.addAll(insert);
+            this.keyInsertSet.addAll(insert.stream().map(Difference::getKey).collect(Collectors.toSet()));
+        }
+        if (Objects.nonNull(update)) {
+            this.keyUpdate.addAll(update);
+            this.keyUpdateSet.addAll(update.stream().map(Difference::getKey).collect(Collectors.toSet()));
+        }
+        if (Objects.nonNull(delete)) {
+            this.keyDelete.addAll(delete);
+            this.keyDeleteSet.addAll(delete.stream().map(Difference::getKey).collect(Collectors.toSet()));
+        }
         diffSort.sort(this.keyInsert);
         diffSort.sort(this.keyUpdate);
         diffSort.sort(this.keyDelete);
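
The guards added above make keyDiff tolerate a null insert, update, or delete list. A tiny illustrative helper with the same shape (not the project's builder):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Objects;

    public final class NullSafeAccumulateDemo {
        private final List<String> keyInsert = new ArrayList<>();

        // Add the given keys only when the incoming list is non-null; a null list is simply skipped.
        void accumulate(List<String> insert) {
            if (Objects.nonNull(insert)) {
                keyInsert.addAll(insert);
            }
        }

        public static void main(String[] args) {
            NullSafeAccumulateDemo demo = new NullSafeAccumulateDemo();
            demo.accumulate(null);                    // ignored instead of throwing a NullPointerException
            demo.accumulate(List.of("pk-1", "pk-2"));
            System.out.println(demo.keyInsert);       // prints [pk-1, pk-2]
        }
    }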
diff --git a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java
index 36d7218..d96d8f8 100644
--- a/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java
+++ b/datachecker-check/src/main/java/org/opengauss/datachecker/check/slice/SliceCheckEventHandler.java
@@ -77,58 +77,59 @@ public class SliceCheckEventHandler {
             }
         } else {
             LogUtils.info(log, "slice check event , table structure diff [{}][{} : {}]", checkEvent.getCheckName(),
-                checkEvent.getSource()
-                          .getTableHash(), checkEvent.getSink()
-                                                     .getTableHash());
+                checkEvent.getSource().getTableHash(), checkEvent.getSink().getTableHash());
             handleTableStructureDiff(checkEvent);
-            registerCenter.refreshCheckedTableCompleted(checkEvent.getSlice()
-                                                                  .getTable());
+            registerCenter.refreshCheckedTableCompleted(checkEvent.getSlice().getTable());
         }
     }
 
     /**
      * 添加校验失败分片事件处理流程
      *
-     * @param checkEvent
+     * @param checkEvent checkEvent
      */
     public void handleFailed(SliceCheckEvent checkEvent) {
         LogUtils.warn(log, "slice check event , table slice has unknown error [{}][{} : {}]", checkEvent.getCheckName(),
-                checkEvent.getSource(), checkEvent.getSink());
+            checkEvent.getSource(), checkEvent.getSink());
         long count = getCheckSliceCount(checkEvent);
         sliceCheckContext.refreshSliceCheckProgress(checkEvent.getSlice(), count);
-        CheckDiffResult result = buildSliceDiffResult(checkEvent.getSlice(), (int) count, true, "slice has unknown error");
+        CheckDiffResult result = buildSliceDiffResult(checkEvent.getSlice(), (int) count, true,
+            "slice has unknown error");
         sliceCheckContext.addCheckResult(checkEvent.getSlice(), result);
-        registerCenter.refreshCheckedTableCompleted(checkEvent.getSlice()
-                .getTable());
+        registerCenter.refreshCheckedTableCompleted(checkEvent.getSlice().getTable());
     }
 
     private static long getCheckSliceCount(SliceCheckEvent checkEvent) {
         SliceExtend source = checkEvent.getSource();
         SliceExtend sink = checkEvent.getSink();
-        long count = Math.max(source.getCount(), sink.getCount());
-        return count;
+        if (Objects.nonNull(sink) && Objects.nonNull(source)) {
+            return Math.max(source.getCount(), sink.getCount());
+        } else {
+            return Objects.nonNull(sink) ? sink.getCount() : Objects.nonNull(source) ? source.getCount() : 0;
+        }
     }
 
     private void handleTableStructureDiff(SliceCheckEvent checkEvent) {
         long count = getCheckSliceCount(checkEvent);
         sliceCheckContext.refreshSliceCheckProgress(checkEvent.getSlice(), count);
-        CheckDiffResult result = buildSliceDiffResult(checkEvent.getSlice(), (int) count, false, "table structure diff");
+        CheckDiffResult result = buildSliceDiffResult(checkEvent.getSlice(), (int) count, false,
+            "table structure diff");
         sliceCheckContext.addTableStructureDiffResult(checkEvent.getSlice(), result);
     }
 
     private CheckDiffResult buildSliceDiffResult(SliceVo slice, int count, boolean isTableStructure, String message) {
         CheckDiffResultBuilder builder = CheckDiffResultBuilder.builder();
         builder.checkMode(ConfigCache.getCheckMode())
-                .process(ConfigCache.getValue(ConfigConstants.PROCESS_NO))
-                .schema(slice.getSchema())
-                .table(slice.getTable())
-                .sno(slice.getNo())
-                .startTime(LocalDateTime.now())
-                .endTime(LocalDateTime.now())
-                .isTableStructureEquals(isTableStructure)
-                .isExistTableMiss(false, null)
-                .rowCount(count)
-                .error(message);
+            .process(ConfigCache.getValue(ConfigConstants.PROCESS_NO))
+            .schema(slice.getSchema())
+            .table(slice.getTable())
+            .sno(slice.getNo())
+            .startTime(LocalDateTime.now())
+            .endTime(LocalDateTime.now())
+            .isTableStructureEquals(isTableStructure)
+            .isExistTableMiss(false, null)
+            .rowCount(count)
+            .error(message);
         return builder.build();
     }
 
@@ -141,25 +142,25 @@ public class SliceCheckEventHandler {
     /**
      * handleIgnoreTable
      *
-     * @param slice  slice
+     * @param slice slice
      * @param source source
-     * @param sink   sink
+     * @param sink sink
      */
     public void handleIgnoreTable(SliceVo slice, SliceExtend source, SliceExtend sink) {
         sliceCheckContext.refreshSliceCheckProgress(slice, 0);
         CheckDiffResultBuilder builder = CheckDiffResultBuilder.builder();
         Endpoint existEndpoint = Objects.nonNull(source) && Objects.isNull(sink) ? Endpoint.SOURCE : Endpoint.SINK;
         builder.checkMode(ConfigCache.getCheckMode())
-               .process(ConfigCache.getValue(ConfigConstants.PROCESS_NO))
-               .schema(slice.getSchema())
-               .table(slice.getTable())
-               .sno(slice.getNo())
-               .startTime(LocalDateTime.now())
-               .endTime(LocalDateTime.now())
-               .isTableStructureEquals(false)
-               .isExistTableMiss(true, existEndpoint)
-               .rowCount(0)
-               .error("table miss");
+            .process(ConfigCache.getValue(ConfigConstants.PROCESS_NO))
+            .schema(slice.getSchema())
+            .table(slice.getTable())
+            .sno(slice.getNo())
+            .startTime(LocalDateTime.now())
+            .endTime(LocalDateTime.now())
+            .isTableStructureEquals(false)
+            .isExistTableMiss(true, existEndpoint)
+            .rowCount(0)
+            .error("table miss");
         CheckDiffResult result = builder.build();
         sliceCheckContext.addTableStructureDiffResult(slice, result);
         registerCenter.refreshCheckedTableCompleted(slice.getTable());
diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/common/CheckPointData.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/common/CheckPointData.java
index 2cdfba1..2bbde1c 100644
--- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/common/CheckPointData.java
+++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/common/CheckPointData.java
@@ -17,6 +17,7 @@ package org.opengauss.datachecker.common.entry.common;
 
 import lombok.Data;
 import lombok.experimental.Accessors;
+
 import org.opengauss.datachecker.common.entry.enums.Endpoint;
 
 import java.util.List;
@@ -33,4 +34,10 @@ public class CheckPointData {
     private String tableName;
     private boolean isDigit;
     private List checkPointList;
+
+    @Override
+    public String toString() {
+        return "endpoint=" + endpoint + ", tableName=" + tableName + ", isDigit=" + isDigit + ", checkPointList="
+            + checkPointList.size();
+    }
 }
diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/SliceVo.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/SliceVo.java
index 7eab24b..9392c3c 100644
--- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/SliceVo.java
+++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/SliceVo.java
@@ -78,6 +78,6 @@ public class SliceVo extends BaseSlice {
             return super.getName() + " total=" + super.getTotal() + " no=" + super.getNo() + ", [ fetch full ]";
         }
         return super.getName() + " total=" + super.getTotal() + " no=" + super.getNo() + ", [" + super.getBeginIdx()
-            + " , " + super.getEndIdx() + " ]" + " fetchSize=" + super.getFetchSize();
+            + " , " + super.getEndIdx() + " ]";
     }
 }
diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/TableMetadata.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/TableMetadata.java
index 8842669..7283fc9 100644
--- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/TableMetadata.java
+++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/entry/extract/TableMetadata.java
@@ -18,6 +18,7 @@ package org.opengauss.datachecker.common.entry.extract;
 import lombok.Data;
 import lombok.ToString;
 import lombok.experimental.Accessors;
+
 import org.opengauss.datachecker.common.entry.enums.DataBaseType;
 import org.opengauss.datachecker.common.entry.enums.Endpoint;
 import org.springframework.util.CollectionUtils;
@@ -84,8 +85,7 @@ public class TableMetadata {
         if (primaryMetas == null || primaryMetas.size() != 1) {
             return false;
         }
-        return primaryMetas.get(0)
-                           .isAutoIncrementColumn();
+        return primaryMetas.get(0).isAutoIncrementColumn();
     }
 
     /**
@@ -97,6 +97,20 @@ public class TableMetadata {
         return !CollectionUtils.isEmpty(primaryMetas) && primaryMetas.size() == 1;
     }
 
+    /**
+     * judge if this table is union primary key table.
+     *
+     * @return true if primary is union primary key
+     */
+    public boolean isUnionPrimary() {
+        return !CollectionUtils.isEmpty(primaryMetas) && primaryMetas.size() > 1;
+    }
+
+    /**
+     * get the single primary key column of this table.
+     *
+     * @return the single primary key column metadata
+     */
     public ColumnsMetaData getSinglePrimary() {
         if (hasPrimary()) {
             return primaryMetas.get(0);
@@ -134,8 +148,6 @@ public class TableMetadata {
 
     public static TableMetadata parse(ResultSet rs, String schema, Endpoint endpoint, DataBaseType databaseType)
         throws SQLException {
-        return parse(rs).setSchema(schema)
-                        .setEndpoint(endpoint)
-                        .setDataBaseType(databaseType);
+        return parse(rs).setSchema(schema).setEndpoint(endpoint).setDataBaseType(databaseType);
     }
 }
diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/service/ShutdownService.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/service/ShutdownService.java
index 7daa050..261fe15 100644
--- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/service/ShutdownService.java
+++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/service/ShutdownService.java
@@ -35,6 +35,8 @@ public class ShutdownService {
     @Resource
     private DynamicThreadPoolManager dynamicThreadPoolManager;
     @Resource
+    private ThreadPoolTaskExecutor sliceSendExecutor;
+    @Resource
     private ProcessLogService processLogService;
 
     @Async
@@ -50,6 +52,7 @@ public class ShutdownService {
         processLogService.saveStopProcessLog();
         threadExecutorList.forEach(ExecutorConfigurationSupport::shutdown);
         executorServiceList.forEach(ExecutorService::shutdownNow);
+        sliceSendExecutor.shutdown();
         System.exit(SpringApplication.exit(SpringUtil.getApplicationContext()));
     }
 
diff --git a/datachecker-common/src/main/java/org/opengauss/datachecker/common/util/ThreadUtil.java b/datachecker-common/src/main/java/org/opengauss/datachecker/common/util/ThreadUtil.java
index ca7cdc2..98d2b4a 100644
--- a/datachecker-common/src/main/java/org/opengauss/datachecker/common/util/ThreadUtil.java
+++ b/datachecker-common/src/main/java/org/opengauss/datachecker/common/util/ThreadUtil.java
@@ -16,17 +16,14 @@
 package org.opengauss.datachecker.common.util;
 
 import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.logging.log4j.Logger;
 
-import java.util.Arrays;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * ThreadUtil
@@ -64,6 +61,19 @@ public class ThreadUtil {
         }
     }
 
+    /**
+     * sleep inside a polling loop; the delay is (times / 5 + 1) seconds, so it grows by one second every five calls
+     *
+     * @param times current retry count
+     */
+    public static void sleepCircle(int times) {
+        try {
+            TimeUnit.SECONDS.sleep(times / 5 + 1);
+        } catch (InterruptedException ie) {
+            LogUtils.warn(log, "thread sleep interrupted exception ");
+        }
+    }
+
     /**
      * The current thread sleeps for 10 - 500 milliseconds
      */
@@ -103,47 +113,12 @@ public class ThreadUtil {
         sleep(RandomUtils.nextInt(100, 500));
     }
 
-    /**
-     * kill thread by thread name
-     *
-     * @param name thread name
-     */
-    public static void killThreadByName(String name) {
-        AtomicInteger threadCount = new AtomicInteger(0);
-        do {
-            ThreadGroup currentGroup = Thread.currentThread().getThreadGroup();
-            int noThreads = currentGroup.activeCount();
-            Thread[] lstThreads = new Thread[noThreads];
-            currentGroup.enumerate(lstThreads);
-            threadCount.set(0);
-            Arrays.stream(lstThreads)
-                    .filter(thread -> {
-                        if (StringUtils.containsIgnoreCase(thread.getName(), name)) {
-                            threadCount.incrementAndGet();
-                            return true;
-                        }
-                        return false;
-                    })
-                    .forEach(thread -> {
-                        if (thread.getState().equals(Thread.State.WAITING)) {
-                            log.warn("thread [{}] :[{} ] has interrupted", thread.getName(), thread.getState());
-                            thread.interrupt();
-                        } else {
-                            threadCount.decrementAndGet();
-                            log.warn("thread [{}] :[{} ]  has stop", thread.getName(), thread.getState());
-                            thread.stop();
-                        }
-                    });
-        } while (threadCount.get() > 0);
-
-    }
-
     /**
      * Custom thread pool construction
      *
      * @return thread pool
      */
-    @SuppressWarnings({"all"})
+    @SuppressWarnings( {"all"} )
     public static ExecutorService newSingleThreadExecutor() {
         return Executors.newFixedThreadPool(1, Executors.defaultThreadFactory());
     }
@@ -154,14 +129,11 @@ public class ThreadUtil {
      * @return Scheduled task single thread
      */
     public static ScheduledExecutorService newSingleThreadScheduledExecutor() {
-        return new ScheduledThreadPoolExecutor(1, new BasicThreadFactory.Builder().daemon(true)
-                .build());
+        return new ScheduledThreadPoolExecutor(1, new BasicThreadFactory.Builder().daemon(true).build());
     }
 
     public static ScheduledExecutorService newSingleThreadScheduledExecutor(String name) {
-        return new ScheduledThreadPoolExecutor(1, new BasicThreadFactory.
-                Builder().namingPattern(name)
-                .build());
+        return new ScheduledThreadPoolExecutor(1, new BasicThreadFactory.Builder().namingPattern(name).build());
     }
 
 }
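
sleepCircle above lengthens the wait as calls accumulate: the delay is (times / 5 + 1) seconds, so it grows by one second every five calls. A short standalone sketch of using such a stepped backoff in a wait loop; the names and the retry cap are illustrative, not the project's code.

    import java.util.concurrent.TimeUnit;

    public final class SteppedBackoffDemo {
        // Wait (times / 5 + 1) seconds: calls 0-4 wait 1s, calls 5-9 wait 2s, and so on.
        static void sleepStepped(int times) throws InterruptedException {
            TimeUnit.SECONDS.sleep(times / 5 + 1);
        }

        public static void main(String[] args) throws InterruptedException {
            int waits = 0;
            boolean ready = false;
            while (!ready && waits < 6) {      // a real caller would re-check its condition each pass
                sleepStepped(waits);
                waits++;
                System.out.println("waited " + waits + " time(s), resource still not ready");
            }
        }
    }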
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/config/AsyncConfig.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/config/AsyncConfig.java
index 200ce05..d63f3fa 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/config/AsyncConfig.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/config/AsyncConfig.java
@@ -15,6 +15,7 @@
 
 package org.opengauss.datachecker.extract.config;
 
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.scheduling.annotation.AsyncConfigurer;
@@ -22,6 +23,8 @@ import org.springframework.scheduling.annotation.EnableAsync;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
 
+import javax.annotation.PreDestroy;
+
 /**
  * AsyncConfig
  *
@@ -29,10 +32,15 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
  * @date 2022/5/8 19:17
  * @since 11
  **/
-@EnableAsync
 @EnableScheduling
 @Configuration
+@EnableAsync(proxyTargetClass = true)
 public class AsyncConfig implements AsyncConfigurer {
+    private ThreadPoolTaskExecutor executor;
+    @Value("${spring.check.core-pool-size}")
+    private int corePoolSize;
+    @Value("${spring.check.maximum-pool-size}")
+    private int maxPoolSize;
 
     /**
      * Asynchronous processing scenario for data extraction non-core business
@@ -40,14 +48,22 @@ public class AsyncConfig implements AsyncConfigurer {
      * @return ThreadPoolTaskExecutor
      */
     @Override
-    @Bean(name = "taskAsyncExecutor")
+    @Bean(name = "sliceSendExecutor")
     public ThreadPoolTaskExecutor getAsyncExecutor() {
-        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
-        executor.setCorePoolSize(1);
-        executor.setMaxPoolSize(5);
-        executor.setQueueCapacity(1000);
-        executor.setThreadNamePrefix("TaskAsyncExecutor-");
+        executor = new ThreadPoolTaskExecutor();
+        executor.setCorePoolSize(corePoolSize);
+        executor.setMaxPoolSize(maxPoolSize);
+        executor.setQueueCapacity(10000);
+        executor.setThreadNamePrefix("slice-send-executor-");
         executor.initialize();
         return executor;
     }
+
+    /**
+     * destroy executor
+     */
+    @PreDestroy
+    public void closeExecutor() {
+        executor.shutdown();
+    }
 }
\ No newline at end of file
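
The reworked AsyncConfig above sizes the pool from the spring.check.* properties, names it sliceSendExecutor, and shuts it down in a @PreDestroy hook. Below is a minimal standalone sketch of configuring a ThreadPoolTaskExecutor the same way, with the property values hard-coded for brevity; it is an illustration, not the project's bean definition.

    import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

    public final class SliceExecutorDemo {
        public static void main(String[] args) {
            ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
            executor.setCorePoolSize(3);          // would come from spring.check.core-pool-size
            executor.setMaxPoolSize(3);           // would come from spring.check.maximum-pool-size
            executor.setQueueCapacity(10000);     // pending slice-send tasks are buffered here
            executor.setThreadNamePrefix("slice-send-executor-");
            executor.setWaitForTasksToCompleteOnShutdown(true);
            executor.initialize();

            executor.submit(() -> System.out.println("sent from " + Thread.currentThread().getName()));

            executor.shutdown();                  // mirrors the @PreDestroy hook; the queued task still finishes
        }
    }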
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/service/DataExtractServiceImpl.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/service/DataExtractServiceImpl.java
index 4744fc0..fd607fa 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/service/DataExtractServiceImpl.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/service/DataExtractServiceImpl.java
@@ -64,6 +64,7 @@ import org.springframework.stereotype.Service;
 import org.springframework.util.StopWatch;
 
 import javax.annotation.Resource;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -72,9 +73,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
 
@@ -152,8 +151,8 @@ public class DataExtractServiceImpl implements DataExtractService {
      *
      * @param processNo Execution process number
      * @throws ProcessMultipleException The previous instance is executing the data extraction service.
-     *                                  It cannot restart the new verification
-     *                                  and throws a ProcessMultipleException exception.
+     * It cannot restart the new verification
+     * and throws a ProcessMultipleException exception.
      */
     @Override
     public PageExtract buildExtractTaskAllTables(String processNo) throws ProcessMultipleException {
@@ -174,7 +173,7 @@ public class DataExtractServiceImpl implements DataExtractService {
             return PageExtract.buildInitPage(taskList.size());
         } else {
             LogUtils.error(log, "process={} is running extract task , {} please wait ... ", atomicProcessNo.get(),
-                    processNo);
+                processNo);
             throw new ProcessMultipleException("process {" + atomicProcessNo.get() + "} is running extract task");
         }
     }
@@ -185,8 +184,7 @@ public class DataExtractServiceImpl implements DataExtractService {
         int startIdx = pageExtract.getPageStartIdx();
         int endIdx = pageExtract.getPageEndIdx();
         for (; startIdx < pageExtract.getSize() && startIdx < endIdx; startIdx++) {
-            pageList.add(taskReference.get()
-                    .get(startIdx));
+            pageList.add(taskReference.get().get(startIdx));
         }
         LogUtils.info(log, "fetchExtractTaskPageTables ={}", pageExtract);
         return pageList;
@@ -197,8 +195,8 @@ public class DataExtractServiceImpl implements DataExtractService {
      *
      * @param taskList taskList
      * @throws ProcessMultipleException The previous instance is executing the data extraction service.
-     *                                  It cannot restart the new verification
-     *                                  and throws a ProcessMultipleException exception.
+     * It cannot restart the new verification
+     * and throws a ProcessMultipleException exception.
      */
     @Override
     public void dispatchSinkExtractTaskPage(@NonNull List taskList) throws ProcessMultipleException {
@@ -212,22 +210,18 @@ public class DataExtractServiceImpl implements DataExtractService {
         // Verify whether the task list built on the source side exists on the destination side,
         // and filter the nonexistent task list
         final Set tableNames = MetaDataCache.getAllKeys();
-
         if (CollectionUtils.isEmpty(taskList) || CollectionUtils.isEmpty(tableNames)) {
             LogUtils.info(log, "build extract task process={} taskList={} ,MetaCache tableNames={}", processNo,
-                    taskList.size(), tableNames);
+                taskList.size(), tableNames);
             return;
         }
         final List extractTasks = taskList.stream()
-                .filter(task -> tableNames.contains(task.getTableName()))
-                .collect(Collectors.toList());
+            .filter(task -> tableNames.contains(task.getTableName()))
+            .collect(Collectors.toList());
         extractTasks.forEach(this::updateSinkMetadata);
-        taskReference.get()
-                .addAll(extractTasks);
-        LogUtils.info(log, "build extract task process={} count={},", processNo, taskReference.get()
-                .size());
+        taskReference.get().addAll(extractTasks);
+        LogUtils.info(log, "build extract task process={} count={},", processNo, taskReference.get().size());
         atomicProcessNo.set(processNo);
-
         // taskCountMap is used to count the number of tasks in table fragment query
         Map taskCountMap = new HashMap<>(Constants.InitialCapacity.EMPTY);
         taskList.forEach(task -> {
@@ -250,8 +244,7 @@ public class DataExtractServiceImpl implements DataExtractService {
     @Override
     public void cleanBuildTask() {
         if (Objects.nonNull(taskReference.getAcquire())) {
-            taskReference.getAcquire()
-                    .clear();
+            taskReference.getAcquire().clear();
         }
         TableExtractStatusCache.removeAll();
         atomicProcessNo.set(PROCESS_NO_RESET);
@@ -308,9 +301,8 @@ public class DataExtractServiceImpl implements DataExtractService {
             while (CollectionUtils.isEmpty(taskReference.get())) {
                 ThreadUtil.sleep(MAX_SLEEP_MILLIS_TIME);
                 if (sleepCount++ > MAX_SLEEP_COUNT) {
-                    LogUtils.info(log, "endpoint [{}] and process[{}}] task is empty!", extractProperties.getEndpoint()
-                                    .getDescription(),
-                            processNo);
+                    LogUtils.info(log, "endpoint [{}] and process[{}}] task is empty!",
+                        extractProperties.getEndpoint().getDescription(), processNo);
                     break;
                 }
             }
@@ -332,12 +324,11 @@ public class DataExtractServiceImpl implements DataExtractService {
             final String tableName = task.getTableName();
             if (!tableCheckStatus.containsKey(tableName) || tableCheckStatus.get(tableName) == -1) {
                 LogUtils.warn(log, "Abnormal table[{}] status, ignoring the current table data extraction task",
-                        tableName);
+                    tableName);
                 return;
             }
             Endpoint endpoint = extractProperties.getEndpoint();
-            while (!tableCheckPointCache.getAll()
-                    .containsKey(tableName)) {
+            while (!tableCheckPointCache.getAll().containsKey(tableName)) {
                 ThreadUtil.sleepHalfSecond();
             }
             List summarizedCheckPoint = tableCheckPointCache.get(tableName);
@@ -351,7 +342,7 @@ public class DataExtractServiceImpl implements DataExtractService {
     }
 
     private List buildSliceByTask(List summarizedCheckPoint, TableMetadata tableMetadata,
-                                           Endpoint endpoint) {
+        Endpoint endpoint) {
         List sliceVoList;
         if (noTableSlice(tableMetadata, summarizedCheckPoint)) {
             sliceVoList = buildSingleSlice(tableMetadata, endpoint);
@@ -366,16 +357,18 @@ public class DataExtractServiceImpl implements DataExtractService {
         sliceRegister.batchRegister(sliceVoList);
         if (sliceVoList.size() <= 20) {
             ExecutorService executorService = dynamicThreadPoolManager.getExecutor(EXTRACT_EXECUTOR);
-            LogUtils.debug(log, "table [{}] get executorService success", sliceVoList.get(0)
-                    .getTable());
-            sliceVoList.forEach(sliceVo -> executorService.submit(sliceFactory.createSliceProcessor(sliceVo)));
+            LogUtils.debug(log, "table [{}] get executorService success", sliceVoList.get(0).getTable());
+            sliceVoList.forEach(sliceVo -> {
+                executorService.submit(sliceFactory.createSliceProcessor(sliceVo));
+            });
         } else {
             int topicSize = ConfigCache.getIntValue(ConfigConstants.MAXIMUM_TOPIC_SIZE);
             int extendMaxPoolSize = ConfigCache.getIntValue(ConfigConstants.EXTEND_MAXIMUM_POOL_SIZE);
             ExecutorService extendExecutor = dynamicThreadPoolManager.getFreeExecutor(topicSize, extendMaxPoolSize);
-            LogUtils.debug(log, "table [{}] get extendExecutor success", sliceVoList.get(0)
-                    .getTable());
-            sliceVoList.forEach(sliceVo -> extendExecutor.submit(sliceFactory.createSliceProcessor(sliceVo)));
+            LogUtils.debug(log, "table [{}] get extendExecutor success", sliceVoList.get(0).getTable());
+            sliceVoList.forEach(sliceVo -> {
+                extendExecutor.submit(sliceFactory.createSliceProcessor(sliceVo));
+            });
         }
     }
 
@@ -431,7 +424,7 @@ public class DataExtractServiceImpl implements DataExtractService {
         List checkPointList;
         try {
             checkPointList = sliceStatement.getCheckPoint(metadata,
-                    ConfigCache.getIntValue(ConfigConstants.MAXIMUM_TABLE_SLICE_SIZE));
+                ConfigCache.getIntValue(ConfigConstants.MAXIMUM_TABLE_SLICE_SIZE));
         } catch (Exception ex) {
             LogUtils.error(log, "getCheckPoint error:", ex);
             return new ArrayList<>();
@@ -449,25 +442,14 @@ public class DataExtractServiceImpl implements DataExtractService {
             LogUtils.info(log, "start pollSwapPoint thread to register CheckPoint taskSize=" + taskList.size());
             checkPointManager.pollSwapPoint(tableCheckPointCache);
             Endpoint endpoint = ConfigCache.getEndPoint();
-            CountDownLatch countDownLatch = new CountDownLatch(taskList.size());
-            ExecutorService executorService = Executors.newFixedThreadPool(5);
             taskList.forEach(task -> {
-                executorService.submit(() -> {
-                    registerCheckPoint(task, endpoint);
-                    countDownLatch.countDown();
-                });
+                registerCheckPoint(task, endpoint);
             });
-            try {
-                countDownLatch.await();
-            } catch (InterruptedException e) {
-                LogUtils.warn(log, "tableRegisterCheckPoint CountDownLatch InterruptedException");
-            }
             LogUtils.info(log, "tableRegisterCheckPoint finished");
             while (tableCheckPointCache.tableCount() != taskList.size()) {
                 ThreadUtil.sleepHalfSecond();
             }
             checkPointManager.close();
-            executorService.shutdownNow();
             sliceRegister.stopCheckPointMonitor(ConfigCache.getEndPoint());
         }).start();
     }
@@ -483,17 +465,15 @@ public class DataExtractServiceImpl implements DataExtractService {
                 tableCheckPointCache.put(tableName, checkPointList);
             }
             checkPointManager.send(new CheckPointData().setTableName(tableName)
-                    .setDigit(checkPoint.checkPkNumber(task.getTableMetadata()))
-                    .setCheckPointList(checkPointList));
+                .setDigit(checkPoint.checkPkNumber(task.getTableMetadata()))
+                .setCheckPointList(checkPointList));
         } catch (Exception e) {
             log.error("register check point failed ", e);
         }
     }
 
     private String sliceTaskNameBuilder(@NonNull String tableName, int index) {
-        return TASK_NAME_PREFIX.concat(tableName)
-                .concat("_slice_")
-                .concat(String.valueOf(index + 1));
+        return TASK_NAME_PREFIX.concat(tableName).concat("_slice_").concat(String.valueOf(index + 1));
     }
 
     private void registerTopic(ExtractTask task) {
@@ -511,7 +491,7 @@ public class DataExtractServiceImpl implements DataExtractService {
     /**
      * Query table data
      *
-     * @param tableName     tableName
+     * @param tableName tableName
      * @param compositeKeys Review primary key set
      * @return Primary key corresponds to table data
      */
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/ExtractPointSwapManager.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/ExtractPointSwapManager.java
index ffdd607..a35691f 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/ExtractPointSwapManager.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/ExtractPointSwapManager.java
@@ -16,6 +16,7 @@
 package org.opengauss.datachecker.extract.slice;
 
 import com.alibaba.fastjson.JSONObject;
+
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.PartitionInfo;
@@ -56,7 +57,7 @@ public class ExtractPointSwapManager {
     private KafkaConsumerConfig kafkaConsumerConfig;
 
     public ExtractPointSwapManager(KafkaTemplate kafkaTemplate,
-                                   KafkaConsumerConfig kafkaConsumerConfig) {
+        KafkaConsumerConfig kafkaConsumerConfig) {
         this.kafkaTemplate = kafkaTemplate;
         this.endpoint = ConfigCache.getEndPoint();
         this.endpoint = ConfigCache.getEndPoint();
@@ -77,6 +78,7 @@ public class ExtractPointSwapManager {
             ConsumerRecords records;
             AtomicInteger deliveredCount = new AtomicInteger();
             LogUtils.info(log, "pollSwapPoint thread started");
+            int retryTimesWait = 0;
             while (!isCompletedSwapTablePoint) {
                 try {
                     records = kafkaConsumer.poll(Duration.ofSeconds(1));
@@ -87,11 +89,13 @@ public class ExtractPointSwapManager {
                                 tableCheckPointCache.put(pointData.getTableName(), translateDigitPoint(pointData));
                                 deliveredCount.getAndIncrement();
                                 LogUtils.info(log, "swap summarized checkpoint of table [{}]:[{}] ", deliveredCount,
-                                        pointData);
+                                    pointData.toString());
                             }
                         });
+                        ThreadUtil.sleepHalfSecond();
                     } else {
-                        ThreadUtil.sleepOneSecond();
+                        LogUtils.info(log, "wait swap summarized checkpoint of table {}...", ++retryTimesWait);
+                        ThreadUtil.sleepCircle(retryTimesWait);
                     }
                 } catch (Exception ex) {
                     if (Objects.equals("java.lang.InterruptedException", ex.getMessage())) {
@@ -102,17 +106,16 @@ public class ExtractPointSwapManager {
                 }
             }
             LogUtils.warn(log, "close check point swap consumer {} :{}", checkPointSwapTopicName,
-                    kafkaConsumer.groupMetadata()
-                            .groupId());
+                kafkaConsumer.groupMetadata().groupId());
             kafkaConsumerConfig.closeConsumer(kafkaConsumer);
         });
     }
 
     private List translateDigitPoint(CheckPointData pointData) {
         return pointData.isDigit() ? pointData.getCheckPointList()
-                .stream()
-                .map(obj -> Long.parseLong((String) obj))
-                .collect(Collectors.toList()) : pointData.getCheckPointList();
+            .stream()
+            .map(obj -> Long.parseLong((String) obj))
+            .collect(Collectors.toList()) : pointData.getCheckPointList();
     }
 
     private void trySubscribe() {
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/SliceProcessorContext.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/SliceProcessorContext.java
index d8bedb5..c061f28 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/SliceProcessorContext.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/SliceProcessorContext.java
@@ -31,12 +31,16 @@ import org.opengauss.datachecker.extract.task.sql.AutoSliceQueryStatement;
 import org.opengauss.datachecker.extract.task.sql.FullQueryStatement;
 import org.opengauss.datachecker.extract.task.sql.QueryStatementFactory;
 import org.opengauss.datachecker.extract.task.sql.SliceQueryStatement;
+import org.opengauss.datachecker.extract.task.sql.UnionPrimarySliceQueryStatement;
 import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
 import org.springframework.stereotype.Component;
 
 import javax.annotation.PreDestroy;
 import javax.annotation.Resource;
+
 import java.util.Objects;
+import java.util.concurrent.Future;
 
 /**
  * SliceProcessorContext
@@ -60,12 +64,23 @@ public class SliceProcessorContext {
     private KafkaConsumerConfig kafkaConsumerConfig;
     @Resource
     private CheckingFeignClient checkingFeignClient;
+    @Resource
+    private ThreadPoolTaskExecutor sliceSendExecutor;
     private SliceStatusFeedbackService sliceStatusFeedbackService;
 
     public void saveProcessing(SliceVo slice) {
         processLogService.saveProcessHistoryLogging(slice.getTable(), slice.getNo());
     }
 
+    /**
+     * submit a slice-send task to the shared async thread pool
+     *
+     * @param sliceSendRunnable sliceSendRunnable
+     * @return future
+     */
+    public Future asyncSendSlice(Runnable sliceSendRunnable) {
+        return sliceSendExecutor.submit(sliceSendRunnable);
+    }
 
     /**
      * 销毁kafkaTemplate
@@ -79,7 +94,7 @@ public class SliceProcessorContext {
      * 创建分片kafka代理
      *
      * @param topicName topic 名称
-     * @param groupId   GroupID
+     * @param groupId GroupID
      * @return 分片kafka代理
      */
     public SliceKafkaAgents createSliceFixedKafkaAgents(String topicName, String groupId) {
@@ -123,6 +138,15 @@ public class SliceProcessorContext {
         return factory.createSliceQueryStatement();
     }
 
+    /**
+     * create a slice query statement for union-primary-key tables
+     *
+     * @return UnionPrimarySliceQueryStatement
+     */
+    public UnionPrimarySliceQueryStatement createSlicePageQueryStatement() {
+        return factory.createSlicePageQueryStatement();
+    }
+
     public AutoSliceQueryStatement createAutoSliceQueryStatement(TableMetadata tableMetadata) {
         CheckPoint checkPoint = new CheckPoint(baseDataService.getDataAccessService());
         return factory.createSliceQueryStatement(checkPoint, tableMetadata);
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/common/SliceResultSetSender.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/common/SliceResultSetSender.java
index da36dd6..88db524 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/common/SliceResultSetSender.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/common/SliceResultSetSender.java
@@ -55,7 +55,7 @@ public class SliceResultSetSender {
      * constructor
      *
      * @param tableMetadata tableMetadata
-     * @param kafkaOperate  kafkaOperate
+     * @param kafkaOperate kafkaOperate
      */
     public SliceResultSetSender(@NonNull TableMetadata tableMetadata, SliceKafkaAgents kafkaOperate) {
         this.resultSetHandler = new ResultSetHandlerFactory().createHandler(tableMetadata.getDataBaseType());
@@ -69,14 +69,13 @@ public class SliceResultSetSender {
     /**
      * resultSetTranslateAndSendSync
      *
-     * @param rsmd   rsmd
-     * @param rs     rs
-     * @param result result
-     * @param sNo    sNo
+     * @param rsmd rsmd
+     * @param rs rs
+     * @param sNo sNo
      */
     public ListenableFuture> resultSetTranslateAndSendSync(ResultSetMetaData rsmd,
-        ResultSet rs, Map result, int sNo) {
-        RowDataHash dataHash = resultSetTranslate(rsmd, rs, result, sNo);
+        ResultSet rs, int sNo) {
+        RowDataHash dataHash = resultSetTranslate(rsmd, rs, sNo);
         return kafkaOperate.sendRowDataSync(dataHash);
     }
 
@@ -93,19 +92,40 @@ public class SliceResultSetSender {
     /**
      * resultSetTranslate
      *
-     * @param rsmd   rsmd
-     * @param rs     rs
-     * @param result result
-     * @param sNo    sNo
+     * @param rsmd rsmd
+     * @param rs rs
+     * @param sNo sNo
      */
-    public RowDataHash resultSetTranslate(ResultSetMetaData rsmd, ResultSet rs, Map result, int sNo) {
-        resultSetHandler.putOneResultSetToMap(tableName, rsmd, rs, result);
-        RowDataHash dataHash = handler(primary, columns, result);
+    public RowDataHash resultSetTranslate(ResultSetMetaData rsmd, ResultSet rs, int sNo) {
+        RowDataHash dataHash = handler(primary, columns, resultSetHandler.putOneResultSetToMap(tableName, rsmd, rs));
         dataHash.setSNo(sNo);
-        result.clear();
         return dataHash;
     }
 
+    /**
+     * translate one result-set row and send it to the kafka topic
+     *
+     * @param values row values read from the result set
+     * @param sNo sNo
+     * @return send result future
+     */
+    public ListenableFuture> resultSetTranslate(Map values, int sNo) {
+        RowDataHash dataHash = handler(primary, columns, values);
+        dataHash.setSNo(sNo);
+        return kafkaOperate.sendRowDataSync(dataHash);
+    }
+
+    /**
+     * resultSet read and parse
+     *
+     * @param rsmd rsmd
+     * @param resultSet rs
+     * @return parse result
+     */
+    public Map resultSet(ResultSetMetaData rsmd, ResultSet resultSet) {
+        return resultSetHandler.putOneResultSetToMap(tableName, rsmd, resultSet);
+    }
+
     /**
      * checkOffsetEnd
      *
@@ -138,12 +158,9 @@ public class SliceResultSetSender {
     private RowDataHash handler(List primary, List columns, Map rowData) {
         long rowHash = HASH_HANDLER.xx3Hash(rowData, columns);
         String primaryValue = HASH_HANDLER.value(rowData, primary);
-        long primaryHash = HASH_HANDLER.xx3Hash(rowData, primary);
+        long primaryHash = HASH_HANDLER.xx3Hash(primaryValue);
         RowDataHash hashData = new RowDataHash();
-        hashData.setKey(primaryValue)
-                .setKHash(primaryHash)
-                .setSliceKey(sliceKey)
-                .setVHash(rowHash);
+        hashData.setKey(primaryValue).setKHash(primaryHash).setSliceKey(sliceKey).setVHash(rowHash);
         return hashData;
     }
 
@@ -158,9 +175,9 @@ public class SliceResultSetSender {
      * csv mode, translate next line data to map and send it to kafka topic
      *
      * @param nextLine next line
-     * @param result   temp map
-     * @param rowIdx   row idx of csv file
-     * @param sNo      sNo
+     * @param result temp map
+     * @param rowIdx row idx of csv file
+     * @param sNo sNo
      */
     public void csvTranslateAndSend(String[] nextLine, Map result, int rowIdx, int sNo) {
         RowDataHash dataHash = csvTranslate(nextLine, result, rowIdx, sNo);
@@ -171,9 +188,9 @@ public class SliceResultSetSender {
      * csv mode, translate next line data to map and send it to kafka topic
      *
      * @param nextLine next line
-     * @param result   temp map
-     * @param rowIdx   row idx of csv file
-     * @param sNo      sNo
+     * @param result temp map
+     * @param rowIdx row idx of csv file
+     * @param sNo sNo
      */
     public ListenableFuture> csvTranslateAndSendSync(String[] nextLine,
         Map result, int rowIdx, int sNo) {
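
For context on the sender internals: handler(primary, columns, rowData) condenses one parsed row into a RowDataHash record, where the joined primary-key values become the record key and key hash, and all column values feed the value hash (this patch switches the key hash to hashing the already-joined primary value instead of re-hashing the primary columns). Below is an illustrative, self-contained sketch of that shape only; the class name, the "_" delimiter, and the use of String.hashCode in place of the xx3 hash are stand-ins, not the project's implementation.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class RowHashSketch {
    // key: joined primary values; kHash: hash of that key; vHash: hash over all column values
    static long[] hashRow(Map<String, String> row, List<String> primaryCols, List<String> allCols) {
        String key = primaryCols.stream()
            .map(col -> row.getOrDefault(col, ""))
            .collect(Collectors.joining("_"));
        long kHash = key.hashCode();                 // stand-in for xx3Hash(primaryValue)
        long vHash = allCols.stream()
            .map(col -> row.getOrDefault(col, ""))
            .collect(Collectors.joining("_"))
            .hashCode();                             // stand-in for xx3Hash(rowData, columns)
        return new long[] {kHash, vHash};
    }
}
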
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/AbstractProcessor.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/AbstractProcessor.java
index 733fcc6..843a8b9 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/AbstractProcessor.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/AbstractProcessor.java
@@ -40,7 +40,7 @@ public abstract class AbstractProcessor implements SliceProcessor {
     /**
      * JDBC fetch size
      */
-    protected static final int FETCH_SIZE = 10000;
+    protected static final int FETCH_SIZE = 200;
 
     /**
      * log
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcSliceProcessor.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcSliceProcessor.java
index b90ad5d..450f868 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcSliceProcessor.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcSliceProcessor.java
@@ -16,7 +16,11 @@
 package org.opengauss.datachecker.extract.slice.process;
 
 import com.alibaba.druid.pool.DruidDataSource;
+
 import org.apache.logging.log4j.Logger;
+import org.opengauss.datachecker.common.config.ConfigCache;
+import org.opengauss.datachecker.common.constant.ConfigConstants;
+import org.opengauss.datachecker.common.entry.enums.DataBaseType;
 import org.opengauss.datachecker.common.entry.extract.SliceExtend;
 import org.opengauss.datachecker.common.entry.extract.SliceVo;
 import org.opengauss.datachecker.common.entry.extract.TableMetadata;
@@ -29,8 +33,9 @@ import org.opengauss.datachecker.extract.slice.common.SliceResultSetSender;
 import org.opengauss.datachecker.extract.task.sql.FullQueryStatement;
 import org.opengauss.datachecker.extract.task.sql.QuerySqlEntry;
 import org.opengauss.datachecker.extract.task.sql.SliceQueryStatement;
+import org.opengauss.datachecker.extract.task.sql.UnionPrimarySliceQueryStatement;
 import org.springframework.kafka.support.SendResult;
-import org.springframework.util.StopWatch;
+import org.springframework.util.Assert;
 import org.springframework.util.concurrent.ListenableFuture;
 
 import java.sql.Connection;
@@ -38,10 +43,12 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
+import java.util.Objects;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 
 /**
@@ -60,7 +67,7 @@ public class JdbcSliceProcessor extends AbstractSliceProcessor {
     /**
      * JdbcSliceProcessor
      *
-     * @param slice   slice
+     * @param slice slice
      * @param context context
      */
     public JdbcSliceProcessor(SliceVo slice, SliceProcessorContext context, DruidDataSource dataSource) {
@@ -75,19 +82,30 @@ public class JdbcSliceProcessor extends AbstractSliceProcessor {
         TableMetadata tableMetadata = context.getTableMetaData(table);
         SliceExtend sliceExtend = createSliceExtend(tableMetadata.getTableHash());
         try {
-            QuerySqlEntry queryStatement = createQueryStatement(tableMetadata);
-            LogUtils.debug(log, "table [{}] query statement :  {}", table, queryStatement.getSql());
-            executeQueryStatement(queryStatement, tableMetadata, sliceExtend);
-        } catch (Exception ex) {
+            if (tableMetadata.isUnionPrimary()) {
+                DataBaseType dataBaseType = ConfigCache.getValue(ConfigConstants.DATA_BASE_TYPE, DataBaseType.class);
+                Assert.isTrue(isSuiteUnionPrimary(dataBaseType),
+                    "Union primary is not supported by current database type " + dataBaseType.getDescription());
+                executeSliceQueryStatementPage(tableMetadata, sliceExtend);
+            } else {
+                QuerySqlEntry queryStatement = createQueryStatement(tableMetadata);
+                LogUtils.debug(log, "table [{}] query statement :  {}", table, queryStatement.getSql());
+                executeQueryStatement(queryStatement, tableMetadata, sliceExtend);
+            }
+        } catch (Exception | Error ex) {
             sliceExtend.setStatus(-1);
             LogUtils.error(log, "table slice [{}] is error", slice.toSimpleString(), ex);
         } finally {
-            LogUtils.info(log, "table slice [{}] is finally ", slice.toSimpleString());
+            LogUtils.info(log, "table slice [{}] is finally  ", slice.toSimpleString());
             feedbackStatus(sliceExtend);
             context.saveProcessing(slice);
         }
     }
 
+    private boolean isSuiteUnionPrimary(DataBaseType dataBaseType) {
+        return Objects.equals(dataBaseType, DataBaseType.OG) || Objects.equals(dataBaseType, DataBaseType.MS);
+    }
+
     private QuerySqlEntry createQueryStatement(TableMetadata tableMetadata) {
         if (slice.isSlice()) {
             SliceQueryStatement sliceStatement = context.createSliceQueryStatement();
@@ -98,27 +116,144 @@ public class JdbcSliceProcessor extends AbstractSliceProcessor {
         }
     }
 
+    private void executeSliceQueryStatementPage(TableMetadata tableMetadata, SliceExtend sliceExtend) {
+        // count the rows contained in this slice
+        UnionPrimarySliceQueryStatement sliceStatement = context.createSlicePageQueryStatement();
+        QuerySqlEntry sliceCountSql = sliceStatement.buildSliceCount(tableMetadata, slice);
+        int sliceCount = querySliceRowTotalCount(sliceExtend, sliceCountSql);
+        QuerySqlEntry baseSliceSql = sliceStatement.buildSlice(tableMetadata, slice);
+        List pageStatementList = sliceStatement.buildPageStatement(baseSliceSql, sliceCount,
+            slice.getFetchSize());
+        SliceResultSetSender sliceSender = null;
+        Connection connection = null;
+        try {
+            // acquire a database connection
+            long estimatedRowCount = slice.isSlice() ? slice.getFetchSize() : tableMetadata.getTableRows();
+            long estimatedMemorySize = estimatedMemorySize(tableMetadata.getAvgRowLength(), estimatedRowCount);
+            connection = jdbcOperation.tryConnectionAndClosedAutoCommit(estimatedMemorySize, dataSource);
+            // connection acquired: prepare the slice query and start the async data handler
+            sliceSender = createSliceResultSetSender(tableMetadata);
+            sliceSender.setRecordSendKey(slice.getName());
+            List offsetList = new CopyOnWriteArrayList<>();
+            List>> batchFutures = new CopyOnWriteArrayList<>();
+            AsyncDataHandler asyncHandler = new AsyncDataHandler(batchFutures, sliceSender, offsetList);
+            asyncHandler.start();
+            context.asyncSendSlice(asyncHandler);
+            // start querying and push each row to the async handler thread
+            boolean isFirstStatement = true;
+            long startOffset = 0L;
+            for (String pageStatement : pageStatementList) {
+                if (isFirstStatement) {
+                    // only use first page statement's start offset
+                    startOffset = pageQueryUnionPrimarySlice(pageStatement, connection, sliceSender, asyncHandler);
+                } else {
+                    // other page statement's start offset is ignored
+                    pageQueryUnionPrimarySlice(pageStatement, connection, sliceSender, asyncHandler);
+                }
+                isFirstStatement = false;
+            }
+            sliceExtend.setStartOffset(startOffset);
+            waitToStopAsyncHandlerAndResources(asyncHandler);
+            updateExtendSliceOffsetAndCount(sliceExtend, rowCount.get(), offsetList);
+        } catch (Exception ex) {
+            LogUtils.error(log, "slice [{}] has exception :", slice.getName(), ex);
+            throw new ExtractDataAccessException(ex.getMessage());
+        } finally {
+            ConnectionMgr.close(connection, null, null);
+            if (sliceSender != null) {
+                sliceSender.agentsClosed();
+            }
+            jdbcOperation.releaseConnection(connection);
+            LogUtils.info(log, "query union primary slice and send data {} Count:{}", sliceExtend.getName(),
+                rowCount.get());
+        }
+    }
+
+    private long pageQueryUnionPrimarySlice(String pageStatement, Connection connection,
+        SliceResultSetSender sliceSender, AsyncDataHandler asyncHandler) throws SQLException, InterruptedException {
+        long startOffset;
+        PreparedStatement ps = connection.prepareStatement(pageStatement);
+        ps.setFetchSize(FETCH_SIZE);
+        ResultSet resultSet = ps.executeQuery();
+        startOffset = sliceSender.checkOffsetEnd();
+        ResultSetMetaData rsmd = resultSet.getMetaData();
+        while (resultSet.next()) {
+            this.rowCount.incrementAndGet();
+            if (asyncHandler.isSenderBusy()) {
+                Thread.sleep(100);
+            }
+            asyncHandler.addRow(sliceSender.resultSet(rsmd, resultSet));
+        }
+        // rows have been handed to the async handler; close the statement and result set
+        ConnectionMgr.close(null, ps, resultSet);
+        return startOffset;
+    }
+
+    private static void waitToStopAsyncHandlerAndResources(AsyncDataHandler asyncHandler) {
+        // all page queries are done; wait for the async data handler to drain and stop
+        try {
+            asyncHandler.waitToStop();
+        } catch (InterruptedException e) {
+            throw new ExtractDataAccessException("slice data async handler is interrupted");
+        }
+    }
+
+    private int querySliceRowTotalCount(SliceExtend sliceExtend, QuerySqlEntry sliceCountSql) {
+        int sliceCount = 0;
+        try (Connection connection = jdbcOperation.tryConnectionAndClosedAutoCommit(1L, dataSource);
+            PreparedStatement ps = connection.prepareStatement(sliceCountSql.getSql());
+            ResultSet resultSet = ps.executeQuery()) {
+            if (resultSet.next()) {
+                sliceCount = resultSet.getInt(1);
+            }
+        } catch (SQLException ex) {
+            log.error("execute slice count query error ", ex);
+            throw new ExtractDataAccessException("execute slice count query error");
+        }
+        log.info("query union primary table slice {} Count:{}", sliceExtend.getName(), sliceCount);
+        return sliceCount;
+    }
+
     private void executeQueryStatement(QuerySqlEntry sqlEntry, TableMetadata tableMetadata, SliceExtend sliceExtend) {
-        StopWatch stopWatch = new StopWatch(slice.getName());
-        stopWatch.start("start " + slice.getName());
         SliceResultSetSender sliceSender = null;
         Connection connection = null;
         PreparedStatement ps = null;
         ResultSet resultSet = null;
         try {
+            // acquire a database connection
             long estimatedRowCount = slice.isSlice() ? slice.getFetchSize() : tableMetadata.getTableRows();
             long estimatedMemorySize = estimatedMemorySize(tableMetadata.getAvgRowLength(), estimatedRowCount);
             connection = jdbcOperation.tryConnectionAndClosedAutoCommit(estimatedMemorySize, dataSource);
-            LogUtils.debug(log, "query slice and send data sql : {}", sqlEntry.getSql());
-            ps = connection.prepareStatement(sqlEntry.getSql());
-            resultSet = ps.executeQuery();
-            resultSet.setFetchSize(FETCH_SIZE);
+            // connection acquired: prepare the slice query and start the async data handler
+            List offsetList = new CopyOnWriteArrayList<>();
+            List>> batchFutures = new CopyOnWriteArrayList<>();
             sliceSender = createSliceResultSetSender(tableMetadata);
             sliceSender.setRecordSendKey(slice.getName());
+            AsyncDataHandler asyncHandler = new AsyncDataHandler(batchFutures, sliceSender, offsetList);
+            asyncHandler.start();
+            context.asyncSendSlice(asyncHandler);
+            // start querying and push each row to the async handler thread
+            ps = connection.prepareStatement(sqlEntry.getSql());
+            ps.setFetchSize(FETCH_SIZE);
+            resultSet = ps.executeQuery();
             sliceExtend.setStartOffset(sliceSender.checkOffsetEnd());
-            List offsetList = sliceQueryResultAndSendSync(sliceSender, resultSet);
+            ResultSetMetaData rsmd = resultSet.getMetaData();
+            while (resultSet.next()) {
+                this.rowCount.incrementAndGet();
+                if (asyncHandler.isSenderBusy()) {
+                    Thread.sleep(100);
+                }
+                // hand the parsed row to the async handler thread
+                asyncHandler.addRow(sliceSender.resultSet(rsmd, resultSet));
+            }
+            // slice query finished: close the statement and result set, then wait for the async handler to drain
+            try {
+                ConnectionMgr.close(null, ps, resultSet);
+                asyncHandler.waitToStop();
+            } catch (InterruptedException e) {
+                throw new ExtractDataAccessException("slice data async handler is interrupted");
+            }
             updateExtendSliceOffsetAndCount(sliceExtend, rowCount.get(), offsetList);
-            stopWatch.stop();
         } catch (Exception ex) {
             LogUtils.error(log, "slice [{}] has exception :", slice.getName(), ex);
             throw new ExtractDataAccessException(ex.getMessage());
@@ -128,29 +263,90 @@ public class JdbcSliceProcessor extends AbstractSliceProcessor {
                 sliceSender.agentsClosed();
             }
             jdbcOperation.releaseConnection(connection);
-            LogUtils.info(log, "query slice and send data cost: {}", stopWatch.shortSummary());
+            LogUtils.info(log, "query slice and send data count {}", rowCount.get());
         }
     }
 
-    private List sliceQueryResultAndSendSync(SliceResultSetSender sliceSender, ResultSet resultSet)
-        throws SQLException {
-        ResultSetMetaData rsmd = resultSet.getMetaData();
-        Map result = new TreeMap<>();
-        List offsetList = new LinkedList<>();
-        List>> batchFutures = new LinkedList<>();
-        while (resultSet.next()) {
-            this.rowCount.incrementAndGet();
-            batchFutures.add(sliceSender.resultSetTranslateAndSendSync(rsmd, resultSet, result, slice.getNo()));
-            if (batchFutures.size() == FETCH_SIZE) {
+    /**
+     * async data handler thread
+     */
+    class AsyncDataHandler implements Runnable {
+        private final List>> batchFutures;
+        private final SliceResultSetSender sliceSender;
+        private final int maxQueueSize = 10000;
+        private final BlockingQueue> batchData = new LinkedBlockingQueue<>();
+        private final List offsetList;
+
+        private volatile boolean canStartFetchRow = false;
+
+        AsyncDataHandler(List>> batchFutures,
+            SliceResultSetSender sliceSender, List offsetList) {
+            this.batchFutures = batchFutures;
+            this.sliceSender = sliceSender;
+            this.offsetList = offsetList;
+        }
+
+        /**
+         * mark the handler as started so the run loop begins draining the queue
+         */
+        public void start() {
+            this.canStartFetchRow = true;
+        }
+
+        /**
+         * add row to batch handler queue
+         *
+         * @param row row
+         */
+        public void addRow(Map row) {
+            this.batchData.add(row);
+        }
+
+        /**
+         * wait until the queue is drained, then stop the run loop
+         *
+         * @throws InterruptedException if interrupted while waiting
+         */
+        public void waitToStop() throws InterruptedException {
+            while (!batchData.isEmpty()) {
+                Thread.sleep(100);
+            }
+            this.canStartFetchRow = false;
+        }
+
+        @Override
+        public void run() {
+            log.info("start send slice row {}", slice.getName());
+            while (canStartFetchRow) {
+                if (Objects.isNull(batchData.peek())) {
+                    try {
+                        Thread.sleep(100);
+                    } catch (InterruptedException ignore) {
+                    }
+                } else {
+                    Map value = batchData.poll();
+                    batchFutures.add(sliceSender.resultSetTranslate(value, slice.getNo()));
+                    if (batchFutures.size() == FETCH_SIZE) {
+                        offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
+                        batchFutures.clear();
+                    }
+                }
+            }
+            if (!batchFutures.isEmpty()) {
                 offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
                 batchFutures.clear();
             }
         }
-        if (batchFutures.size() > 0) {
-            offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
-            batchFutures.clear();
+
+        /**
+         * check whether the sender is busy: the sender is considered busy
+         * when the batch queue size reaches maxQueueSize
+         *
+         * @return true if the sender is busy, false otherwise
+         */
+        public boolean isSenderBusy() {
+            return batchData.size() >= maxQueueSize;
         }
-        return offsetList;
     }
 
     private SliceResultSetSender createSliceResultSetSender(TableMetadata tableMetadata) {
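
The new union-primary and single-primary paths above share one pattern: the JDBC reader thread parses rows and enqueues them, sleeping briefly whenever isSenderBusy() reports the queue is at its cap, while AsyncDataHandler drains the queue, hashes and sends each row, and flushes the send futures in FETCH_SIZE batches. A compact, illustrative sketch of that reader/handler hand-off follows; names are hypothetical, and the real handler sends RowDataHash records to Kafka instead of printing.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

class AsyncHandOffSketch {
    private static final int MAX_QUEUE_SIZE = 10_000;

    private final BlockingQueue<String> rows = new LinkedBlockingQueue<>();
    private volatile boolean running = true;

    boolean isBusy() {
        return rows.size() >= MAX_QUEUE_SIZE;
    }

    void readerLoop() throws InterruptedException {
        for (int i = 0; i < 1_000; i++) {        // stands in for the resultSet.next() loop
            while (isBusy()) {
                Thread.sleep(100);               // same back-pressure the processor applies
            }
            rows.add("row-" + i);
        }
        running = false;                         // reader done; let the handler drain and exit
    }

    void handlerLoop() throws InterruptedException {
        while (running || !rows.isEmpty()) {
            String row = rows.poll();
            if (row == null) {
                Thread.sleep(100);               // queue momentarily empty
            } else {
                System.out.println("send " + row);   // placeholder for the Kafka send
            }
        }
    }

    public static void main(String[] args) throws Exception {
        AsyncHandOffSketch sketch = new AsyncHandOffSketch();
        Thread handler = new Thread(() -> {
            try {
                sketch.handlerLoop();
            } catch (InterruptedException ignored) {
                Thread.currentThread().interrupt();
            }
        });
        handler.start();
        sketch.readerLoop();
        handler.join();
    }
}

A bounded BlockingQueue with put()/take() would give the same back-pressure without explicit sleeps; the polling form mirrors the processor code above.
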
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcTableProcessor.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcTableProcessor.java
index 4a034d8..64cfa8b 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcTableProcessor.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/slice/process/JdbcTableProcessor.java
@@ -38,8 +38,6 @@ import java.time.Duration;
 import java.time.LocalDateTime;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
 
 /**
  * JdbcTableProcessor
@@ -110,17 +108,16 @@ public class JdbcTableProcessor extends AbstractTableProcessor {
                     ResultSet resultSet = ps.executeQuery()) {
                     resultSet.setFetchSize(fetchSize);
                     ResultSetMetaData rsmd = resultSet.getMetaData();
-                    Map result = new TreeMap<>();
                     int rowCount = 0;
                     while (resultSet.next()) {
                         rowCount++;
-                        batchFutures.add(sliceSender.resultSetTranslateAndSendSync(rsmd, resultSet, result, i));
+                        batchFutures.add(sliceSender.resultSetTranslateAndSendSync(rsmd, resultSet, i));
                         if (batchFutures.size() == FETCH_SIZE) {
                             offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
                             batchFutures.clear();
                         }
                     }
-                    if (batchFutures.size() > 0) {
+                    if (!batchFutures.isEmpty()) {
                         offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
                         batchFutures.clear();
                     }
@@ -162,16 +159,15 @@ public class JdbcTableProcessor extends AbstractTableProcessor {
                 ResultSet resultSet = ps.executeQuery()) {
                 resultSet.setFetchSize(fetchSize);
                 ResultSetMetaData rsmd = resultSet.getMetaData();
-                Map result = new TreeMap<>();
                 while (resultSet.next()) {
                     tableRowCount++;
-                    batchFutures.add(sliceSender.resultSetTranslateAndSendSync(rsmd, resultSet, result, 0));
+                    batchFutures.add(sliceSender.resultSetTranslateAndSendSync(rsmd, resultSet, 0));
                     if (batchFutures.size() == FETCH_SIZE) {
                         offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
                         batchFutures.clear();
                     }
                 }
-                if (batchFutures.size() > 0) {
+                if (!batchFutures.isEmpty()) {
                     offsetList.add(getBatchFutureRecordOffsetScope(batchFutures));
                     batchFutures.clear();
                 }
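
Both the table processor above and the slice processor flush their send futures in FETCH_SIZE batches, recording the offset range each batch covered before clearing it, and flushing any remaining tail after the read loop. A stripped-down sketch of that batching rhythm, using CompletableFuture in place of Spring's ListenableFuture and assuming (hypothetically) that the recorded scope is the min/max record offset of the batch:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

class BatchFlushSketch {
    private static final int FETCH_SIZE = 200;

    private final List<CompletableFuture<Long>> batch = new ArrayList<>();
    private final List<long[]> offsetScopes = new ArrayList<>();

    void onRowSent(CompletableFuture<Long> sendFuture) {
        batch.add(sendFuture);
        if (batch.size() == FETCH_SIZE) {
            flush();
        }
    }

    void finish() {
        if (!batch.isEmpty()) {
            flush();               // flush the tail batch after the read loop ends
        }
    }

    private void flush() {
        long min = Long.MAX_VALUE;
        long max = Long.MIN_VALUE;
        for (CompletableFuture<Long> future : batch) {
            long offset = future.join();   // wait for the broker ack and read the record offset
            min = Math.min(min, offset);
            max = Math.max(max, offset);
        }
        offsetScopes.add(new long[] {min, max});
        batch.clear();
    }
}
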
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/CheckPoint.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/CheckPoint.java
index 49d3a3b..674e50d 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/CheckPoint.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/CheckPoint.java
@@ -17,6 +17,7 @@ package org.opengauss.datachecker.extract.task;
 
 import com.alibaba.druid.pool.DruidDataSource;
 import com.alibaba.druid.pool.DruidPooledConnection;
+
 import org.apache.logging.log4j.Logger;
 import org.opengauss.datachecker.common.config.ConfigCache;
 import org.opengauss.datachecker.common.constant.ConfigConstants;
@@ -66,7 +67,7 @@ public class CheckPoint {
      * init table CheckPoint List
      *
      * @param tableMetadata tableMetadata
-     * @param slice         slice
+     * @param slice slice
      * @return check point
      */
     public List initCheckPointList(TableMetadata tableMetadata, int slice) {
@@ -80,16 +81,14 @@ public class CheckPoint {
         stopWatch.start();
         DataBaseType dataBaseType = ConfigCache.getValue(ConfigConstants.DATA_BASE_TYPE, DataBaseType.class);
         DataAccessParam param = new DataAccessParam().setSchema(SqlUtil.escape(schema, dataBaseType))
-                                                     .setName(SqlUtil.escape(tableName, dataBaseType))
-                                                     .setColName(SqlUtil.escape(pkName, dataBaseType));
+            .setName(SqlUtil.escape(tableName, dataBaseType))
+            .setColName(SqlUtil.escape(pkName, dataBaseType));
         Connection connection = getConnection();
         param.setOffset(slice);
         Object maxPoint = dataAccessService.max(connection, param);
         List checkPointList = dataAccessService.queryPointList(connection, param);
         checkPointList.add(maxPoint);
-        checkPointList = checkPointList.stream()
-                                       .distinct()
-                                       .collect(Collectors.toList());
+        checkPointList = checkPointList.stream().distinct().collect(Collectors.toList());
         stopWatch.stop();
         LogUtils.info(log, "init check-point-list table [{}]:[{}] ", tableName, stopWatch.shortSummary());
         ConnectionMgr.close(connection);
@@ -112,15 +111,12 @@ public class CheckPoint {
     }
 
     public boolean checkPkNumber(TableMetadata tableMetadata) {
-        ColumnsMetaData pkColumn = tableMetadata.getPrimaryMetas()
-                                                .get(0);
+        ColumnsMetaData pkColumn = tableMetadata.getPrimaryMetas().get(0);
         return MetaDataUtil.isDigitPrimaryKey(pkColumn);
     }
 
     private String getPkName(TableMetadata tableMetadata) {
-        return tableMetadata.getPrimaryMetas()
-                            .get(0)
-                            .getColumnName();
+        return tableMetadata.getPrimaryMetas().get(0).getColumnName();
     }
 
     public Long[][] translateBetween(List checkPointList) {
@@ -128,8 +124,8 @@ public class CheckPoint {
         for (int i = 0; i < between.length; i++) {
             String value = (String) checkPointList.get(i);
             String value2 = (String) checkPointList.get(i + 1);
-            between[i][0] = Long.parseLong(value);
-            between[i][1] = Long.parseLong(value2);
+            between[i][0] = Objects.isNull(value) ? null : Long.parseLong(value);
+            between[i][1] = Objects.isNull(value2) ? null : Long.parseLong(value2);
         }
         return between;
     }
@@ -150,8 +146,8 @@ public class CheckPoint {
     public long queryMaxIdOfAutoIncrementTable(TableMetadata tableMetadata) {
         DataAccessParam param = new DataAccessParam();
         param.setSchema(tableMetadata.getSchema())
-             .setName(tableMetadata.getTableName())
-             .setColName(getPkName(tableMetadata));
+            .setName(tableMetadata.getTableName())
+            .setColName(getPkName(tableMetadata));
         Connection connection = ConnectionMgr.getConnection();
         String maxId = dataAccessService.max(connection, param);
         ConnectionMgr.close(connection, null, null);
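
The translateBetween change above guards against null check points before calling Long.parseLong, which would otherwise throw a NumberFormatException when a check point is absent. A small illustrative sketch of the null-safe conversion (hypothetical class; check points kept as strings as in the patch):

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

class CheckPointSketch {
    static Long[][] translateBetween(List<String> points) {
        Long[][] between = new Long[points.size() - 1][2];
        for (int i = 0; i < between.length; i++) {
            between[i][0] = toLongOrNull(points.get(i));
            between[i][1] = toLongOrNull(points.get(i + 1));
        }
        return between;
    }

    private static Long toLongOrNull(String value) {
        return Objects.isNull(value) ? null : Long.parseLong(value);
    }

    public static void main(String[] args) {
        // the trailing null no longer aborts the whole conversion
        System.out.println(Arrays.deepToString(translateBetween(Arrays.asList("1", "100", null))));
    }
}
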
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ExtractTaskRunnable.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ExtractTaskRunnable.java
index d5ffb07..b2a4eae 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ExtractTaskRunnable.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ExtractTaskRunnable.java
@@ -17,6 +17,7 @@ package org.opengauss.datachecker.extract.task;
 
 import com.alibaba.druid.pool.DruidDataSource;
 import com.alibaba.fastjson.JSON;
+
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.logging.log4j.Logger;
@@ -47,6 +48,7 @@ import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.lang.NonNull;
 
 import javax.sql.DataSource;
+
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -92,13 +94,13 @@ public class ExtractTaskRunnable implements Runnable {
      * Thread Constructor
      *
      * @param processNo processNo
-     * @param task      task information
-     * @param support   Thread helper class
+     * @param task task information
+     * @param support Thread helper class
      */
     public ExtractTaskRunnable(String processNo, ExtractTask task, ExtractThreadSupport support) {
         this.task = task;
-        JdbcDataExtractionOperations jdbcOperate =
-            new JdbcDataExtractionOperations(support.getDataSource(), support.getResourceManager());
+        JdbcDataExtractionOperations jdbcOperate = new JdbcDataExtractionOperations(support.getDataSource(),
+            support.getResourceManager());
         this.jdbcOperation = new AtomicReference<>(jdbcOperate);
         this.checkingFeignClient = support.getCheckingFeignClient();
         this.dynamicThreadPoolManager = support.getDynamicThreadPoolManager();
@@ -122,15 +124,13 @@ public class ExtractTaskRunnable implements Runnable {
                 executeMultiTaskOffset(context);
             }
             checkingFeignClient.refreshTableExtractStatus(task.getTableName(), extractContext.getEndpoint(),
-                extractContext.getEndpoint()
-                              .getCode());
+                extractContext.getEndpoint().getCode());
             log.info("refresh table {} extract status success", task.getTableName());
         } catch (Exception ex) {
             checkingFeignClient.refreshTableExtractStatus(task.getTableName(), extractContext.getEndpoint(), -1);
             log.error("extract", ex);
         } finally {
-            Runtime.getRuntime()
-                   .gc();
+            Runtime.getRuntime().gc();
         }
     }
 
@@ -141,19 +141,15 @@ public class ExtractTaskRunnable implements Runnable {
         try {
             FullQueryStatement fullQueryStatement = factory.createFullQueryStatement();
             QuerySqlEntry querySqlEntry = fullQueryStatement.builderByTaskOffset(context.getTableMetadata());
-            connection = jdbcOperation.get()
-                                      .tryConnectionAndClosedAutoCommit(context.evaluateMemorySize());
-            rowCount = jdbcOperation.get()
-                                    .resultSetHandler(connection, querySqlEntry, context, FETCH_SIZE);
+            connection = jdbcOperation.get().tryConnectionAndClosedAutoCommit(context.evaluateMemorySize());
+            rowCount = jdbcOperation.get().resultSetHandler(connection, querySqlEntry, context, FETCH_SIZE);
         } catch (SQLException ex) {
             log.error("jdbc query  {} error : {}", context.getTableName(), ex.getMessage());
             throw new ExtractDataAccessException();
         } finally {
-            jdbcOperation.get()
-                         .releaseConnection(connection);
+            jdbcOperation.get().releaseConnection(connection);
             log.info("query table [{}] row-count [{}] cost [{}] milliseconds", context.getTableName(), rowCount,
-                Duration.between(start, LocalDateTime.now())
-                        .toMillis());
+                Duration.between(start, LocalDateTime.now()).toMillis());
             logNumberOfGlobalTasks(context.getTableName(), null,
                 dynamicThreadPoolManager.getExecutor(DynamicTpConstant.EXTRACT_EXECUTOR));
         }
@@ -164,8 +160,8 @@ public class ExtractTaskRunnable implements Runnable {
         int slice = extractContext.getMaximumTableSliceSize();
         long totalRows = 0;
         try {
-            AutoSliceQueryStatement sliceStatement =
-                factory.createSliceQueryStatement(checkPoint, context.getTableMetadata());
+            AutoSliceQueryStatement sliceStatement = factory.createSliceQueryStatement(checkPoint,
+                context.getTableMetadata());
             List querySqlList = sliceStatement.builderByTaskOffset(context.getTableMetadata(), slice);
             if (CollectionUtils.isNotEmpty(querySqlList)) {
                 totalRows = executeParallelTask(querySqlList, context);
@@ -176,8 +172,7 @@ public class ExtractTaskRunnable implements Runnable {
             throw new ExtractDataAccessException();
         } finally {
             log.info("table [{}] execution [{}] rows completed, taking a total of {} milliseconds",
-                context.getTableName(), totalRows, Duration.between(start, LocalDateTime.now())
-                                                           .toMillis());
+                context.getTableName(), totalRows, Duration.between(start, LocalDateTime.now()).toMillis());
         }
     }
 
@@ -194,25 +189,23 @@ public class ExtractTaskRunnable implements Runnable {
                 Connection connection = null;
                 try {
                     connection = jdbcOperation.get()
-                                              .tryConnectionAndClosedAutoCommit(context.evaluateMemorySize(sliceSize));
+                        .tryConnectionAndClosedAutoCommit(context.evaluateMemorySize(sliceSize));
                     totalRowCount.addAndGet(jdbcOperation.get()
-                                                         .resultSetHandlerParallelContext(connection, queryEntry,
-                                                             context, FETCH_SIZE));
+                        .resultSetHandlerParallelContext(connection, queryEntry, context, FETCH_SIZE));
                 } catch (SQLException ex) {
                     exceptionCount.incrementAndGet();
                     log.error("jdbc parallel query [{}] error : {}", queryEntry.getSql(), ex.getMessage());
                     throw new ExtractDataAccessException();
                 } finally {
                     countDown(context.getTableName(), countDownLatch, executor);
-                    jdbcOperation.get()
-                                 .releaseConnection(connection);
+                    jdbcOperation.get().releaseConnection(connection);
                 }
             });
         });
         countDownLatch.await();
         if (exceptionCount.get() > 0) {
-            String msg =
-                "Table " + context.getTableName() + " parallel query has " + exceptionCount.get() + " task exception";
+            String msg = "Table " + context.getTableName() + " parallel query has " + exceptionCount.get()
+                + " task exception";
             log.error(msg);
             throw new ExtractDataAccessException(msg);
         }
@@ -221,30 +214,26 @@ public class ExtractTaskRunnable implements Runnable {
 
     private void seekExtractTableInfo(TableMetadata tableMetadata) {
         log.info("table [{}] isAutoIncrement=[{}] , column=[{}] , avgRowLength=[{}] , tableRows=[{}] ",
-            tableMetadata.getTableName(), tableMetadata.isAutoIncrement(), tableMetadata.getColumnsMetas()
-                                                                                        .size(),
+            tableMetadata.getTableName(), tableMetadata.isAutoIncrement(), tableMetadata.getColumnsMetas().size(),
             tableMetadata.getAvgRowLength(), tableMetadata.getTableRows());
         log.info("table [{}] table column metadata -> {}", tableMetadata.getTableName(),
             getTableColumnInformation(tableMetadata));
     }
 
     private void enableParallelQueryDop(int taskOffset) throws SQLException {
-        int dop = Math.min(taskOffset, jdbcOperation.get()
-                                                    .getParallelQueryDop());
-        jdbcOperation.get()
-                     .enableDatabaseParallelQuery(dop);
+        int dop = Math.min(taskOffset, jdbcOperation.get().getParallelQueryDop());
+        jdbcOperation.get().enableDatabaseParallelQuery(dop);
     }
 
     private String getTableColumnInformation(TableMetadata tableMetadata) {
         return tableMetadata.getColumnsMetas()
-                            .stream()
-                            .map(ColumnsMetaData::getColumnMsg)
-                            .collect(Collectors.joining(" , "));
+            .stream()
+            .map(ColumnsMetaData::getColumnMsg)
+            .collect(Collectors.joining(" , "));
     }
 
     private boolean isNotSlice() {
-        return taskUtilHelper.noTableSlice() || jdbcOperation.get()
-                                                             .getParallelQueryDop() == 1;
+        return taskUtilHelper.noTableSlice() || jdbcOperation.get().getParallelQueryDop() == 1;
     }
 
     private void countDown(String tableName, CountDownLatch countDownLatch, ThreadPoolExecutor executor) {
@@ -277,7 +266,7 @@ public class ExtractTaskRunnable implements Runnable {
         /**
          * constructor
          *
-         * @param jdbcDataSource  datasource
+         * @param jdbcDataSource datasource
          * @param resourceManager resourceManager
          */
         public JdbcDataExtractionOperations(DataSource jdbcDataSource, ResourceManager resourceManager) {
@@ -334,9 +323,9 @@ public class ExtractTaskRunnable implements Runnable {
          * use a jdbc connection to query sql ,and parse and hash query result.then hash result send kafka topic
          *
          * @param connection connection
-         * @param sqlEntry   sqlEntry
-         * @param context    context
-         * @param fetchSize  fetchSize
+         * @param sqlEntry sqlEntry
+         * @param context context
+         * @param fetchSize fetchSize
          * @return resultSize
          * @throws SQLException SQLException
          */
@@ -352,13 +341,13 @@ public class ExtractTaskRunnable implements Runnable {
                 Map result = new TreeMap<>();
                 if (kafkaOperate.isMultiplePartitions()) {
                     while (resultSet.next()) {
-                        context.resultSetMultiplePartitionsHandler(rsmd, resultSet, result);
+                        context.resultSetMultiplePartitionsHandler(rsmd, resultSet);
                         rowCount++;
                         logProcessTableRowNum(context, sqlEntry, rowCount);
                     }
                 } else {
                     while (resultSet.next()) {
-                        context.resultSetSinglePartitionHandler(rsmd, resultSet, result);
+                        context.resultSetSinglePartitionHandler(rsmd, resultSet);
                         rowCount++;
                         logProcessTableRowNum(context, sqlEntry, rowCount);
                     }
@@ -372,10 +361,8 @@ public class ExtractTaskRunnable implements Runnable {
         private void logSliceQueryInfo(QuerySqlEntry sqlEntry, LocalDateTime start, int rowCount,
             LocalDateTime endQuery, LocalDateTime end) {
             log.info(" extract table {} , row-count=[{}] completed , cost=[ query={} send={} all={} ]",
-                sqlEntry.toString(), rowCount, Duration.between(start, endQuery)
-                                                       .toMillis(), Duration.between(endQuery, end)
-                                                                            .toMillis(), Duration.between(start, end)
-                                                                                                 .toMillis());
+                sqlEntry.toString(), rowCount, Duration.between(start, endQuery).toMillis(),
+                Duration.between(endQuery, end).toMillis(), Duration.between(start, end).toMillis());
         }
 
         private void logProcessTableRowNum(QueryTableRowContext context, QuerySqlEntry sqlEntry, int rowCount) {
@@ -389,9 +376,9 @@ public class ExtractTaskRunnable implements Runnable {
          * use a jdbc connection to query sql ,and parse and hash query result.then hash result send kafka topic
          *
          * @param connection connection
-         * @param sqlEntry   sqlEntry
-         * @param context    context
-         * @param fetchSize  fetchSize
+         * @param sqlEntry sqlEntry
+         * @param context context
+         * @param fetchSize fetchSize
          * @return resultSize
          * @throws SQLException SQLException
          */
@@ -551,7 +538,7 @@ public class ExtractTaskRunnable implements Runnable {
          * constructor
          *
          * @param tableMetadata tableMetadata
-         * @param kafkaOperate  kafkaOperate
+         * @param kafkaOperate kafkaOperate
          */
         QueryTableRowContext(@NonNull TableMetadata tableMetadata, KafkaOperations kafkaOperate) {
             this.resultSetHandler = new ResultSetHandlerFactory().createHandler(tableMetadata.getDataBaseType());
@@ -566,11 +553,9 @@ public class ExtractTaskRunnable implements Runnable {
          *
          * @param rs rs
          */
-        public void resultSetMultiplePartitionsHandler(ResultSetMetaData rsmd, ResultSet rs,
-            Map result) {
-            resultSetHandler.putOneResultSetToMap(getTableName(), rsmd, rs, result);
-            RowDataHash dataHash = handler(primary, columns, result);
-            result.clear();
+        public void resultSetMultiplePartitionsHandler(ResultSetMetaData rsmd, ResultSet rs) {
+            RowDataHash dataHash = handler(primary, columns,
+                resultSetHandler.putOneResultSetToMap(getTableName(), rsmd, rs));
             kafkaOperate.sendMultiplePartitionsRowData(dataHash);
         }
 
@@ -579,10 +564,9 @@ public class ExtractTaskRunnable implements Runnable {
          *
          * @param rs rs
          */
-        public void resultSetSinglePartitionHandler(ResultSetMetaData rsmd, ResultSet rs, Map result) {
-            resultSetHandler.putOneResultSetToMap(getTableName(), rsmd, rs, result);
-            RowDataHash dataHash = handler(primary, columns, result);
-            result.clear();
+        public void resultSetSinglePartitionHandler(ResultSetMetaData rsmd, ResultSet rs) {
+            RowDataHash dataHash = handler(primary, columns,
+                resultSetHandler.putOneResultSetToMap(getTableName(), rsmd, rs));
             kafkaOperate.sendSinglePartitionRowData(dataHash);
         }
 
@@ -604,9 +588,7 @@ public class ExtractTaskRunnable implements Runnable {
             String primaryValue = hashHandler.value(rowData, primary);
             long primaryHash = hashHandler.xx3Hash(rowData, primary);
             RowDataHash hashData = new RowDataHash();
-            hashData.setKey(primaryValue)
-                    .setKHash(primaryHash)
-                    .setVHash(rowHash);
+            hashData.setKey(primaryValue).setKHash(primaryHash).setVHash(rowHash);
             return hashData;
         }
 
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ResultSetHandler.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ResultSetHandler.java
index 60d2c21..1f1bb73 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ResultSetHandler.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/ResultSetHandler.java
@@ -75,28 +75,27 @@ public abstract class ResultSetHandler {
      * Convert the current query result set into map according to the metadata information of the result set
      *
      * @param tableName JDBC Data query table
-     * @param rsmd      JDBC Data query result set
+     * @param rsmd JDBC Data query result set
      * @param resultSet JDBC Data query result set
-     * @param values    values
      * @return JDBC Data encapsulation results
      */
-    public Map putOneResultSetToMap(final String tableName, ResultSetMetaData rsmd, ResultSet resultSet,
-        Map values) {
+    public Map putOneResultSetToMap(final String tableName, ResultSetMetaData rsmd,
+        ResultSet resultSet) {
+        Map result = new TreeMap<>();
         try {
-            IntStream.rangeClosed(1, rsmd.getColumnCount())
-                     .forEach(columnIdx -> {
-                         String columnLabel = null;
-                         try {
-                             columnLabel = rsmd.getColumnLabel(columnIdx);
-                             values.put(columnLabel, convert(resultSet, columnIdx, rsmd));
-                         } catch (SQLException ex) {
-                             LOG.error(" Convert data [{}:{}] {} error ", tableName, columnLabel, ex.getMessage(), ex);
-                         }
-                     });
+            IntStream.rangeClosed(1, rsmd.getColumnCount()).forEach(columnIdx -> {
+                String columnLabel = null;
+                try {
+                    columnLabel = rsmd.getColumnLabel(columnIdx);
+                    result.put(columnLabel, convert(resultSet, columnIdx, rsmd));
+                } catch (SQLException ex) {
+                    LOG.error(" Convert data [{}:{}] {} error ", tableName, columnLabel, ex.getMessage(), ex);
+                }
+            });
         } catch (SQLException ex) {
             LOG.error(" parse data metadata information exception", ex);
         }
-        return values;
+        return result;
     }
 
     /**
@@ -108,7 +107,7 @@ public abstract class ResultSetHandler {
     public Map putOneResultSetToMap(ResultSet resultSet) throws SQLException {
         final ResultSetMetaData rsmd = resultSet.getMetaData();
         String tableName = rsmd.getTableName(1);
-        return putOneResultSetToMap(tableName, rsmd, resultSet, new TreeMap<>());
+        return putOneResultSetToMap(tableName, rsmd, resultSet);
     }
 
     /**
@@ -116,7 +115,7 @@ public abstract class ResultSetHandler {
      *
      * @param resultSet resultSet
      * @param columnIdx columnIdx
-     * @param rsmd      rsmd
+     * @param rsmd rsmd
      * @return result
      * @throws SQLException SQLException
      */
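
With this change putOneResultSetToMap allocates and returns a fresh TreeMap per row instead of filling and clearing a map supplied by the caller, which is what makes it safe to queue rows across threads in the slice processors. A minimal sketch of the per-row allocation, with the type-aware column conversion stubbed out by getString:

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Map;
import java.util.TreeMap;

class PerRowMapSketch {
    // each call builds and returns its own map, so the row can be handed to another thread safely
    static Map<String, String> rowToMap(ResultSetMetaData rsmd, ResultSet rs) throws SQLException {
        Map<String, String> row = new TreeMap<>();
        for (int col = 1; col <= rsmd.getColumnCount(); col++) {
            row.put(rsmd.getColumnLabel(col), rs.getString(col));
        }
        return row;
    }
}
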
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/QueryStatementFactory.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/QueryStatementFactory.java
index c15698e..c960b45 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/QueryStatementFactory.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/QueryStatementFactory.java
@@ -31,9 +31,9 @@ public class QueryStatementFactory {
     /**
      * create SliceQueryStatement
      *
-     * @param checkPoint    checkPoint
+     * @param checkPoint checkPoint
      * @param tableMetadata tableMetadata
-     * @return SliceQueryStatement
+     * @return A new AutoSliceQueryStatement instance.
      */
     public AutoSliceQueryStatement createSliceQueryStatement(CheckPoint checkPoint, TableMetadata tableMetadata) {
         return new SinglePrimaryAutoSliceQueryStatement(checkPoint);
@@ -42,12 +42,21 @@ public class QueryStatementFactory {
     /**
      * create slice query statement of single primary slice
      *
-     * @return SliceQueryStatement
+     * @return A new SinglePrimarySliceQueryStatement instance.
      */
     public SliceQueryStatement createSliceQueryStatement() {
         return new SinglePrimarySliceQueryStatement();
     }
 
+    /**
+     * create slice query statement of union primary slice
+     *
+     * @return A new UnionPrimarySliceQueryStatement instance.
+     */
+    public UnionPrimarySliceQueryStatement createSlicePageQueryStatement() {
+        return new UnionPrimarySliceQueryStatement();
+    }
+
     /**
      * create FullQueryStatement
      *
@@ -56,8 +65,7 @@ public class QueryStatementFactory {
     public FullQueryStatement createFullQueryStatement() {
         return tableMetadata -> {
             final SelectSqlBuilder sqlBuilder = new SelectSqlBuilder(tableMetadata);
-            String fullSql = sqlBuilder.isDivisions(false).isCsvMode(ConfigCache.isCsvMode())
-                                       .builder();
+            String fullSql = sqlBuilder.isDivisions(false).isCsvMode(ConfigCache.isCsvMode()).builder();
             return new QuerySqlEntry(tableMetadata.getTableName(), fullSql, 0, tableMetadata.getTableRows());
         };
     }
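
The new createSlicePageQueryStatement supports the union-primary path shown earlier: the processor first runs a count query for the slice, then asks the statement for one page query per fetch-size window of the base slice SQL. A hypothetical sketch of that pagination arithmetic only (LIMIT/OFFSET syntax assumed here; the real SQL is produced by UnionPrimarySliceQueryStatement):

import java.util.ArrayList;
import java.util.List;

class PageStatementSketch {
    static List<String> buildPageStatements(String baseSliceSql, int sliceCount, int fetchSize) {
        List<String> pages = new ArrayList<>();
        int pageCount = (sliceCount + fetchSize - 1) / fetchSize;   // ceiling division
        for (int page = 0; page < pageCount; page++) {
            pages.add(baseSliceSql + " limit " + fetchSize + " offset " + ((long) page * fetchSize));
        }
        return pages;
    }

    public static void main(String[] args) {
        buildPageStatements("select id, val from t where id between 1 and 1000 order by id", 1000, 200)
            .forEach(System.out::println);
    }
}
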
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SelectSqlBuilder.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SelectSqlBuilder.java
index 9ad37a8..f3e16b8 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SelectSqlBuilder.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SelectSqlBuilder.java
@@ -16,6 +16,7 @@
 package org.opengauss.datachecker.extract.task.sql;
 
 import lombok.Getter;
+
 import org.apache.commons.lang3.StringUtils;
 import org.opengauss.datachecker.common.entry.enums.DataBaseType;
 import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData;
@@ -50,35 +51,35 @@ import static org.opengauss.datachecker.extract.task.sql.QuerySqlTemplate.TABLE_
  * @since 11
  **/
 public class SelectSqlBuilder {
+    private static final String QUERY_WHERE_BETWEEN
+        = "SELECT :columnsList FROM :schema.:tableName where :pkCondition :orderBy ";
     private static final Map SQL_GENERATE = new HashMap<>();
-    private static final SqlGenerateTemplate GENERATE_TEMPLATE =
-        (template, sqlGenerateMeta) -> template.replace(COLUMN, sqlGenerateMeta.getColumns())
-                                               .replace(SCHEMA, sqlGenerateMeta.getSchema())
-                                               .replace(TABLE_NAME, sqlGenerateMeta.getTableName())
-                                               .replace(ORDER_BY, sqlGenerateMeta.getOrder())
-                                               .replace(START, String.valueOf(sqlGenerateMeta.getStart()))
-                                               .replace(OFFSET, String.valueOf(sqlGenerateMeta.getOffset()));
-    private static final SqlGenerateTemplate NO_OFFSET_SQL_GENERATE_TEMPLATE =
-        (template, sqlGenerateMeta) -> template.replace(COLUMN, sqlGenerateMeta.getColumns())
-                                               .replace(SCHEMA, sqlGenerateMeta.getSchema())
-                                               .replace(TABLE_NAME, sqlGenerateMeta.getTableName());
-    private static final SqlGenerate OFFSET_GENERATE =
-        (sqlGenerateMeta) -> GENERATE_TEMPLATE.replace(QuerySqlTemplate.QUERY_OFF_SET, sqlGenerateMeta);
-    private static final SqlGenerate NO_OFFSET_GENERATE =
-        (sqlGenerateMeta) -> NO_OFFSET_SQL_GENERATE_TEMPLATE.replace(QuerySqlTemplate.QUERY_NO_OFF_SET,
-            sqlGenerateMeta);
-
-    private static final SqlGenerateTemplate QUERY_BETWEEN_TEMPLATE =
-        (template, sqlGenerateMeta) -> template.replace(COLUMN, sqlGenerateMeta.getColumns())
-                                               .replace(SCHEMA, sqlGenerateMeta.getSchema())
-                                               .replace(TABLE_NAME, sqlGenerateMeta.getTableName())
-                                               .replace(ORDER_BY, sqlGenerateMeta.getOrder())
-                                               .replace(PRIMARY_KEY, sqlGenerateMeta.getPrimaryKey())
-                                               .replace(START, String.valueOf(sqlGenerateMeta.getStart()))
-                                               .replace(OFFSET, String.valueOf(
-                                                   sqlGenerateMeta.getStart() + sqlGenerateMeta.getOffset() - 1));
-    private static final SqlGenerate QUERY_BETWEEN_GENERATE =
-        (sqlGenerateMeta -> QUERY_BETWEEN_TEMPLATE.replace(QUERY_BETWEEN_SET, sqlGenerateMeta));
+    private static final SqlGenerateTemplate GENERATE_TEMPLATE = (template, sqlGenerateMeta) -> template.replace(COLUMN,
+            sqlGenerateMeta.getColumns())
+        .replace(SCHEMA, sqlGenerateMeta.getSchema())
+        .replace(TABLE_NAME, sqlGenerateMeta.getTableName())
+        .replace(ORDER_BY, sqlGenerateMeta.getOrder())
+        .replace(START, String.valueOf(sqlGenerateMeta.getStart()))
+        .replace(OFFSET, String.valueOf(sqlGenerateMeta.getOffset()));
+    private static final SqlGenerateTemplate NO_OFFSET_SQL_GENERATE_TEMPLATE
+        = (template, sqlGenerateMeta) -> template.replace(COLUMN, sqlGenerateMeta.getColumns())
+        .replace(SCHEMA, sqlGenerateMeta.getSchema())
+        .replace(TABLE_NAME, sqlGenerateMeta.getTableName());
+    private static final SqlGenerate OFFSET_GENERATE = (sqlGenerateMeta) -> GENERATE_TEMPLATE.replace(
+        QuerySqlTemplate.QUERY_OFF_SET, sqlGenerateMeta);
+    private static final SqlGenerate NO_OFFSET_GENERATE = (sqlGenerateMeta) -> NO_OFFSET_SQL_GENERATE_TEMPLATE.replace(
+        QuerySqlTemplate.QUERY_NO_OFF_SET, sqlGenerateMeta);
+
+    private static final SqlGenerateTemplate QUERY_BETWEEN_TEMPLATE = (template, sqlGenerateMeta) -> template.replace(
+            COLUMN, sqlGenerateMeta.getColumns())
+        .replace(SCHEMA, sqlGenerateMeta.getSchema())
+        .replace(TABLE_NAME, sqlGenerateMeta.getTableName())
+        .replace(ORDER_BY, sqlGenerateMeta.getOrder())
+        .replace(PRIMARY_KEY, sqlGenerateMeta.getPrimaryKey())
+        .replace(START, String.valueOf(sqlGenerateMeta.getStart()))
+        .replace(OFFSET, String.valueOf(sqlGenerateMeta.getStart() + sqlGenerateMeta.getOffset() - 1));
+    private static final SqlGenerate QUERY_BETWEEN_GENERATE = (sqlGenerateMeta -> QUERY_BETWEEN_TEMPLATE.replace(
+        QUERY_BETWEEN_SET, sqlGenerateMeta));
 
     static {
         SQL_GENERATE.put(DataBaseType.MS, OFFSET_GENERATE);
@@ -99,6 +100,7 @@ public class SelectSqlBuilder {
     private boolean isFirst = false;
     private boolean isEnd = false;
     private boolean isCsvMode = false;
+    private String countSnippet = "count(1)";
 
     /**
      * Table fragment query SQL Statement Builder
@@ -114,7 +116,7 @@ public class SelectSqlBuilder {
     /**
      * Table fragment query SQL Statement Builder
      *
-     * @param start  start
+     * @param start start
      * @param offset offset
      * @return builder
      */
@@ -172,6 +174,7 @@ public class SelectSqlBuilder {
         this.isCsvMode = isCsvMode;
         return this;
     }
+
     /**
      * Table fragment query SQL Statement Builder
      *
@@ -183,25 +186,41 @@ public class SelectSqlBuilder {
         Assert.notEmpty(columnsMetas, Message.COLUMN_METADATA_EMPTY_NOT_TO_BUILD_SQL);
         final ConditionLimit conditionLimit = tableMetadata.getConditionLimit();
         if (Objects.nonNull(conditionLimit)) {
-            return buildSelectSqlConditionLimit(tableMetadata, conditionLimit);
+            return buildSelectSqlConditionLimit(tableMetadata, conditionLimit, null);
         } else if (isDivisions) {
-            return buildSelectSqlWherePrimary(tableMetadata);
+            return buildSelectSqlWherePrimary(tableMetadata, null);
         } else {
-            return buildSelectSqlOffsetZero(columnsMetas, tableMetadata.getTableName());
+            return buildSelectSqlOffsetZero(columnsMetas, tableMetadata.getTableName(), null);
         }
     }
 
-    String QUERY_WHERE_BETWEEN = "SELECT :columnsList FROM :schema.:tableName where :pkCondition :orderBy ";
+    /**
+     * build the row count query sql for the current table fragment
+     *
+     * @return count sql
+     */
+    public String countBuilder() {
+        Assert.isTrue(Objects.nonNull(tableMetadata), Message.TABLE_METADATA_NULL_NOT_TO_BUILD_SQL);
+        List columnsMetas = tableMetadata.getColumnsMetas();
+        Assert.notEmpty(columnsMetas, Message.COLUMN_METADATA_EMPTY_NOT_TO_BUILD_SQL);
+        final ConditionLimit conditionLimit = tableMetadata.getConditionLimit();
+        if (Objects.nonNull(conditionLimit)) {
+            return buildSelectSqlConditionLimit(tableMetadata, conditionLimit, countSnippet);
+        } else if (isDivisions) {
+            return buildSelectSqlWherePrimary(tableMetadata, countSnippet);
+        } else {
+            return buildSelectSqlOffsetZero(columnsMetas, tableMetadata.getTableName(), countSnippet);
+        }
+    }
 
-    private String buildSelectSqlWherePrimary(TableMetadata tableMetadata) {
+    private String buildSelectSqlWherePrimary(TableMetadata tableMetadata, String countSnippet) {
         List columnsMetas = tableMetadata.getColumnsMetas();
         String schemaEscape = escape(schema, dataBaseType);
         String tableName = escape(tableMetadata.getTableName(), dataBaseType);
-        String columnNames = getColumnNameList(columnsMetas, dataBaseType);
-        String primaryKey = escape(tableMetadata.getPrimaryMetas()
-                                                .get(0)
-                                                .getColumnName(), dataBaseType);
-        final String orderBy = getOrderBy(tableMetadata.getPrimaryMetas(), dataBaseType);
+        boolean isCountSnippet = StringUtils.isNotEmpty(countSnippet);
+        String columnNames = isCountSnippet ? countSnippet : getColumnNameList(columnsMetas, dataBaseType);
+        String primaryKey = escape(tableMetadata.getPrimaryMetas().get(0).getColumnName(), dataBaseType);
+        final String orderBy = isCountSnippet ? "" : getOrderBy(tableMetadata.getPrimaryMetas(), dataBaseType);
         String pkCondition;
         if (StringUtils.isNotEmpty(seqStart) && StringUtils.isNotEmpty(seqEnd)) {
             pkCondition = getPkCondition(primaryKey);
@@ -209,10 +228,10 @@ public class SelectSqlBuilder {
             pkCondition = getNumberPkCondition(primaryKey);
         }
         return QUERY_WHERE_BETWEEN.replace(COLUMN, columnNames)
-                                  .replace(SCHEMA, schemaEscape)
-                                  .replace(TABLE_NAME, tableName)
-                                  .replace(PK_CONDITION, pkCondition)
-                                  .replace(ORDER_BY, orderBy);
+            .replace(SCHEMA, schemaEscape)
+            .replace(TABLE_NAME, tableName)
+            .replace(PK_CONDITION, pkCondition)
+            .replace(ORDER_BY, orderBy);
     }
 
     private String getNumberPkCondition(String primaryKey) {
@@ -265,23 +284,24 @@ public class SelectSqlBuilder {
         }
     }
 
-    private String buildSelectSqlConditionLimit(TableMetadata tableMetadata, ConditionLimit conditionLimit) {
+    private String buildSelectSqlConditionLimit(TableMetadata tableMetadata, ConditionLimit conditionLimit,
+        String countSnippet) {
         List<ColumnsMetaData> columnsMetas = tableMetadata.getColumnsMetas();
-        String columnNames = getColumnNameList(columnsMetas, dataBaseType);
+        boolean isCountSnippet = StringUtils.isNotEmpty(countSnippet);
+        String columnNames = isCountSnippet ? countSnippet : getColumnNameList(columnsMetas, dataBaseType);
         final String schemaEscape = escape(schema, dataBaseType);
         final String tableName = escape(tableMetadata.getTableName(), dataBaseType);
-        final String orderBy = getOrderBy(tableMetadata.getPrimaryMetas(), dataBaseType);
-        SqlGenerateMeta sqlGenerateMeta =
-            new SqlGenerateMeta(schemaEscape, tableName, columnNames, orderBy, conditionLimit.getStart(),
-                conditionLimit.getOffset());
+        final String orderBy = isCountSnippet ? "" : getOrderBy(tableMetadata.getPrimaryMetas(), dataBaseType);
+        SqlGenerateMeta sqlGenerateMeta = new SqlGenerateMeta(schemaEscape, tableName, columnNames, orderBy,
+            conditionLimit.getStart(), conditionLimit.getOffset());
         return getSqlGenerate(dataBaseType).replace(sqlGenerateMeta);
     }
 
     private String getOrderBy(List<ColumnsMetaData> primaryMetas, DataBaseType dataBaseType) {
         return "order by " + primaryMetas.stream()
-                                         .map(ColumnsMetaData::getColumnName)
-                                         .map(key -> escape(key, dataBaseType) + " asc")
-                                         .collect(Collectors.joining(DELIMITER));
+            .map(ColumnsMetaData::getColumnName)
+            .map(key -> escape(key, dataBaseType) + " asc")
+            .collect(Collectors.joining(DELIMITER));
     }
 
     public String buildSelectSqlOffset(TableMetadata tableMetadata, long start, long offset) {
@@ -295,11 +315,9 @@ public class SelectSqlBuilder {
             sqlGenerateMeta = new SqlGenerateMeta(schemaEscape, tableName, columnNames, orderBy, start, offset);
             return getSqlGenerate(dataBaseType).replace(sqlGenerateMeta);
         } else {
-            String primaryKey = escape(tableMetadata.getPrimaryMetas()
-                                                    .get(0)
-                                                    .getColumnName(), dataBaseType);
-            sqlGenerateMeta =
-                new SqlGenerateMeta(schemaEscape, tableName, columnNames, orderBy, start, offset, primaryKey);
+            String primaryKey = escape(tableMetadata.getPrimaryMetas().get(0).getColumnName(), dataBaseType);
+            sqlGenerateMeta = new SqlGenerateMeta(schemaEscape, tableName, columnNames, orderBy, start, offset,
+                primaryKey);
             return QUERY_BETWEEN_GENERATE.replace(sqlGenerateMeta);
         }
     }
@@ -311,19 +329,20 @@ public class SelectSqlBuilder {
         return SqlUtil.escape(content, dataBaseType);
     }
 
-    private String buildSelectSqlOffsetZero(List<ColumnsMetaData> columnsMetas, String tableName) {
-        String columnNames = getColumnNameList(columnsMetas, dataBaseType);
+    private String buildSelectSqlOffsetZero(List<ColumnsMetaData> columnsMetas, String tableName, String countSnippet) {
+        boolean isCountSnippet = StringUtils.isNotEmpty(countSnippet);
+        String columnNames = isCountSnippet ? countSnippet : getColumnNameList(columnsMetas, dataBaseType);
         String schemaEscape = escape(schema, dataBaseType);
-        SqlGenerateMeta sqlGenerateMeta =
-            new SqlGenerateMeta(schemaEscape, escape(tableName, dataBaseType), columnNames);
+        SqlGenerateMeta sqlGenerateMeta = new SqlGenerateMeta(schemaEscape, escape(tableName, dataBaseType),
+            columnNames);
         return NO_OFFSET_GENERATE.replace(sqlGenerateMeta);
     }
 
     private String getColumnNameList(@NonNull List<ColumnsMetaData> columnsMetas, DataBaseType dataBaseType) {
         return columnsMetas.stream()
-                           .map(ColumnsMetaData::getColumnName)
-                           .map(column -> escape(column, dataBaseType))
-                           .collect(Collectors.joining(DELIMITER));
+            .map(ColumnsMetaData::getColumnName)
+            .map(column -> escape(column, dataBaseType))
+            .collect(Collectors.joining(DELIMITER));
     }
 
     private SqlGenerate getSqlGenerate(DataBaseType dataBaseType) {
@@ -397,7 +416,7 @@ public class SelectSqlBuilder {
         /**
          * Generate SQL statement according to SQL generator metadata object
          *
-         * @param template        SQL template
+         * @param template SQL template
          * @param sqlGenerateMeta SQL generator metadata
          * @return sql
          */
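
The countBuilder() addition reuses the same select templates as builder(), swapping the projected column list for a count snippet and blanking the order-by clause. A minimal standalone sketch of that substitution, reusing the placeholder tokens of the QUERY_WHERE_BETWEEN template (the schema, table, condition values, and class name below are invented for illustration and are not part of the patch):

import java.util.List;

public class CountSnippetSketch {
    private static final String QUERY_WHERE_BETWEEN =
        "SELECT :columnsList FROM :schema.:tableName where :pkCondition :orderBy";

    static String render(String columns, String orderBy) {
        return QUERY_WHERE_BETWEEN.replace(":columnsList", columns)
            .replace(":schema", "test_schema")
            .replace(":tableName", "t_user")
            .replace(":pkCondition", "id >= 1 and id < 10001")
            .replace(":orderBy", orderBy)
            .trim();
    }

    public static void main(String[] args) {
        String columnNames = String.join(", ", List.of("id", "name"));
        // row query: full column list plus order by, as builder() emits
        System.out.println(render(columnNames, "order by id asc"));
        // count query: countBuilder() swaps in the count snippet and drops the order by
        System.out.println(render("count(1)", ""));
    }
}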
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SinglePrimarySliceQueryStatement.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SinglePrimarySliceQueryStatement.java
index 48623fe..607e0e2 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SinglePrimarySliceQueryStatement.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/SinglePrimarySliceQueryStatement.java
@@ -59,6 +59,6 @@ public class SinglePrimarySliceQueryStatement implements SliceQueryStatement {
     }
 
     private Object translateOffset(boolean isDigit, String beginIdx) {
-        return isDigit ? Long.valueOf(beginIdx) : beginIdx;
+        return Objects.isNull(beginIdx) ? null : isDigit ? Long.valueOf(beginIdx) : beginIdx;
     }
 }
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/UnionPrimarySliceQueryStatement.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/UnionPrimarySliceQueryStatement.java
new file mode 100644
index 0000000..ca917e8
--- /dev/null
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/task/sql/UnionPrimarySliceQueryStatement.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2022-2022 Huawei Technologies Co.,Ltd.
+ *
+ * openGauss is licensed under Mulan PSL v2.
+ * You can use this software according to the terms and conditions of the Mulan PSL v2.
+ * You may obtain a copy of Mulan PSL v2 at:
+ *
+ *           http://license.coscl.org.cn/MulanPSL2
+ *
+ * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
+ * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
+ * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
+ * See the Mulan PSL v2 for more details.
+ */
+
+package org.opengauss.datachecker.extract.task.sql;
+
+import org.opengauss.datachecker.common.config.ConfigCache;
+import org.opengauss.datachecker.common.entry.enums.CheckMode;
+import org.opengauss.datachecker.common.entry.extract.ColumnsMetaData;
+import org.opengauss.datachecker.common.entry.extract.SliceVo;
+import org.opengauss.datachecker.common.entry.extract.TableMetadata;
+import org.opengauss.datachecker.extract.util.MetaDataUtil;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * union primary slice query statement
+ *
+ * @author :wangchao
+ * @date :Created in 2023/8/9
+ * @since :11
+ */
+public class UnionPrimarySliceQueryStatement implements SliceQueryStatement {
+    private final boolean isHalfOpenHalfClosed;
+
+    /**
+     * create UnionPrimarySliceQueryStatement
+     */
+    public UnionPrimarySliceQueryStatement() {
+        // In CSV mode the slice data scope is fully closed; in JDBC mode it is half-open, half-closed.
+        this.isHalfOpenHalfClosed = !Objects.equals(ConfigCache.getCheckMode(), CheckMode.CSV);
+    }
+
+    /**
+     * build slice count sql entry
+     *
+     * @param tableMetadata tableMetadata
+     * @param slice slice
+     * @return sql entry
+     */
+    public QuerySqlEntry buildSliceCount(TableMetadata tableMetadata, SliceVo slice) {
+        final SelectSqlBuilder sqlBuilder = new SelectSqlBuilder(tableMetadata);
+        sqlBuilder.isDivisions(slice.getTotal() > 1);
+        sqlBuilder.isFirstCondition(slice.getNo() == 1);
+        sqlBuilder.isEndCondition(slice.getNo() == slice.getTotal());
+        sqlBuilder.isHalfOpenHalfClosed(isHalfOpenHalfClosed);
+        sqlBuilder.isCsvMode(ConfigCache.isCsvMode());
+        ColumnsMetaData primaryKey = tableMetadata.getSinglePrimary();
+        boolean isDigit = MetaDataUtil.isDigitPrimaryKey(primaryKey);
+        Object offset = translateOffset(isDigit, slice.getBeginIdx());
+        Object endOffset = translateOffset(isDigit, slice.getEndIdx());
+        sqlBuilder.offset(offset, endOffset);
+        return new QuerySqlEntry(slice.getTable(), sqlBuilder.countBuilder(), offset, endOffset);
+    }
+
+    @Override
+    public QuerySqlEntry buildSlice(TableMetadata tableMetadata, SliceVo slice) {
+        final SelectSqlBuilder sqlBuilder = new SelectSqlBuilder(tableMetadata);
+        sqlBuilder.isDivisions(slice.getTotal() > 1);
+        sqlBuilder.isFirstCondition(slice.getNo() == 1);
+        sqlBuilder.isEndCondition(slice.getNo() == slice.getTotal());
+        sqlBuilder.isHalfOpenHalfClosed(isHalfOpenHalfClosed);
+        sqlBuilder.isCsvMode(ConfigCache.isCsvMode());
+        ColumnsMetaData primaryKey = tableMetadata.getSinglePrimary();
+        boolean isDigit = MetaDataUtil.isDigitPrimaryKey(primaryKey);
+        Object offset = translateOffset(isDigit, slice.getBeginIdx());
+        Object endOffset = translateOffset(isDigit, slice.getEndIdx());
+        sqlBuilder.offset(offset, endOffset);
+        return new QuerySqlEntry(slice.getTable(), sqlBuilder.builder(), offset, endOffset);
+    }
+
+    private Object translateOffset(boolean isDigit, String beginIdx) {
+        return Objects.isNull(beginIdx) ? null : isDigit ? Long.valueOf(beginIdx) : beginIdx;
+    }
+
+    /**
+     * Build paged slice select sql. When the slice row count is large, the slice is queried page by page,
+     * for example: select * from ... where ... limit xxx offset xxx
+     *
+     * @param baseSliceSql slice sql entry
+     * @param sliceCount slice total count
+     * @param fetchSize page select fetch size
+     * @return page select sql
+     */
+    public List<String> buildPageStatement(QuerySqlEntry baseSliceSql, int sliceCount, int fetchSize) {
+        int totalPage = sliceCount / fetchSize + (sliceCount % fetchSize == 0 ? 0 : 1);
+        List<String> statements = new ArrayList<>(totalPage);
+        for (int i = 0; i < totalPage; i++) {
+            StringBuilder sqlBuilder = new StringBuilder(baseSliceSql.getSql());
+            sqlBuilder.append(" limit ").append(fetchSize).append(" offset ").append(i * fetchSize);
+            statements.add(sqlBuilder.toString());
+        }
+        return statements;
+    }
+}
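
A self-contained sketch of the paging arithmetic buildPageStatement applies to a base slice query: the count produced by buildSliceCount feeds sliceCount, and each page re-issues the base SQL with a limit/offset suffix. The base SQL text, row count, and class name are invented for illustration:

import java.util.ArrayList;
import java.util.List;

public class PageStatementSketch {
    // mirrors buildPageStatement: ceil(sliceCount / fetchSize) pages,
    // each appending "limit <fetchSize> offset <page * fetchSize>"
    static List<String> pageStatements(String baseSql, int sliceCount, int fetchSize) {
        int totalPage = sliceCount / fetchSize + (sliceCount % fetchSize == 0 ? 0 : 1);
        List<String> statements = new ArrayList<>(totalPage);
        for (int i = 0; i < totalPage; i++) {
            statements.add(baseSql + " limit " + fetchSize + " offset " + (i * fetchSize));
        }
        return statements;
    }

    public static void main(String[] args) {
        // 25,000 rows with a fetch size of 10,000 -> 3 paged statements
        pageStatements("select id, name from test_schema.t_user where id >= 1 and id < 25001",
            25_000, 10_000).forEach(System.out::println);
    }
}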
diff --git a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/util/HashHandler.java b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/util/HashHandler.java
index c2a6b83..3810a50 100644
--- a/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/util/HashHandler.java
+++ b/datachecker-extract/src/main/java/org/opengauss/datachecker/extract/util/HashHandler.java
@@ -16,13 +16,13 @@
 package org.opengauss.datachecker.extract.util;
 
 import net.openhft.hashing.LongHashFunction;
+
+import org.apache.commons.lang3.StringUtils;
 import org.springframework.util.CollectionUtils;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
-import java.util.stream.Collectors;
 
 import static org.opengauss.datachecker.extract.constants.ExtConstants.PRIMARY_DELIMITER;
 
@@ -46,24 +46,35 @@ public class HashHandler {
      * find the corresponding value of the field in the map, and splice the found value.
      *
      * @param columnsValueMap Field corresponding query data
-     * @param columns         List of field names
+     * @param columns List of field names
      * @return Hash calculation result corresponding to the current row
      */
     public long xx3Hash(Map<String, String> columnsValueMap, List<String> columns) {
         if (CollectionUtils.isEmpty(columns)) {
             return 0L;
         }
-        String colValue =
-            columnsValueMap.entrySet().stream().filter(entry -> columns.contains(entry.getKey())).map(Entry::getValue)
-                           .collect(Collectors.joining());
-        return XX_3_HASH.hashChars(colValue);
+        StringBuilder valueBuffer = new StringBuilder();
+        for (String column : columns) {
+            valueBuffer.append(columnsValueMap.getOrDefault(column, ""));
+        }
+        return XX_3_HASH.hashChars(valueBuffer);
+    }
+
+    /**
+     * Hash calculation of a single field
+     *
+     * @param value field value
+     * @return hash result
+     */
+    public long xx3Hash(String value) {
+        return XX_3_HASH.hashChars(value);
     }
 
     /**
      * column hash result
      *
      * @param columnsValueMap columns value
-     * @param columns         column names
+     * @param columns column names
      * @return column hash result
      */
     public String value(Map<String, String> columnsValueMap, List<String> columns) {
@@ -71,11 +82,17 @@ public class HashHandler {
             return "";
         }
         List<String> values = new ArrayList<>();
-        columns.forEach(column -> {
-            if (columnsValueMap.containsKey(column)) {
-                values.add(columnsValueMap.get(column));
-            }
-        });
-        return values.stream().map(String::valueOf).collect(Collectors.joining(PRIMARY_DELIMITER));
+        if (columns.size() == 1) {
+            return columnsValueMap.getOrDefault(columns.get(0), "");
+        } else if (columns.size() == 2) {
+            return columnsValueMap.get(columns.get(0)) + PRIMARY_DELIMITER + columnsValueMap.get(columns.get(1));
+        } else {
+            columns.forEach(column -> {
+                if (columnsValueMap.containsKey(column)) {
+                    values.add(columnsValueMap.get(column));
+                }
+            });
+            return StringUtils.join(values, PRIMARY_DELIMITER);
+        }
     }
 }
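
The reworked xx3Hash appends values in the declared column order, substituting an empty string for columns missing from the row map, so the hash no longer depends on the map's iteration order. A small sketch of that behavior; the LongHashFunction.xx3() factory call is an assumption standing in for the class's XX_3_HASH constant, which is defined outside this hunk:

import net.openhft.hashing.LongHashFunction;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RowHashSketch {
    public static void main(String[] args) {
        LongHashFunction xx3 = LongHashFunction.xx3();
        Map<String, String> row = new HashMap<>();
        row.put("name", "alice");
        row.put("id", "42");
        List<String> columns = List.of("id", "name", "missing_col");
        // values appended in declared column order; "" for columns absent from the row
        StringBuilder buffer = new StringBuilder();
        for (String column : columns) {
            buffer.append(row.getOrDefault(column, ""));
        }
        System.out.println(xx3.hashChars(buffer)); // same result regardless of map ordering
    }
}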
diff --git a/datachecker-extract/src/main/resources/application-sink.yml b/datachecker-extract/src/main/resources/application-sink.yml
index 2cd0275..d9348e1 100644
--- a/datachecker-extract/src/main/resources/application-sink.yml
+++ b/datachecker-extract/src/main/resources/application-sink.yml
@@ -43,4 +43,5 @@ spring:
       maxPoolPreparedStatementPerConnectionSize: 20
       useGlobalDataSourceStat: true
       connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=500
+      maxAllowedPacketSize: 1073741824
 
diff --git a/datachecker-extract/src/main/resources/application-source.yml b/datachecker-extract/src/main/resources/application-source.yml
index a74745c..37cf04b 100644
--- a/datachecker-extract/src/main/resources/application-source.yml
+++ b/datachecker-extract/src/main/resources/application-source.yml
@@ -49,3 +49,4 @@ spring:
       maxPoolPreparedStatementPerConnectionSize: 20
       useGlobalDataSourceStat: true
       connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=500
+      maxAllowedPacketSize: 1073741824
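
Both extract profiles gain the same Druid connection setting; 1073741824 bytes is 1 GiB, presumably raised to tolerate large row or batch payloads during extraction.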
diff --git a/datachecker-extract/src/test/java/org/opengauss/datachecker/extract/dao/BaseDataResultSetHandlerTest.java b/datachecker-extract/src/test/java/org/opengauss/datachecker/extract/dao/BaseDataResultSetHandlerTest.java
index 1b7dd71..f18c42e 100644
--- a/datachecker-extract/src/test/java/org/opengauss/datachecker/extract/dao/BaseDataResultSetHandlerTest.java
+++ b/datachecker-extract/src/test/java/org/opengauss/datachecker/extract/dao/BaseDataResultSetHandlerTest.java
@@ -39,7 +39,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
 import java.util.function.BiFunction;
 import java.util.stream.Collectors;
 
@@ -129,10 +128,8 @@ public class BaseDataResultSetHandlerTest {
             preparedStatement = connection.prepareStatement(executeQueryStatement);
             resultSet = preparedStatement.executeQuery();
             ResultSetMetaData rsmd = resultSet.getMetaData();
-            Map values = new TreeMap<>();
             while (resultSet.next()) {
-                resultSetHandler.putOneResultSetToMap(tableName, rsmd, resultSet, values);
-                result.add(new HashMap<>(values));
+                result.add(resultSetHandler.putOneResultSetToMap(tableName, rsmd, resultSet));
             }
         } catch (SQLException sqlErr) {
             log.error("test table [{}] error", tableName, sqlErr);
diff --git a/run.sh b/run.sh
new file mode 100644
index 0000000..5dc5117
--- /dev/null
+++ b/run.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+
+run_path=$(cd `dirname $0`; pwd)
+
+function parse_app_info() {
+    local file_name="$1"
+    if [[ $file_name =~ ^(datachecker-check|datachecker-extract)-([0-9]+\.[0-9]+\.[0-9]+(\.[a-zA-Z0-9]+)?)\.jar$ ]]; then
+        local app_name="${BASH_REMATCH[1]}"
+        local app_version="${BASH_REMATCH[2]}"
+        echo "$app_name"
+        echo "$app_version"
+    else
+        echo "Invalid file name format: $file_name"
+        exit 1
+    fi
+}
+
+check_file=$(find $run_path -maxdepth 1 -name "datachecker-check-*.jar" | head -n 1)
+if [ -n "$check_file" ]; then
+    IFS='-' read -ra parts <<< "$(basename $check_file)"
+    app_check_name=$(basename $check_file)
+    app_check_version="${parts[-1]%.jar}"
+else
+    echo "No datachecker-check application file found."
+    exit 1
+fi
+
+extract_files=$(find $run_path -maxdepth 1 -name "datachecker-extract-*.jar")
+if [ -n "$extract_files" ]; then
+    app_extract_names=()
+    app_extract_versions=()
+    for extract_file in $extract_files; do
+        IFS='-' read -ra parts <<< "$(basename $extract_file)"
+        app_extract_names+=("$(basename $extract_file)")
+        app_extract_versions+=("${parts[-1]%.jar}")
+    done
+else
+    echo "No datachecker-extract application file found."
+    exit 1
+fi
+
+extract_source="--source"
+extract_sink="--sink"
+
+function start_apps() {
+    echo "Starting datachecker-check application..."
+    sleep 1s
+    nohup java -jar $run_path/$app_check_name > /dev/null 2>&1 &
+    echo "datachecker-check started with PID: $!"
+    sleep 2s
+    echo "Starting datachecker-extract applications..."
+    nohup java -jar $run_path/${app_extract_names[0]} $extract_source > /dev/null 2>&1 &
+    echo "datachecker-extract instance $extract_source started with PID: $!"
+    sleep 2s
+    nohup java -jar $run_path/${app_extract_names[0]} $extract_sink > /dev/null 2>&1 &
+    echo "datachecker-extract instance $extract_sink started with PID: $!"
+    sleep 2s
+}
+
+function stop_apps() {
+    echo "Stopping datachecker-check application..."
+    pids=$(ps -ef | grep "$run_path/$app_check_name" | grep -v grep | awk '{print $2}')
+    if [ -n "$pids" ]; then
+        for pid in $pids; do
+            kill $pid
+            sleep 2s
+            echo "Killed datachecker-check process with PID: $pid"
+        done
+    else
+        echo "datachecker-check application is not running."
+    fi
+
+    echo "Stopping datachecker-extract applications..."
+    for ((i = 0; i < 2; i++)); do
+        extract_name=${app_extract_names[$i]:-${app_extract_names[0]}}
+        pids=$(ps -ef | grep "$run_path/$extract_name" | grep -v grep | awk '{print $2}')
+        if [ -n "$pids" ]; then
+            for pid in $pids; do
+                kill $pid
+                sleep 2s
+                echo "Killed datachecker-extract instance $((i + 1)) process with PID: $pid"
+            done
+        else
+            echo "datachecker-extract instance $((i + 1)) is not running."
+        fi
+    done
+}
+
+case "$1" in
+    "start")
+        start_apps
+        ;;
+    "stop")
+        stop_apps
+        ;;
+    *)
+        echo "Usage: $0 {start|stop}"
+        exit 1
+        ;;
+esac
\ No newline at end of file
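
Usage note: with the datachecker-check and datachecker-extract jars placed alongside run.sh, ./run.sh start launches the check service and the two extract instances (--source and --sink) in the background, and ./run.sh stop terminates them.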
-- 
Gitee