
feat: configure the sync service

xdd · 2 weeks ago · commit 80175e61e8

+ 33 - 0
fs-sync/.gitignore

@@ -0,0 +1,33 @@
+HELP.md
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**/target/
+!**/src/test/**/target/
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/
+
+### VS Code ###
+.vscode/

+ 80 - 0
fs-sync/pom.xml

@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.example</groupId>
+    <artifactId>fs-sync</artifactId>
+    <version>0.0.1-SNAPSHOT</version>
+    <name>fs-sync</name>
+    <description>fs-sync</description>
+    <parent>
+        <artifactId>fs</artifactId>
+        <groupId>com.fs</groupId>
+        <version>1.1.0</version>
+    </parent>
+    <properties>
+        <java.version>1.8</java.version>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+    </properties>
+    <dependencies>
+
+        <!-- Flink CDC related dependencies -->
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-base</artifactId>
+            <version>1.14.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.ververica</groupId>
+            <artifactId>flink-sql-connector-mysql-cdc</artifactId>
+            <version>2.3.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-streaming-java_2.12</artifactId>
+            <version>1.14.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-clients_2.12</artifactId>
+            <version>1.14.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-runtime-web_2.12</artifactId>
+            <version>1.14.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-table-runtime_2.12</artifactId>
+            <version>1.14.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fs</groupId>
+            <artifactId>fs-service-system</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.8.1</version>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                    <encoding>UTF-8</encoding>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

+ 23 - 0
fs-sync/src/main/java/com/fs/fssync/FsSyncApplication.java

@@ -0,0 +1,23 @@
+package com.fs.fssync;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
+import org.springframework.boot.autoconfigure.web.servlet.ServletWebServerFactoryAutoConfiguration;
+import org.springframework.boot.autoconfigure.web.servlet.WebMvcAutoConfiguration;
+
+@SpringBootApplication(
+        exclude = {
+                WebMvcAutoConfiguration.class,
+                ServletWebServerFactoryAutoConfiguration.class,
+                DataSourceAutoConfiguration.class,
+                DataSourceTransactionManagerAutoConfiguration.class
+        }
+)
+public class FsSyncApplication {
+    public static void main(String[] args) {
+        SpringApplication.run(FsSyncApplication.class, args);
+    }
+
+}

+ 72 - 0
fs-sync/src/main/java/com/fs/fssync/config/FastJson2JsonRedisSerializer.java

@@ -0,0 +1,72 @@
+package com.fs.fssync.config;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.parser.ParserConfig;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.fasterxml.jackson.databind.JavaType;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.type.TypeFactory;
+import org.springframework.data.redis.serializer.RedisSerializer;
+import org.springframework.data.redis.serializer.SerializationException;
+import org.springframework.util.Assert;
+
+import java.nio.charset.Charset;
+
+/**
+ * Redis serializer based on FastJSON.
+ */
+public class FastJson2JsonRedisSerializer<T> implements RedisSerializer<T>
+{
+    @SuppressWarnings("unused")
+    private ObjectMapper objectMapper = new ObjectMapper();
+
+    public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");
+
+    private Class<T> clazz;
+
+    static
+    {
+        ParserConfig.getGlobalInstance().setAutoTypeSupport(true);
+    }
+
+    public FastJson2JsonRedisSerializer(Class<T> clazz)
+    {
+        super();
+        this.clazz = clazz;
+    }
+
+    @Override
+    public byte[] serialize(T t) throws SerializationException
+    {
+        if (t == null)
+        {
+            return new byte[0];
+        }
+        return JSON.toJSONString(t, SerializerFeature.WriteClassName).getBytes(DEFAULT_CHARSET);
+    }
+
+    @Override
+    public T deserialize(byte[] bytes) throws SerializationException
+    {
+        if (bytes == null || bytes.length <= 0)
+        {
+            return null;
+        }
+        String str = new String(bytes, DEFAULT_CHARSET);
+
+        return JSON.parseObject(str, clazz);
+    }
+
+    public void setObjectMapper(ObjectMapper objectMapper)
+    {
+        Assert.notNull(objectMapper, "'objectMapper' must not be null");
+        this.objectMapper = objectMapper;
+    }
+
+    protected JavaType getJavaType(Class<?> clazz)
+    {
+        return TypeFactory.defaultInstance().constructType(clazz);
+    }
+}

+ 48 - 0
fs-sync/src/main/java/com/fs/fssync/config/FlinkConfig.java

@@ -0,0 +1,48 @@
+package com.fs.fssync.config;
+
+import lombok.Data;
+import lombok.NoArgsConstructor;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+@Data
+@NoArgsConstructor
+@Component
+@ConfigurationProperties(prefix = "flink")
+public class FlinkConfig {
+    private CdcConfig cdc = new CdcConfig();
+    private CheckpointConfig checkpoint = new CheckpointConfig();
+    private ParallelismConfig parallelism = new ParallelismConfig();
+
+    @Data
+    @NoArgsConstructor
+    public static class CdcConfig {
+        private MySqlConfig mysql = new MySqlConfig();
+    }
+
+    @Data
+    @NoArgsConstructor
+    public static class MySqlConfig {
+        private String hostname;
+        private int port;
+        private String databaseList;
+        private String tableList;
+        private String username;
+        private String password;
+        private String serverTimeZone;
+        private String startupOptions;
+    }
+
+    @Data
+    @NoArgsConstructor
+    public static class CheckpointConfig {
+        private int interval;
+    }
+
+    @Data
+    @NoArgsConstructor
+    public static class ParallelismConfig {
+        private int source;
+        private int sink;
+    }
+}

+ 43 - 0
fs-sync/src/main/java/com/fs/fssync/config/RedisConfig.java

@@ -0,0 +1,43 @@
+package com.fs.fssync.config;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.springframework.cache.annotation.CachingConfigurerSupport;
+import org.springframework.cache.annotation.EnableCaching;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.redis.connection.RedisConnectionFactory;
+import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.data.redis.serializer.StringRedisSerializer;
+
+/**
+ * Redis configuration.
+ */
+@Configuration
+@EnableCaching
+public class RedisConfig extends CachingConfigurerSupport
+{
+    @Bean
+    @SuppressWarnings(value = { "unchecked", "rawtypes" })
+    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory connectionFactory)
+    {
+        RedisTemplate<String, Object> template = new RedisTemplate<>();
+        template.setConnectionFactory(connectionFactory);
+
+        FastJson2JsonRedisSerializer serializer = new FastJson2JsonRedisSerializer(Object.class);
+
+        ObjectMapper mapper = new ObjectMapper();
+        mapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
+        mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
+        serializer.setObjectMapper(mapper);
+
+        template.setValueSerializer(serializer);
+        // Use StringRedisSerializer to serialize and deserialize Redis keys
+        template.setKeySerializer(new StringRedisSerializer());
+        template.afterPropertiesSet();
+        return template;
+    }
+}
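
For context (not part of this commit): once the sync service has written an entity under its key prefix, any bean holding this RedisTemplate can read it back. A minimal consumer sketch follows, assuming the key pattern fs:user:{id} from FsUserSinkStrategy; the reader class and its package are hypothetical.

package com.fs.fssync.example;

import com.fs.store.domain.FsUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

@Component
public class FsUserCacheReader {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /** Look up a user synced by the CDC pipeline; returns null if not cached. */
    public FsUser findUser(Long id) {
        // Key prefix matches FsUserSinkStrategy.getKeyPrefix()
        Object cached = redisTemplate.opsForValue().get("fs:user:" + id);
        return cached instanceof FsUser ? (FsUser) cached : null;
    }
}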

+ 34 - 0
fs-sync/src/main/java/com/fs/fssync/listener/CustomSink.java

@@ -0,0 +1,34 @@
+package com.fs.fssync.listener;
+
+import com.alibaba.fastjson.JSON;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
+import org.springframework.stereotype.Component;
+
+/**
+ * Flink CDC data handling sink
+ *
+ * @author jokey
+ * @since 2024-3-14
+ */
+@Slf4j
+@Component
+public class CustomSink extends RichSinkFunction<String> {
+
+    @Override
+    public void invoke(String value, Context context) {
+        log.info("收到变更原始数据:{}", value);
+
+        String op = JSON.parseObject(value).getString("op");
+        if ("c".equals(op)) {
+            log.info("新增了一条数据 ...");
+        } else if ("u".equals(op)) {
+            log.info("更新了一条数据 ...");
+        } else if ("d".equals(op)) {
+            log.info("删除了一条数据 ...");
+        } else {
+            log.error("{} 未知的操作类型!", op);
+        }
+    }
+
+}
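
As committed, CustomSink only logs the change events; the CdcSinkStrategy classes added further down are not yet wired to it. One possible wiring is sketched below: route each event to the matching strategy by the table name in the Debezium source block. The class name StrategyDispatchSink and the bean lookup via Hutool's SpringUtil are assumptions, not part of this commit, and they rely on the Flink job running embedded in the same JVM as the Spring context (as it does here).

package com.fs.fssync.listener;

import cn.hutool.extra.spring.SpringUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.fs.fssync.sink.CdcSinkStrategy;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.util.Map;

@Slf4j
public class StrategyDispatchSink extends RichSinkFunction<String> {

    @Override
    public void invoke(String value, Context context) throws Exception {
        JSONObject root = JSON.parseObject(value);
        // Debezium records carry the originating table in source.table
        String table = root.getJSONObject("source").getString("table");

        Map<String, CdcSinkStrategy> strategies = SpringUtil.getBeansOfType(CdcSinkStrategy.class);
        for (CdcSinkStrategy<?> strategy : strategies.values()) {
            if (strategy.canHandle(table)) {
                strategy.process(value);
                return;
            }
        }
        log.warn("No sink strategy registered for table {}", table);
    }
}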

+ 84 - 0
fs-sync/src/main/java/com/fs/fssync/listener/MySqlEventListener.java

@@ -0,0 +1,84 @@
+package com.fs.fssync.listener;
+
+import com.fs.fssync.config.FlinkConfig;
+import com.ververica.cdc.connectors.mysql.source.MySqlSource;
+import com.ververica.cdc.connectors.mysql.table.StartupOptions;
+import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.CommandLineRunner;
+import org.springframework.stereotype.Component;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * MySQL event listener (Flink CDC source)
+ *
+ * @author jokey
+ * @since 2024-3-14
+ */
+@Slf4j
+@Component
+public class MySqlEventListener implements CommandLineRunner {
+
+    @Autowired
+    private CustomSink customSink;
+    @Autowired
+    private FlinkConfig flinkConfig;
+
+    @Override
+    public void run(String... args) throws Exception {
+        FlinkConfig.MySqlConfig mysqlConfig = flinkConfig.getCdc().getMysql();
+        Map<String, String> configMap = new HashMap<>();
+        configMap.put("decimal.handling.mode", "string");
+        configMap.put("bigint.unsigned.handling.mode", "long");
+        // Date/time type handling
+        configMap.put("time.precision.mode", "adaptive_time_microseconds"); // TIME precision handling
+        configMap.put("datetime.handling.mode", "string");     // DATETIME handling
+        // Binary data handling
+        configMap.put("binary.handling.mode", "bytes");        // BINARY/VARBINARY types
+        configMap.put("json.handling.mode", "string");         // JSON type
+        configMap.put("geometry.handling.mode", "string");     // Geometry types
+        configMap.put("include.unknown.datatypes", "false");   // Unknown data type handling
+        // Convert the Map into Properties
+        Properties debeziumProps = new Properties();
+        debeziumProps.putAll(configMap);
+        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
+                .debeziumProperties(debeziumProps)
+                .hostname(mysqlConfig.getHostname())
+                .port(mysqlConfig.getPort())
+                .databaseList(mysqlConfig.getDatabaseList())
+                .tableList(mysqlConfig.getTableList())
+                .username(mysqlConfig.getUsername())
+                .password(mysqlConfig.getPassword())
+                .deserializer(new JsonDebeziumDeserializationSchema())
+                .startupOptions(getStartupOptions(mysqlConfig.getStartupOptions()))
+                .serverTimeZone(mysqlConfig.getServerTimeZone())
+                .build();
+        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+        env.enableCheckpointing(flinkConfig.getCheckpoint().getInterval());
+        DataStreamSource<String> streamSource = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source")
+                .setParallelism(flinkConfig.getParallelism().getSource());
+
+        streamSource.addSink(customSink).setParallelism(flinkConfig.getParallelism().getSink());
+        env.execute("Print MySQL Snapshot + Binlog");
+    }
+    private StartupOptions getStartupOptions(String option) {
+        if (option == null) {
+            return StartupOptions.latest();
+        }
+
+        switch (option.toLowerCase()) {
+            case "initial": return StartupOptions.initial();
+            case "latest": return StartupOptions.latest();
+            // Other options such as specificOffset or timestamp could be added here (see the sketch after this file)
+            default: return StartupOptions.latest();
+        }
+    }
+}
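
Following the comment in getStartupOptions, a possible extension mapping the remaining startup modes is sketched below. It is only a sketch for MySqlEventListener: the timestampMillis, binlogFile and binlogPos parameters are hypothetical and would need matching fields in FlinkConfig.MySqlConfig, and the StartupOptions.timestamp / specificOffset factory methods are assumed to be available in the flink-sql-connector-mysql-cdc 2.3.0 dependency declared above.

// Sketch only: extended startup-option mapping. The extra parameters are
// hypothetical config values, not fields of the committed FlinkConfig.MySqlConfig.
private StartupOptions getStartupOptions(String option, long timestampMillis,
                                         String binlogFile, long binlogPos) {
    if (option == null) {
        return StartupOptions.latest();
    }
    switch (option.toLowerCase()) {
        case "initial":
            return StartupOptions.initial();
        case "timestamp":
            // Start reading the binlog from the given epoch milliseconds
            return StartupOptions.timestamp(timestampMillis);
        case "specificoffset":
            // Start from an explicit binlog file name and position
            return StartupOptions.specificOffset(binlogFile, binlogPos);
        case "latest":
        default:
            return StartupOptions.latest();
    }
}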

+ 75 - 0
fs-sync/src/main/java/com/fs/fssync/sink/AbstractCdcSinkStrategy.java

@@ -0,0 +1,75 @@
+package com.fs.fssync.sink;
+
+import cn.hutool.extra.spring.SpringUtil;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.PropertyNamingStrategy;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.redis.core.RedisTemplate;
+
+import java.lang.reflect.Field;
+
+@Slf4j
+public abstract class AbstractCdcSinkStrategy<T> implements CdcSinkStrategy<T> {
+
+    private final ObjectMapper objectMapper;
+    @Autowired
+    protected RedisTemplate<String, Object> redisTemplate;
+
+    protected AbstractCdcSinkStrategy() {
+        this.objectMapper = new ObjectMapper();
+        this.objectMapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
+        this.objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
+    }
+
+    @Override
+    public void process(String cdcData) throws Exception {
+
+        JsonNode rootNode = objectMapper.readTree(cdcData);
+        String op = rootNode.get("op").asText();
+
+        if ("c".equals(op)) {
+            T after = objectMapper.treeToValue(rootNode.get("after"), getEntityClass());
+            handleCreate(after);
+        } else if ("u".equals(op)) {
+            T after = objectMapper.treeToValue(rootNode.get("after"), getEntityClass());
+            handleUpdate(after);
+        } else if ("d".equals(op)) {
+            T before = objectMapper.treeToValue(rootNode.get("before"), getEntityClass());
+            handleDelete(before);
+        } else {
+            log.warn("Unknown operation type: {}", op);
+        }
+    }
+
+    protected void handleCreate(T entity) throws Exception {
+        Object id = getIdValue(entity);
+        if (id != null) {
+            String key = getKeyPrefix() + id;
+            redisTemplate.opsForValue().set(key, entity);
+            log.info("数据已写入 Redis, key={}", key);
+        }
+    }
+
+    protected void handleUpdate(T entity) throws Exception {
+        handleCreate(entity);
+    }
+
+    protected void handleDelete(T entity) throws Exception {
+        Object id = getIdValue(entity);
+        if (id != null) {
+            String key = getKeyPrefix() + id;
+            redisTemplate.delete(key);
+            log.info("数据已从 Redis 删除, key={}", key);
+        }
+    }
+
+    protected Object getIdValue(T entity) throws Exception {
+        // Read the value of the id field; a reflection utility could be used instead (see the sketch after this file)
+        Field field = getEntityClass().getDeclaredField("id");
+        field.setAccessible(true);
+        return field.get(entity);
+    }
+}
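
As the comment in getIdValue notes, the raw Field access could be replaced by a utility. A minimal alternative sketch, assuming Hutool core (cn.hutool:hutool-core or hutool-all) is available on the classpath, which the project's other cn.hutool references suggest; not part of the committed code.

// Alternative sketch: resolve the id via Hutool's ReflectUtil instead of raw reflection.
protected Object getIdValue(T entity) {
    return cn.hutool.core.util.ReflectUtil.getFieldValue(entity, "id");
}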

+ 23 - 0
fs-sync/src/main/java/com/fs/fssync/sink/CdcSinkStrategy.java

@@ -0,0 +1,23 @@
+package com.fs.fssync.sink;
+
+public interface CdcSinkStrategy<T> {
+    /**
+     * Check whether this strategy can handle CDC data for the given table
+     */
+    boolean canHandle(String table);
+
+    /**
+     * Process one CDC change record
+     */
+    void process(String cdcData) throws Exception;
+
+    /**
+     * Entity class handled by this strategy
+     */
+    Class<T> getEntityClass();
+
+    /**
+     * Redis key prefix for this entity
+     */
+    String getKeyPrefix();
+}

+ 24 - 0
fs-sync/src/main/java/com/fs/fssync/sink/impl/FsUserSinkStrategy.java

@@ -0,0 +1,24 @@
+package com.fs.fssync.sink.impl;
+
+import com.fs.fssync.sink.AbstractCdcSinkStrategy;
+import com.fs.store.domain.FsUser;
+import org.springframework.stereotype.Component;
+
+@Component
+public class FsUserSinkStrategy extends AbstractCdcSinkStrategy<FsUser> {
+
+    @Override
+    public boolean canHandle(String table) {
+        return "fs_user".equals(table);
+    }
+
+    @Override
+    public Class<FsUser> getEntityClass() {
+        return FsUser.class;
+    }
+
+    @Override
+    public String getKeyPrefix() {
+        return "fs:user:";
+    }
+}

+ 25 - 0
fs-sync/src/main/resources/application-dev.yml

@@ -0,0 +1,25 @@
+# Dev profile configuration (Jackson, Redis)
+spring:
+    jackson:
+        time-zone: GMT+8 # set the time zone explicitly if time-zone issues occur
+    # Redis configuration
+    redis:
+        # Host address
+        host: 127.0.0.1
+        # Port, defaults to 6379
+        port: 6379
+        # Password
+        password:
+        # Connection timeout
+        timeout: 30s
+        lettuce:
+            pool:
+                # Minimum idle connections in the pool
+                min-idle: 0
+                # Maximum idle connections in the pool
+                max-idle: 8
+                # Maximum number of connections in the pool
+                max-active: 8
+                # Maximum blocking wait time (a negative value means no limit)
+                max-wait: -1ms
+        database: 0

+ 48 - 0
fs-sync/src/main/resources/application.yml

@@ -0,0 +1,48 @@
+logging:
+  level:
+    org.apache.flink.runtime.checkpoint: warn
+    org.apache.flink.runtime.source.coordinator.SourceCoordinator: warn
+    com.ververica.cdc.connectors.mysql.source.reader.MySqlSourceReader: warn
+
+flink:
+  cdc:
+    mysql:
+      hostname: gz-cdb-of55khc9.sql.tencentcdb.com
+      port: 23620
+      databaseList: fs_hospital
+      tableList: fs_hospital.fs_user
+      username: root
+      password: Rtyy_2023
+      serverTimeZone: Asia/Shanghai
+      startupOptions: latest # allowed values: latest, initial, specificOffset, timestamp
+  checkpoint:
+    interval: 3000 # in milliseconds
+  parallelism:
+    source: 1
+    sink: 1
+
+# Development environment configuration
+server:
+  # HTTP port of the server (default 7011; store uses 7111); ignored while web-application-type is none
+  port: 7015
+  servlet:
+    # Application context path
+    context-path: /
+  tomcat:
+    # Tomcat URI encoding
+    uri-encoding: UTF-8
+    # Tomcat maximum worker threads, default 200
+    max-threads: 800
+    # Tomcat initial threads at startup, default 25
+    min-spare-threads: 30
+# Spring configuration
+spring:
+  main:
+    web-application-type: none
+  # Resource settings
+  messages:
+    # i18n resource bundle path
+    basename: i18n/messages
+  profiles:
+    active: dev
+    include: config

+ 13 - 0
fs-sync/src/test/java/com/fs/fssync/FsSyncApplicationTests.java

@@ -0,0 +1,13 @@
+package com.fs.fssync;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+@SpringBootTest
+class FsSyncApplicationTests {
+
+    @Test
+    void contextLoads() {
+    }
+
+}

+ 1 - 0
pom.xml

@@ -218,6 +218,7 @@
         <module>fs-user-app</module>
         <module>fs-api</module>
         <module>fs-socket</module>
+        <module>fs-sync</module>
     </modules>
     <packaging>pom</packaging>