diff --git a/.github/dependabot.yml b/.github/dependabot.yml index b4b1d8e39d..508014de35 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -21,18 +21,23 @@ updates: directory: "/" open-pull-requests-limit: 15 schedule: - interval: "weekly" + interval: "monthly" ignore: - dependency-name: "*" - update-types: [ "version-update:semver-major" ] + update-types: [ "version-update:semver-major", "version-update:semver-patch" ] + - dependency-name: "software.amazon.awssdk:s3" + update-types: [ "version-update:semver-patch" ] + - dependency-name: "com.aliyun:dingtalk" + update-types: [ "version-update:semver-patch" ] - package-ecosystem: "gomod" directory: "eventmesh-sdks/eventmesh-sdk-go" + # Disabled temporarily since the Go SDK is not integrated with CI + open-pull-requests-limit: 0 schedule: interval: "monthly" ignore: - dependency-name: "*" - # Disabled temporarily since the Go SDK is not integrated with CI - update-types: [ "version-update:semver-major", "version-update:semver-minor", "version-update:semver-patch" ] + update-types: [ "version-update:semver-major", "version-update:semver-patch" ] - package-ecosystem: "github-actions" directory: "/" schedule: diff --git a/.github/workflows/auto-dependabot.yml b/.github/workflows/auto-dependabot.yml index 0d43a05866..512a43f6ec 100644 --- a/.github/workflows/auto-dependabot.yml +++ b/.github/workflows/auto-dependabot.yml @@ -27,7 +27,7 @@ jobs: # Pull request Auto merge is not enabled for this repository dependabot: runs-on: ubuntu-latest - if: github.actor == 'dependabot[bot]' + if: github.event.pull_request.user.login == 'dependabot[bot]' steps: - name: Dependabot metadata id: metadata diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce5eae95b7..d63c381107 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,16 +39,15 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + submodules: true - - if: matrix.language == 'cpp' || matrix.language == 'csharp' - name: Build C - run: | - git submodule init - git submodule update - make -C ./eventmesh-sdks/eventmesh-sdk-c + - name: Build C SDK + if: matrix.language == 'cpp' + run: make -C ./eventmesh-sdks/eventmesh-sdk-c - name: Setup Gradle - uses: gradle/actions/setup-gradle@v3 + uses: gradle/actions/setup-gradle@v4 - name: Set up JDK 11 uses: actions/setup-java@v4 @@ -59,7 +58,7 @@ jobs: - name: GenerateGrammarSource run: ./gradlew clean generateGrammarSource --parallel --daemon --scan env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} - name: Set up JDK ${{ matrix.java }} uses: actions/setup-java@v4 @@ -69,14 +68,16 @@ jobs: # https://docs.gradle.org/current/userguide/performance.html - name: Build - run: ./gradlew clean build dist jacocoTestReport -x spotlessJava -x generateGrammarSource --parallel --daemon --scan + run: > + ./gradlew clean build dist jacocoTestReport --parallel --daemon --scan + -x spotlessJava -x generateGrammarSource -x generateDistLicense -x checkDeniedLicense env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} - name: Install plugin run: ./gradlew installPlugin --scan env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} - name: Upload coverage report to codecov.io run: bash <(curl -s https://codecov.io/bash) || echo 'Failed to upload coverage report!' 
diff --git a/.github/workflows/code-scanning.yml b/.github/workflows/code-scanning.yml index 0a7b2ecb28..5476923b0a 100644 --- a/.github/workflows/code-scanning.yml +++ b/.github/workflows/code-scanning.yml @@ -60,7 +60,7 @@ jobs: - name: Setup Gradle if: matrix.language == 'java' - uses: gradle/actions/setup-gradle@v3 + uses: gradle/actions/setup-gradle@v4 with: cache-disabled: true @@ -68,7 +68,7 @@ jobs: if: matrix.language == 'java' run: ./gradlew clean assemble compileTestJava --parallel --daemon --scan env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} - name: Perform CodeQL analysis uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index dfb36970d0..171c787369 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -41,10 +41,10 @@ jobs: apache/eventmesh - name: Build and push - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} file: ./docker/Dockerfile_jdk8 - context: ./ + context: ./ \ No newline at end of file diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml index ca0e0936a6..9172e85414 100644 --- a/.github/workflows/license.yml +++ b/.github/workflows/license.yml @@ -38,7 +38,7 @@ jobs: java-version: 11 - name: Setup Gradle - uses: gradle/actions/setup-gradle@v3 + uses: gradle/actions/setup-gradle@v4 - name: Check license compatibility run: ./gradlew clean checkDeniedLicense diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index c1e4f4ab15..8b5b166215 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -42,7 +42,7 @@ jobs: It has been 60 days since the last activity on this pull request. I am reaching out here to gently remind you that the Apache EventMesh community values every pull request, and please feel free to get in touch with the reviewers at any time. They are available to assist you in advancing the progress of your pull request and offering the latest feedback. If you encounter any challenges during development, seeking support within the community is encouraged. We sincerely appreciate your contributions to Apache EventMesh. 
- exempt-issue-labels: 'pinned,discussion,help wanted,WIP,weopen-star,GLCC,summer of code' + exempt-issue-labels: 'pinned,discussion,help wanted,WIP,weopen-star,GLCC,GSoC' exempt-pr-labels: 'help wanted,dependencies' exempt-all-milestones: true # Exempt all issues/PRs with milestones from stale operations-per-run: 300 diff --git a/.idea/icon.png b/.idea/icon.png deleted file mode 100644 index 59aa06dac9..0000000000 Binary files a/.idea/icon.png and /dev/null differ diff --git a/build.gradle b/build.gradle index 3fc27890d1..c094f62551 100644 --- a/build.gradle +++ b/build.gradle @@ -42,17 +42,17 @@ buildscript { dependencies { classpath "com.github.spotbugs.snom:spotbugs-gradle-plugin:5.2.5" - classpath "io.spring.gradle:dependency-management-plugin:1.0.11.RELEASE" + classpath "io.spring.gradle:dependency-management-plugin:1.1.6" classpath "com.diffplug.spotless:spotless-plugin-gradle:6.13.0" classpath "org.apache.httpcomponents:httpclient:4.5.14" - classpath "commons-io:commons-io:2.11.0" + classpath "commons-io:commons-io:2.17.0" } } plugins { id 'org.cyclonedx.bom' version '1.8.2' - id 'com.github.jk1.dependency-license-report' version '2.7' + id 'com.github.jk1.dependency-license-report' version '2.9' } allprojects { @@ -95,10 +95,12 @@ allprojects { checkstyleMain.exclude('**/org/apache/eventmesh/client/grpc/protos**') .exclude('**/org/apache/eventmesh/common/protocol/grpc/cloudevents**') - .exclude('**/org/apache/eventmesh/common/protocol/grpc/protos/**') + .exclude('**/org/apache/eventmesh/common/protocol/grpc/proto**') + .exclude('**/org/apache/eventmesh/common/protocol/grpc/adminserver/**') .exclude('**/org/apache/eventmesh/connector/openfunction/client/EventMeshGrpcService**') .exclude('**/org/apache/eventmesh/connector/openfunction/client/CallbackServiceGrpc**') .exclude('**/org/apache/eventmesh/connector/jdbc/antlr**') + .exclude('**/org/apache/eventmesh/meta/raft/rpc/**') dependencies { repositories { @@ -107,7 +109,7 @@ allprojects { url "https://maven.aliyun.com/repository/public" } } - testImplementation "org.junit.jupiter:junit-jupiter:5.6.0" + testImplementation "org.junit.jupiter:junit-jupiter:5.11.0" } spotless { @@ -161,9 +163,13 @@ tasks.register('dist') { ["eventmesh-common", "eventmesh-meta:eventmesh-meta-api", "eventmesh-metrics-plugin:eventmesh-metrics-api", + "eventmesh-openconnect:eventmesh-openconnect-java", + "eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api", "eventmesh-protocol-plugin:eventmesh-protocol-api", + "eventmesh-registry:eventmesh-registry-api", "eventmesh-retry:eventmesh-retry-api", "eventmesh-runtime", + "eventmesh-runtime-v2", "eventmesh-security-plugin:eventmesh-security-api", "eventmesh-spi", "eventmesh-starter", @@ -204,6 +210,48 @@ tasks.register('dist') { } } +tasks.register('dist-admin') { + subprojects.forEach { subProject -> + dependsOn("${subProject.path}:jar") + } + def includedProjects = + [ + "eventmesh-admin-server", + "eventmesh-common", + "eventmesh-spi", + "eventmesh-registry:eventmesh-registry-api", + "eventmesh-registry:eventmesh-registry-nacos", + "eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api" + ] + doLast { + includedProjects.each { + def subProject = findProject(it) + copy { + from subProject.jar.archivePath + into rootProject.file('dist/apps') + } + copy { + from subProject.configurations.runtimeClasspath + into rootProject.file('dist/lib') + exclude 'eventmesh-*' + } + copy { + from subProject.file('bin') + into 
rootProject.file('dist/bin') + } + copy { + from subProject.file('conf') + from subProject.sourceSets.main.resources.srcDirs + into rootProject.file('dist/conf') + duplicatesStrategy = DuplicatesStrategy.EXCLUDE + exclude 'META-INF' + } + + } + } + +} + tasks.register('installPlugin') { var pluginProjects = subprojects.findAll { it.file('gradle.properties').exists() @@ -402,10 +450,11 @@ tasks.register('checkDeniedLicense') { "GPL-1.0", "GPL-2.0", "GPL-3.0", "AGPL-3.0", "LGPL-2.0", "LGPL-2.1", "LGPL-3.0", "GPL-1.0-only", "GPL-2.0-only", "GPL-3.0-only", "AGPL-3.0-only", "LGPL-2.0-only", "LGPL-2.1-only", "LGPL-3.0-only", "QPL-1.0", "Sleepycat", "SSPL-1.0", "CPOL-1.02", - "BSD-4-Clause", "BSD-4-Clause-UC", "NPL-1.0", "NPL-1.1", "JSON" + "BSD-4-Clause", "BSD-4-Clause-UC", "NPL-1.0", "NPL-1.1", "JSON", + "The GNU General Public License, v2 with Universal FOSS Exception, v1.0" ] // Update exemptions according to https://github.com/apache/eventmesh/issues/4842 - def allowedArtifacts = ["amqp-client", "stax-api", "javassist", "ST4", "xsdlib"] + def allowedArtifacts = ["amqp-client", "stax-api", "javassist", "ST4", "xsdlib", "jsqlparser"] def licenseFile = file('tools/dist-license/LICENSE') def lines = licenseFile.readLines() @@ -648,33 +697,30 @@ subprojects { sign publishing.publications.mavenJava } - def grpcVersion = '1.64.0' - def log4jVersion = '2.23.1' - def jacksonVersion = '2.17.1' - def dropwizardMetricsVersion = '4.2.25' + def grpcVersion = '1.68.0' + def log4jVersion = '2.24.1' + def jacksonVersion = '2.18.0' + def dropwizardMetricsVersion = '4.2.26' def opentelemetryVersion = '1.36.0' - def cloudeventsVersion = '2.5.0' - def curatorVersion = '5.6.0' - def mockitoVersion = '3.12.4' + def cloudeventsVersion = '3.0.0' + def curatorVersion = '5.7.0' + def mockitoVersion = '4.11.0' dependencyManagement { dependencies { - dependency "org.apache.commons:commons-lang3:3.6" + dependency "org.apache.commons:commons-lang3:3.17.0" dependency "org.apache.commons:commons-collections4:4.4" - dependency "org.apache.commons:commons-text:1.9" - - dependency "commons-io:commons-io:2.11.0" - - dependency "commons-validator:commons-validator:1.7" - - dependency "com.google.guava:guava:31.1-jre" + dependency "org.apache.commons:commons-text:1.12.0" + dependency "commons-io:commons-io:2.17.0" + dependency "commons-validator:commons-validator:1.9.0" + dependency "com.google.guava:guava:33.3.0-jre" dependency "org.slf4j:slf4j-api:2.0.13" dependency "org.apache.logging.log4j:log4j-api:${log4jVersion}" dependency "org.apache.logging.log4j:log4j-core:${log4jVersion}" dependency "org.apache.logging.log4j:log4j-slf4j2-impl:${log4jVersion}" - dependency "com.lmax:disruptor:3.4.2" + dependency "com.lmax:disruptor:3.4.4" dependency "com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}" dependency "com.fasterxml.jackson.core:jackson-core:${jacksonVersion}" @@ -684,10 +730,10 @@ subprojects { dependency "com.squareup.okhttp3:okhttp:3.14.9" - dependency "org.asynchttpclient:async-http-client:2.12.0" + dependency "org.asynchttpclient:async-http-client:2.12.3" dependency "org.apache.httpcomponents:httpclient:4.5.14" - dependency "io.netty:netty-all:4.1.79.Final" + dependency "io.netty:netty-all:4.1.112.Final" dependency "io.dropwizard.metrics:metrics-core:${dropwizardMetricsVersion}" dependency "io.dropwizard.metrics:metrics-healthchecks:${dropwizardMetricsVersion}" @@ -704,16 +750,16 @@ subprojects { dependency "io.openmessaging:openmessaging-api:2.2.1-pubsub" - dependency 
"com.h3xstream.findsecbugs:findsecbugs-plugin:1.11.0" - dependency "com.mebigfatguy.fb-contrib:fb-contrib:7.6.0" + dependency "com.h3xstream.findsecbugs:findsecbugs-plugin:1.13.0" + dependency "com.mebigfatguy.fb-contrib:fb-contrib:7.6.4" dependency "com.jayway.jsonpath:json-path:2.9.0" dependency "org.springframework.boot:spring-boot-starter-web:2.7.18" dependency "io.openmessaging:registry-server:0.0.1" - dependency "org.junit.jupiter:junit-jupiter:5.6.0" + dependency "org.junit.jupiter:junit-jupiter:5.11.0" dependency "org.junit-pioneer:junit-pioneer:1.9.1" - dependency "org.assertj:assertj-core:2.6.0" + dependency "org.assertj:assertj-core:3.26.3" dependency "org.mockito:mockito-core:${mockitoVersion}" dependency "org.mockito:mockito-inline:${mockitoVersion}" @@ -731,12 +777,12 @@ subprojects { dependency "javax.annotation:javax.annotation-api:1.3.2" - dependency "com.github.seancfoley:ipaddress:5.3.3" + dependency "com.github.seancfoley:ipaddress:5.5.0" dependency "com.google.code.gson:gson:2.11.0" dependency "org.javassist:javassist:3.30.2-GA" - dependency "com.alibaba.nacos:nacos-client:2.3.2" + dependency "com.alibaba.nacos:nacos-client:2.4.1" dependency 'org.apache.zookeeper:zookeeper:3.9.2' dependency "org.apache.curator:curator-client:${curatorVersion}" @@ -744,13 +790,18 @@ subprojects { dependency "org.apache.curator:curator-recipes:${curatorVersion}" dependency "org.apache.curator:curator-test:${curatorVersion}" - dependency "org.projectlombok:lombok:1.18.32" - dependency "com.github.seancfoley:ipaddress:5.3.3" + dependency "org.projectlombok:lombok:1.18.34" dependency "javax.annotation:javax.annotation-api:1.3.2" - dependency "com.alibaba.fastjson2:fastjson2:2.0.48" + dependency "com.alibaba.fastjson2:fastjson2:2.0.52" - dependency "software.amazon.awssdk:s3:2.25.55" + dependency "software.amazon.awssdk:s3:2.29.5" dependency "com.github.rholder:guava-retrying:2.0.0" + + dependency "com.alibaba:druid-spring-boot-starter:1.2.23" + dependency "com.baomidou:mybatis-plus-boot-starter:3.5.7" + dependency "com.mysql:mysql-connector-j:8.4.0" + dependency "org.springframework.boot:spring-boot-starter-jetty:2.7.18" + dependency "org.locationtech.jts:jts-core:1.20.0" } } } diff --git a/eventmesh-admin-server/.gitignore b/eventmesh-admin-server/.gitignore new file mode 100644 index 0000000000..b63da4551b --- /dev/null +++ b/eventmesh-admin-server/.gitignore @@ -0,0 +1,42 @@ +.gradle +build/ +!gradle/wrapper/gradle-wrapper.jar +!**/src/main/**/build/ +!**/src/test/**/build/ + +### IntelliJ IDEA ### +.idea/modules.xml +.idea/jarRepositories.xml +.idea/compiler.xml +.idea/libraries/ +*.iws +*.iml +*.ipr +out/ +!**/src/main/**/out/ +!**/src/test/**/out/ + +### Eclipse ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache +bin/ +!**/src/main/**/bin/ +!**/src/test/**/bin/ + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ + +### VS Code ### +.vscode/ + +### Mac OS ### +.DS_Store \ No newline at end of file diff --git a/eventmesh-admin-server/bin/start-admin.sh b/eventmesh-admin-server/bin/start-admin.sh new file mode 100644 index 0000000000..1633036617 --- /dev/null +++ b/eventmesh-admin-server/bin/start-admin.sh @@ -0,0 +1,200 @@ +#!/bin/bash +# +# Licensed to Apache Software Foundation (ASF) under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. 
Apache Software Foundation (ASF) licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +#=========================================================================================== +# Java Environment Setting +#=========================================================================================== +set -e +# Server configuration may be inconsistent, add these configurations to avoid garbled code problems +export LANG=en_US.UTF-8 +export LC_CTYPE=en_US.UTF-8 +export LC_ALL=en_US.UTF-8 + +TMP_JAVA_HOME="/customize/your/java/home/here" + +# Detect operating system. +OS=$(uname) + +function is_java8_or_11 { + local _java="$1" + [[ -x "$_java" ]] || return 1 + [[ "$("$_java" -version 2>&1)" =~ 'java version "1.8' || "$("$_java" -version 2>&1)" =~ 'openjdk version "1.8' || "$("$_java" -version 2>&1)" =~ 'java version "11' || "$("$_java" -version 2>&1)" =~ 'openjdk version "11' ]] || return 2 + return 0 +} + +function extract_java_version { + local _java="$1" + local version=$("$_java" -version 2>&1 | awk -F '"' '/version/ {print $2}' | awk -F '.' '{if ($1 == 1 && $2 == 8) print "8"; else if ($1 == 11) print "11"; else print "unknown"}') + echo "$version" +} + +# 0(not running), 1(is running) +#function is_proxyRunning { +# local _pid="$1" +# local pid=`ps ax | grep -i 'org.apache.eventmesh.runtime.boot.EventMeshStartup' |grep java | grep -v grep | awk '{print $1}'|grep $_pid` +# if [ -z "$pid" ] ; then +# return 0 +# else +# return 1 +# fi +#} + +function get_pid { + local ppid="" + if [ -f ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file ]; then + ppid=$(cat ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file) + # If the process does not exist, it indicates that the previous process terminated abnormally. + if [ ! -d /proc/$ppid ]; then + # Remove the residual file. + rm ${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file + echo -e "ERROR\t EventMesh process had already terminated unexpectedly before, please check log output." + ppid="" + fi + else + if [[ $OS =~ Msys ]]; then + # There is a Bug on Msys that may not be able to kill the identified process + ppid=`jps -v | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep java | grep -v grep | awk -F ' ' {'print $1'}` + elif [[ $OS =~ Darwin ]]; then + # Known problem: grep Java may not be able to accurately identify Java processes + ppid=$(/bin/ps -o user,pid,command | grep "java" | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep -Ev "^root" |awk -F ' ' {'print $2'}) + else + if [ $DOCKER ]; then + # No need to exclude root user in Docker containers. + ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_ADMIN_HOME | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | awk -F ' ' {'print $2'}) + else + # It is required to identify the process as accurately as possible on Linux. 
+ ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_ADMIN_HOME | grep -i "org.apache.eventmesh.admin.server.ExampleAdminServer" | grep -Ev "^root" | awk -F ' ' {'print $2'}) + fi + fi + fi + echo "$ppid"; +} + +#=========================================================================================== +# Locate Java Executable +#=========================================================================================== + +if [[ -d "$TMP_JAVA_HOME" ]] && is_java8_or_11 "$TMP_JAVA_HOME/bin/java"; then + JAVA="$TMP_JAVA_HOME/bin/java" + JAVA_VERSION=$(extract_java_version "$TMP_JAVA_HOME/bin/java") +elif [[ -d "$JAVA_HOME" ]] && is_java8_or_11 "$JAVA_HOME/bin/java"; then + JAVA="$JAVA_HOME/bin/java" + JAVA_VERSION=$(extract_java_version "$JAVA_HOME/bin/java") +elif is_java8_or_11 "$(which java)"; then + JAVA="$(which java)" + JAVA_VERSION=$(extract_java_version "$(which java)") +else + echo -e "ERROR\t Java 8 or 11 not found, operation abort." + exit 9; +fi + +echo "EventMesh using Java version: $JAVA_VERSION, path: $JAVA" + +EVENTMESH_ADMIN_HOME=$(cd "$(dirname "$0")/.." && pwd) +export EVENTMESH_ADMIN_HOME + +EVENTMESH_ADMIN_LOG_HOME="${EVENTMESH_ADMIN_HOME}/logs" +export EVENTMESH_ADMIN_LOG_HOME + +echo -e "EVENTMESH_ADMIN_HOME : ${EVENTMESH_ADMIN_HOME}\nEVENTMESH_ADMIN_LOG_HOME : ${EVENTMESH_ADMIN_LOG_HOME}" + +function make_logs_dir { + if [ ! -e "${EVENTMESH_ADMIN_LOG_HOME}" ]; then mkdir -p "${EVENTMESH_ADMIN_LOG_HOME}"; fi +} + +error_exit () +{ + echo -e "ERROR\t $1 !!" + exit 1 +} + +export JAVA_HOME + +#=========================================================================================== +# JVM Configuration +#=========================================================================================== +#if [ $1 = "prd" -o $1 = "benchmark" ]; then JAVA_OPT="${JAVA_OPT} -server -Xms2048M -Xmx4096M -Xmn2048m -XX:SurvivorRatio=4" +#elif [ $1 = "sit" ]; then JAVA_OPT="${JAVA_OPT} -server -Xms256M -Xmx512M -Xmn256m -XX:SurvivorRatio=4" +#elif [ $1 = "dev" ]; then JAVA_OPT="${JAVA_OPT} -server -Xms128M -Xmx256M -Xmn128m -XX:SurvivorRatio=4" +#fi + +GC_LOG_FILE="${EVENTMESH_ADMIN_LOG_HOME}/eventmesh_admin_gc_%p.log" + +JAVA_OPT="${JAVA_OPT} -server -Xms1g -Xmx1g" +JAVA_OPT="${JAVA_OPT} -XX:+UseG1GC -XX:G1HeapRegionSize=16m -XX:G1ReservePercent=25 -XX:InitiatingHeapOccupancyPercent=30 -XX:SoftRefLRUPolicyMSPerMB=0 -XX:SurvivorRatio=8 -XX:MaxGCPauseMillis=50" +JAVA_OPT="${JAVA_OPT} -verbose:gc" +if [[ "$JAVA_VERSION" == "8" ]]; then + # Set JAVA_OPT for Java 8 + JAVA_OPT="${JAVA_OPT} -Xloggc:${GC_LOG_FILE} -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=30m" + JAVA_OPT="${JAVA_OPT} -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCApplicationStoppedTime -XX:+PrintAdaptiveSizePolicy" +elif [[ "$JAVA_VERSION" == "11" ]]; then + # Set JAVA_OPT for Java 11 + XLOG_PARAM="time,level,tags:filecount=5,filesize=30m" + JAVA_OPT="${JAVA_OPT} -Xlog:gc*:${GC_LOG_FILE}:${XLOG_PARAM}" + JAVA_OPT="${JAVA_OPT} -Xlog:safepoint:${GC_LOG_FILE}:${XLOG_PARAM} -Xlog:ergo*=debug:${GC_LOG_FILE}:${XLOG_PARAM}" +fi +JAVA_OPT="${JAVA_OPT} -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${EVENTMESH_ADMIN_LOG_HOME} -XX:ErrorFile=${EVENTMESH_ADMIN_LOG_HOME}/hs_err_%p.log" +JAVA_OPT="${JAVA_OPT} -XX:-OmitStackTraceInFastThrow" +JAVA_OPT="${JAVA_OPT} -XX:+AlwaysPreTouch" +JAVA_OPT="${JAVA_OPT} -XX:MaxDirectMemorySize=8G" +JAVA_OPT="${JAVA_OPT} -XX:-UseLargePages -XX:-UseBiasedLocking" +JAVA_OPT="${JAVA_OPT} -Dio.netty.leakDetectionLevel=advanced" +JAVA_OPT="${JAVA_OPT} 
-Dio.netty.allocator.type=pooled" +JAVA_OPT="${JAVA_OPT} -Djava.security.egd=file:/dev/./urandom" +JAVA_OPT="${JAVA_OPT} -Dlog4j.configurationFile=${EVENTMESH_ADMIN_HOME}/conf/log4j2.xml" +JAVA_OPT="${JAVA_OPT} -Deventmesh.log.home=${EVENTMESH_ADMIN_LOG_HOME}" +JAVA_OPT="${JAVA_OPT} -DconfPath=${EVENTMESH_ADMIN_HOME}/conf" +JAVA_OPT="${JAVA_OPT} -DconfigurationPath=${EVENTMESH_ADMIN_HOME}/conf" +JAVA_OPT="${JAVA_OPT} -Dlog4j2.AsyncQueueFullPolicy=Discard" +JAVA_OPT="${JAVA_OPT} -Drocketmq.client.logUseSlf4j=true" +JAVA_OPT="${JAVA_OPT} -DeventMeshPluginDir=${EVENTMESH_ADMIN_HOME}/plugin" + +#if [ -f "pid.file" ]; then +# pid=`cat pid.file` +# if ! is_proxyRunning "$pid"; then +# echo "proxy is running already" +# exit 9; +# else +# echo "err pid$pid, rm pid.file" +# rm pid.file +# fi +#fi + +pid=$(get_pid) +if [[ $pid == "ERROR"* ]]; then + echo -e "${pid}" + exit 9 +fi +if [ -n "$pid" ]; then + echo -e "ERROR\t The server is already running (pid=$pid), there is no need to execute start.sh again." + exit 9 +fi + +make_logs_dir + +echo "Using Java version: $JAVA_VERSION, path: $JAVA" >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out + +EVENTMESH_ADMIN_MAIN=org.apache.eventmesh.admin.server.ExampleAdminServer +if [ $DOCKER ]; then + $JAVA $JAVA_OPT -classpath ${EVENTMESH_ADMIN_HOME}/conf:${EVENTMESH_ADMIN_HOME}/apps/*:${EVENTMESH_ADMIN_HOME}/lib/* $EVENTMESH_ADMIN_MAIN >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out +else + $JAVA $JAVA_OPT -classpath ${EVENTMESH_ADMIN_HOME}/conf:${EVENTMESH_ADMIN_HOME}/apps/*:${EVENTMESH_ADMIN_HOME}/lib/* $EVENTMESH_ADMIN_MAIN >> ${EVENTMESH_ADMIN_LOG_HOME}/eventmesh-admin.out 2>&1 & +echo $!>${EVENTMESH_ADMIN_HOME}/bin/pid-admin.file +fi +exit 0 diff --git a/eventmesh-admin-server/build.gradle b/eventmesh-admin-server/build.gradle new file mode 100644 index 0000000000..fdfe1bffe8 --- /dev/null +++ b/eventmesh-admin-server/build.gradle @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +dependencies { + implementation project(":eventmesh-spi") + implementation project(":eventmesh-common") + implementation project(":eventmesh-registry:eventmesh-registry-api") + implementation project(":eventmesh-registry:eventmesh-registry-nacos") + implementation project(":eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api") + implementation "com.alibaba.nacos:nacos-client" + implementation("org.springframework.boot:spring-boot-starter-web") { + exclude group: "org.springframework.boot", module: "spring-boot-starter-tomcat" + } + implementation 'org.springframework.boot:spring-boot-starter-jetty' + implementation "io.grpc:grpc-core" + implementation "io.grpc:grpc-protobuf" + implementation "io.grpc:grpc-stub" + implementation "io.grpc:grpc-netty-shaded" + + // https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-boot-starter + implementation "com.baomidou:mybatis-plus-boot-starter" + + // https://mvnrepository.com/artifact/com.alibaba/druid-spring-boot-starter + implementation "com.alibaba:druid-spring-boot-starter" + compileOnly 'com.mysql:mysql-connector-j' + compileOnly 'org.projectlombok:lombok' + testImplementation 'junit:junit:4.13.2' + testImplementation 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' +} + +configurations.implementation { + exclude group: "org.springframework.boot", module: "spring-boot-starter-logging" +} + +sourceSets { + main { + resources { + srcDirs = ['src/main/resources', 'conf'] + } + } +} + diff --git a/eventmesh-admin-server/conf/application.yaml b/eventmesh-admin-server/conf/application.yaml new file mode 100644 index 0000000000..3d702e579e --- /dev/null +++ b/eventmesh-admin-server/conf/application.yaml @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +spring: + datasource: + url: jdbc:mysql://localhost:3306/eventmesh?serverTimezone=GMT%2B8&characterEncoding=utf-8&useSSL=false&allowPublicKeyRetrieval=true + username: //db_username + password: //db_password + driver-class-name: com.mysql.cj.jdbc.Driver +mybatis-plus: + mapper-locations: classpath:mapper/*.xml + configuration: + map-underscore-to-camel-case: false + log-impl: org.apache.ibatis.logging.stdout.StdOutImpl +# http server port +server: + port: 8082 +event-mesh: + admin-server: + serviceName: DEFAULT_GROUP@@em_adm_server + # grpc server port + port: 8081 + adminServerList: + R1: + - http://localhost:8082 + R2: + - http://localhost:8082 + region: R1 \ No newline at end of file diff --git a/eventmesh-admin-server/conf/eventmesh-admin.properties b/eventmesh-admin-server/conf/eventmesh-admin.properties new file mode 100644 index 0000000000..07a6a212e7 --- /dev/null +++ b/eventmesh-admin-server/conf/eventmesh-admin.properties @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +eventMesh.server.retry.plugin.type=nacos +eventMesh.registry.plugin.server-addr=localhost:8848 diff --git a/eventmesh-admin-server/conf/eventmesh.sql b/eventmesh-admin-server/conf/eventmesh.sql new file mode 100644 index 0000000000..6e28daca8a --- /dev/null +++ b/eventmesh-admin-server/conf/eventmesh.sql @@ -0,0 +1,152 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. 
+ +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET NAMES utf8 */; +/*!50503 SET NAMES utf8 */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + + +-- export eventmesh database +CREATE DATABASE IF NOT EXISTS `eventmesh` /*!40100 DEFAULT CHARACTER SET utf8 COLLATE utf8_bin */ /*!80016 DEFAULT ENCRYPTION='N' */; +USE `eventmesh`; + +-- export table eventmesh.event_mesh_data_source structure +CREATE TABLE IF NOT EXISTS `event_mesh_data_source` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `dataType` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `description` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `configuration` text CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `configurationClass` varchar(200) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `region` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `updateUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- export table eventmesh.event_mesh_job_info structure +CREATE TABLE IF NOT EXISTS `event_mesh_job_info` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `jobID` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `jobDesc` varchar(50) COLLATE utf8_bin NOT NULL, + `taskID` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `transportType` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `sourceData` int NOT NULL DEFAULT '0', + `targetData` int NOT NULL DEFAULT '0', + `jobState` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `jobType` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `fromRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `runningRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createUid` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `updateUid` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `jobID` (`jobID`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- export table eventmesh.event_mesh_mysql_position structure +CREATE TABLE IF NOT EXISTS `event_mesh_mysql_position` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `jobID` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `serverUUID` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `address` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `position` bigint DEFAULT NULL, + `gtid` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `currentGtid` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `timestamp` bigint DEFAULT NULL, + `journalName` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL, + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `jobID` (`jobID`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=DYNAMIC; + +-- export table 
eventmesh.event_mesh_position_reporter_history structure +CREATE TABLE IF NOT EXISTS `event_mesh_position_reporter_history` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `job` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `record` text CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `address` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + KEY `job` (`job`), + KEY `address` (`address`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='record position reporter changes'; + +-- export table eventmesh.event_mesh_runtime_heartbeat structure +CREATE TABLE IF NOT EXISTS `event_mesh_runtime_heartbeat` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `adminAddr` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `runtimeAddr` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `jobID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `reportTime` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL COMMENT 'runtime local report time', + `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + KEY `jobID` (`jobID`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- export table eventmesh.event_mesh_runtime_history structure +CREATE TABLE IF NOT EXISTS `event_mesh_runtime_history` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `job` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `address` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '', + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + KEY `address` (`address`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=DYNAMIC COMMENT='record runtime task change history'; + +-- export table eventmesh.event_mesh_task_info structure +CREATE TABLE IF NOT EXISTS `event_mesh_task_info` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `taskID` varchar(50) COLLATE utf8_bin NOT NULL, + `taskName` varchar(50) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, + `taskDesc` varchar(50) COLLATE utf8_bin NOT NULL, + `taskState` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '' COMMENT 'taskstate', + `sourceRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `targetRegion` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `createUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `updateUid` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT '', + `createTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `taskID` (`taskID`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- export table eventmesh.event_mesh_verify structure +CREATE TABLE IF NOT EXISTS `event_mesh_verify` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `taskID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `jobID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `recordID` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `recordSig` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `connectorName` varchar(200) COLLATE utf8_bin DEFAULT NULL, + `connectorStage` varchar(50) COLLATE utf8_bin DEFAULT NULL, + `position` text COLLATE utf8_bin DEFAULT NULL, + `createTime` timestamp NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; +/*!40014 SET 
FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */; diff --git a/eventmesh-admin-server/conf/log4j2.xml b/eventmesh-admin-server/conf/log4j2.xml new file mode 100644 index 0000000000..6341a0e629 --- /dev/null +++ b/eventmesh-admin-server/conf/log4j2.xml @@ -0,0 +1,108 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml new file mode 100644 index 0000000000..50e6ad82cc --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshDataSourceMapper.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + id,dataType,description, + configuration,configurationClass,region, + createUid,updateUid,createTime,updateTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml new file mode 100644 index 0000000000..a053d1c838 --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshJobInfoMapper.xml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + id,jobID,jobDesc, + taskID,transportType,sourceData, + targetData,jobState,jobType, + fromRegion,runningRegion,createUid, + updateUid,createTime,updateTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshMysqlPositionMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshMysqlPositionMapper.xml new file mode 100644 index 0000000000..9bcc7f42bb --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshMysqlPositionMapper.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + id,jobID,serverUUID, + address,position,gtid, + currentGtid,timestamp,journalName, + createTime,updateTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshPositionReporterHistoryMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshPositionReporterHistoryMapper.xml new file mode 100644 index 0000000000..a9e4fe6f1b --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshPositionReporterHistoryMapper.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + id,job,record, + address,createTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshRuntimeHeartbeatMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshRuntimeHeartbeatMapper.xml new file mode 100644 index 0000000000..200b1bf54a --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshRuntimeHeartbeatMapper.xml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + id,adminAddr,runtimeAddr, + jobID,reportTime,updateTime, + createTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshRuntimeHistoryMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshRuntimeHistoryMapper.xml new file mode 100644 index 0000000000..281cce30f9 --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshRuntimeHistoryMapper.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + id,job,address, + createTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml new file mode 100644 index 0000000000..c3514fd945 --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshTaskInfoMapper.xml @@ -0,0 +1,46 @@ + + + + + + + + + + 
+ + + + + + + + + + + + + id,taskID,taskName, + taskDesc,taskState,sourceRegion,targetRegion, + createUid,updateUid,createTime, + updateTime + + diff --git a/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml b/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml new file mode 100644 index 0000000000..45727498cc --- /dev/null +++ b/eventmesh-admin-server/conf/mapper/EventMeshVerifyMapper.xml @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + + + + + + + id,taskID,jobID,recordID, + recordSig,connectorName,connectorStage, + position,createTime + + diff --git a/eventmesh-admin-server/gradle.properties b/eventmesh-admin-server/gradle.properties new file mode 100644 index 0000000000..a9fd83fea0 --- /dev/null +++ b/eventmesh-admin-server/gradle.properties @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java new file mode 100644 index 0000000000..612d398078 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerProperties.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.eventmesh.admin.server;
+
+import java.util.List;
+import java.util.Map;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@ConfigurationProperties("event-mesh.admin-server")
+@Getter
+@Setter
+public class AdminServerProperties {
+
+    private int port;
+    private boolean enableSSL;
+    private String configurationPath;
+    private String configurationFile;
+    private String serviceName;
+    private Map<String, List<String>> adminServerList;
+    private String region;
+}
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerRuntimeException.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerRuntimeException.java
new file mode 100644
index 0000000000..e68d05100f
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/AdminServerRuntimeException.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.admin.server;
+
+import lombok.Getter;
+
+@Getter
+public class AdminServerRuntimeException extends RuntimeException {
+    private final int code;
+
+    public AdminServerRuntimeException(int code, String message) {
+        super(message);
+        this.code = code;
+    }
+}
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/ExampleAdminServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/ExampleAdminServer.java
new file mode 100644
index 0000000000..b179a790c5
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/ExampleAdminServer.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.eventmesh.admin.server; + +import org.apache.eventmesh.admin.server.constatns.AdminServerConstants; +import org.apache.eventmesh.common.config.ConfigService; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@SpringBootApplication(scanBasePackages = "org.apache.eventmesh.admin.server") +public class ExampleAdminServer { + + public static void main(String[] args) throws Exception { + ConfigService.getInstance().setConfigPath(AdminServerConstants.EVENTMESH_CONF_HOME).setRootConfig(AdminServerConstants.EVENTMESH_CONF_FILE); + SpringApplication.run(ExampleAdminServer.class); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/constatns/AdminServerConstants.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/constatns/AdminServerConstants.java new file mode 100644 index 0000000000..44afaca1c2 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/constatns/AdminServerConstants.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.constatns; + +public class AdminServerConstants { + public static final String CONF_ENV = "configurationPath"; + + public static final String EVENTMESH_CONF_HOME = System.getProperty(CONF_ENV, System.getenv(CONF_ENV)); + + public static final String EVENTMESH_CONF_FILE = "eventmesh-admin.properties"; +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/BaseServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/BaseServer.java new file mode 100644 index 0000000000..9bbe4ce305 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/BaseServer.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.eventmesh.admin.server.web;
+
+import org.apache.eventmesh.common.ComponentLifeCycle;
+import org.apache.eventmesh.common.remote.payload.PayloadFactory;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public abstract class BaseServer implements ComponentLifeCycle {
+
+    static {
+        PayloadFactory.getInstance().init();
+    }
+
+    @PostConstruct
+    public void init() throws Exception {
+        log.info("[{}] server starting at port [{}]", this.getClass().getSimpleName(), getPort());
+        start();
+        log.info("[{}] server started at port [{}]", this.getClass().getSimpleName(), getPort());
+    }
+
+    @PreDestroy
+    public void shutdown() throws Exception {
+        log.info("[{}] server will be destroyed", this.getClass().getSimpleName());
+        stop();
+        log.info("[{}] server has been destroyed", this.getClass().getSimpleName());
+    }
+
+    public abstract int getPort();
+}
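BaseServer ties the Spring lifecycle hooks (@PostConstruct/@PreDestroy) to the ComponentLifeCycle start()/stop() contract and logs around them. Purely as an illustration of that contract, a minimal sketch of a subclass follows; the class name and port are made up for the example, and GrpcServer below is the actual implementation added by this change.

@org.springframework.stereotype.Component
public class NoopServer extends BaseServer {

    @Override
    public void start() {
        // Invoked from init() after the "server starting" log line: bind sockets, register services, etc.
    }

    @Override
    public void stop() {
        // Invoked from shutdown() before the "server has been destroyed" log line: release resources here.
    }

    @Override
    public int getPort() {
        return 10000; // arbitrary example port
    }
}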
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/GrpcServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/GrpcServer.java
new file mode 100644
index 0000000000..d2a0330355
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/GrpcServer.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.admin.server.web;
+
+import org.apache.eventmesh.admin.server.AdminServerProperties;
+import org.apache.eventmesh.admin.server.web.service.AdminGrpcServer;
+
+import java.util.concurrent.TimeUnit;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+
+import io.grpc.Server;
+import io.grpc.netty.shaded.io.grpc.netty.NettyServerBuilder;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Controller
+@Slf4j
+public class GrpcServer extends BaseServer {
+
+    @Autowired
+    AdminGrpcServer adminGrpcServer;
+
+    @Autowired
+    AdminServerProperties properties;
+
+    private Server server;
+
+    @Override
+    public void start() throws Exception {
+        NettyServerBuilder serverBuilder = NettyServerBuilder.forPort(getPort()).addService(adminGrpcServer);
+        if (properties.isEnableSSL()) {
+            serverBuilder.sslContext(null);
+        }
+        server = serverBuilder.build();
+        server.start();
+    }
+
+    @Override
+    public void stop() {
+        try {
+            if (server != null) {
+                server.shutdown();
+                if (!server.awaitTermination(30, TimeUnit.SECONDS)) {
+                    log.warn("[{}] server did not stop gracefully within 30s, forcing shutdown now", this.getClass().getSimpleName());
+                    server.shutdownNow();
+                }
+            }
+        } catch (InterruptedException e) {
+            log.warn("failed to destroy [{}] server", this.getClass().getSimpleName(), e);
+        }
+    }
+
+    @Override
+    public int getPort() {
+        return properties.getPort();
+    }
+}
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java
new file mode 100644
index 0000000000..2454e9f02c
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/HttpServer.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.admin.server.web;
+
+import org.apache.eventmesh.admin.server.web.service.task.TaskBizService;
+import org.apache.eventmesh.admin.server.web.service.verify.VerifyBizService;
+import org.apache.eventmesh.common.remote.request.CreateTaskRequest;
+import org.apache.eventmesh.common.remote.request.ReportVerifyRequest;
+import org.apache.eventmesh.common.remote.response.CreateTaskResponse;
+import org.apache.eventmesh.common.utils.JsonUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
+
+import lombok.extern.slf4j.Slf4j;
+
+@RestController
+@RequestMapping("/eventmesh/admin")
+@Slf4j
+public class HttpServer {
+
+    @Autowired
+    private TaskBizService taskService;
+
+    @Autowired
+    private VerifyBizService verifyService;
+
+    @RequestMapping(value = "/createTask", method = RequestMethod.POST)
+    public ResponseEntity<String> createOrUpdateTask(@RequestBody CreateTaskRequest task) {
+        log.info("received http request to create task: {}", task);
+        CreateTaskResponse createTaskResponse = taskService.createTask(task);
+        log.info("create task result: {}", createTaskResponse);
+        return ResponseEntity.ok(JsonUtils.toJSONString(Response.success(createTaskResponse)));
+    }
+
+    @RequestMapping(value = "/reportVerify", method = RequestMethod.POST)
+    public ResponseEntity<String> reportVerify(@RequestBody ReportVerifyRequest request) {
+        log.info("received http request to report verify: {}", request);
+        boolean result = verifyService.reportVerifyRecord(request);
+        log.info("report verify result: {}", result);
+        if (result) {
+            return ResponseEntity.ok("report verify success. request: " + JsonUtils.toJSONString(request));
+        } else {
+            return ResponseEntity.internalServerError().body("report verify failed. request: " + JsonUtils.toJSONString(request));
+        }
+    }
+
+    public boolean deleteTask(Long id) {
+        return false;
+    }
+}
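For orientation, a small client sketch for the /createTask endpoint defined above, assuming a Java 11 runtime and the default HTTP port 8082 from conf/application.yaml; the JSON field names are illustrative only, since CreateTaskRequest is defined in eventmesh-common and is not part of this diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateTaskClientExample {

    public static void main(String[] args) throws Exception {
        // Illustrative payload; consult CreateTaskRequest in eventmesh-common for the real field names.
        String body = "{\"taskName\":\"demo-task\",\"sourceRegion\":\"R1\",\"targetRegion\":\"R2\"}";

        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8082/eventmesh/admin/createTask"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();

        // The handler replies with a JSON-serialized Response wrapping a CreateTaskResponse.
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}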
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Request.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Request.java
new file mode 100644
index 0000000000..9484e986f1
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Request.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.admin.server.web;
+
+public class Request<T> {
+    private String uid;
+    private T data;
+
+    public String getUid() {
+        return uid;
+    }
+
+    public void setUid(String uid) {
+        this.uid = uid;
+    }
+
+    public T getData() {
+        return data;
+    }
+
+    public void setData(T data) {
+        this.data = data;
+    }
+}
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java
new file mode 100644
index 0000000000..d573c3bac4
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/Response.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.admin.server.web;
+
+import org.apache.eventmesh.common.remote.exception.ErrorCode;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+public class Response<T> {
+
+    private int code;
+
+    private boolean success;
+
+    private String desc;
+
+    private T data;
+
+    public static <T> Response<T> success() {
+        Response<T> response = new Response<>();
+        response.success = true;
+        response.code = ErrorCode.SUCCESS;
+        return response;
+    }
+
+    public static <T> Response<T> success(T data) {
+        Response<T> response = new Response<>();
+        response.success = true;
+        response.data = data;
+        return response;
+    }
+
+    public static <T> Response<T> fail(int code, String desc) {
+        Response<T> response = new Response<>();
+        response.success = false;
+        response.code = code;
+        response.desc = desc;
+        return response;
+    }
+
+    public static <T> Response<T> fail(int code, String desc, T data) {
+        Response<T> response = new Response<>();
+        response.success = false;
+        response.code = code;
+        response.desc = desc;
+        response.data = data;
+        return response;
+    }
+}
diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/ServerController.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/ServerController.java
new file mode 100644
index 0000000000..5623cbad33
--- /dev/null
+++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/ServerController.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web; + +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequestMapping("/eventmesh/admin") +public class ServerController { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java new file mode 100644 index 0000000000..277ea66656 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/DBThreadPool.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
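The generic `Request<T>`/`Response<T>` envelopes defined above are plain wrappers around a payload plus status fields. A short usage sketch, assuming the generic signatures shown above and an arbitrary example error code:

```java
// Minimal usage sketch of the Response envelope. Illustrative only.
package org.apache.eventmesh.admin.server.web;

public class ResponseUsageSketch {

    public static void main(String[] args) {
        // A successful result carrying a payload.
        Response<String> ok = Response.success("task-123");

        // A failure carrying an example code and a description; 1001 is arbitrary.
        Response<String> failed = Response.fail(1001, "task not found");

        // Lombok's @Getter generates isSuccess()/getData()/getDesc().
        System.out.println(ok.isSuccess() + " " + ok.getData());
        System.out.println(failed.isSuccess() + " " + failed.getDesc());
    }
}
```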
+ */ + +package org.apache.eventmesh.admin.server.web.db; + +import org.apache.eventmesh.common.EventMeshThreadFactory; + +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import javax.annotation.PreDestroy; + +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class DBThreadPool { + + private final ThreadPoolExecutor executor = + new ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, + Runtime.getRuntime().availableProcessors() * 2, 0L, TimeUnit.SECONDS, + new LinkedBlockingQueue<>(1000), new EventMeshThreadFactory("admin-server-db"), + new ThreadPoolExecutor.DiscardOldestPolicy()); + + + private final ScheduledThreadPoolExecutor checkScheduledExecutor = + new ScheduledThreadPoolExecutor(Runtime.getRuntime().availableProcessors(), new EventMeshThreadFactory("admin-server-check-scheduled"), + new ThreadPoolExecutor.DiscardOldestPolicy()); + + @PreDestroy + private void destroy() { + if (!executor.isShutdown()) { + try { + executor.shutdown(); + if (!executor.awaitTermination(30, TimeUnit.SECONDS)) { + log.info("wait handler thread pool shutdown timeout, it will shutdown immediately"); + executor.shutdownNow(); + } + } catch (InterruptedException e) { + log.warn("wait handler thread pool shutdown fail"); + } + } + + if (!checkScheduledExecutor.isShutdown()) { + try { + checkScheduledExecutor.shutdown(); + if (!checkScheduledExecutor.awaitTermination(30, TimeUnit.SECONDS)) { + log.info("wait scheduled thread pool shutdown timeout, it will shutdown immediately"); + checkScheduledExecutor.shutdownNow(); + } + } catch (InterruptedException e) { + log.warn("wait scheduled thread pool shutdown fail"); + } + } + } + + public ThreadPoolExecutor getExecutors() { + return executor; + } + + public ScheduledThreadPoolExecutor getCheckExecutor() { + return checkScheduledExecutor; + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java new file mode 100644 index 0000000000..e6e328984c --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshDataSource.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
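The `DBThreadPool` component above exposes a bounded worker pool and a scheduled executor, both using `DiscardOldestPolicy` under pressure. A sketch of how another Spring bean might use it (the class and method names here are illustrative, not part of the patch):

```java
// Illustrative caller: offload a blocking DB write and schedule a periodic check.
import java.util.concurrent.TimeUnit;

import org.apache.eventmesh.admin.server.web.db.DBThreadPool;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class PositionFlushSketch {

    @Autowired
    private DBThreadPool dbThreadPool;

    public void flushAsync(Runnable flushTask) {
        // Bounded queue of 1000; under sustained load the oldest queued task is discarded.
        dbThreadPool.getExecutors().submit(flushTask);
    }

    public void schedulePeriodicCheck(Runnable check) {
        dbThreadPool.getCheckExecutor().scheduleAtFixedRate(check, 10, 10, TimeUnit.SECONDS);
    }
}
```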
+ */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_data_source + */ +@TableName(value = "event_mesh_data_source") +@Data +public class EventMeshDataSource implements Serializable { + @TableId(type = IdType.AUTO) + private Integer id; + + private String dataType; + + private String description; + + private String configuration; + + private String configurationClass; + + private String region; + + private String createUid; + + private String updateUid; + + private Date createTime; + + private Date updateTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java new file mode 100644 index 0000000000..a77eaaaca2 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshJobInfo.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_job_info + */ +@TableName(value = "event_mesh_job_info") +@Data +public class EventMeshJobInfo implements Serializable { + @TableId(type = IdType.AUTO) + private Integer id; + + private String jobID; + + private String jobDesc; + + private String taskID; + + private String transportType; + + private Integer sourceData; + + private Integer targetData; + + private String jobState; + + private String jobType; + + // job request from region + private String fromRegion; + + // job actually running region + private String runningRegion; + + private String createUid; + + private String updateUid; + + private Date createTime; + + private Date updateTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshMysqlPosition.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshMysqlPosition.java new file mode 100644 index 0000000000..5e5d5745c1 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshMysqlPosition.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
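The `EventMeshJobInfo` entity above maps `event_mesh_job_info` through MyBatis-Plus annotations. A sketch of typical mapper-level access via the `EventMeshJobInfoMapper` added later in this patch, shown only as an illustration of the `BaseMapper` pattern these entities rely on:

```java
// Illustrative query through the MyBatis-Plus BaseMapper for event_mesh_job_info.
import java.util.List;

import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo;
import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshJobInfoMapper;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;

public class JobInfoQuerySketch {

    private final EventMeshJobInfoMapper jobInfoMapper;

    public JobInfoQuerySketch(EventMeshJobInfoMapper jobInfoMapper) {
        this.jobInfoMapper = jobInfoMapper;
    }

    public List<EventMeshJobInfo> findByTask(String taskID) {
        // BaseMapper#selectList with a lambda wrapper building the taskID equality condition.
        return jobInfoMapper.selectList(
            new LambdaQueryWrapper<EventMeshJobInfo>().eq(EventMeshJobInfo::getTaskID, taskID));
    }
}
```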
+ */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_mysql_position + */ +@TableName(value = "event_mesh_mysql_position") +@Data +public class EventMeshMysqlPosition implements Serializable { + @TableId(type = IdType.AUTO) + private Integer id; + + private String jobID; + + private String serverUUID; + + private String address; + + private Long position; + + private String gtid; + + private String currentGtid; + + private Long timestamp; + + private String journalName; + + private Date createTime; + + private Date updateTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshPositionReporterHistory.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshPositionReporterHistory.java new file mode 100644 index 0000000000..8518c38918 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshPositionReporterHistory.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_position_reporter_history + */ +@TableName(value = "event_mesh_position_reporter_history") +@Data +public class EventMeshPositionReporterHistory implements Serializable { + @TableId(type = IdType.AUTO) + private Long id; + + private String job; + + private String record; + + private String address; + + private Date createTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshRuntimeHeartbeat.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshRuntimeHeartbeat.java new file mode 100644 index 0000000000..95e6c5e261 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshRuntimeHeartbeat.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_runtime_heartbeat + */ +@TableName(value = "event_mesh_runtime_heartbeat") +@Data +public class EventMeshRuntimeHeartbeat implements Serializable { + @TableId(type = IdType.AUTO) + private Long id; + + private String adminAddr; + + private String runtimeAddr; + + private String jobID; + + private String reportTime; + + private Date updateTime; + + private Date createTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshRuntimeHistory.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshRuntimeHistory.java new file mode 100644 index 0000000000..ea7e10cbad --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshRuntimeHistory.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_runtime_history + */ +@TableName(value = "event_mesh_runtime_history") +@Data +public class EventMeshRuntimeHistory implements Serializable { + @TableId(type = IdType.AUTO) + private Long id; + + private String job; + + private String address; + + private Date createTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java new file mode 100644 index 0000000000..2d40f4a082 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshTaskInfo.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_task_info + */ +@TableName(value = "event_mesh_task_info") +@Data +public class EventMeshTaskInfo implements Serializable { + @TableId(type = IdType.AUTO) + private Integer id; + + private String taskID; + + private String taskName; + + private String taskDesc; + + private String taskState; + + private String sourceRegion; + + private String targetRegion; + + private String createUid; + + private String updateUid; + + private Date createTime; + + private Date updateTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java new file mode 100644 index 0000000000..c5a6c35f8d --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/entity/EventMeshVerify.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import lombok.Data; + +/** + * TableName event_mesh_verify + */ +@TableName(value = "event_mesh_verify") +@Data +public class EventMeshVerify implements Serializable { + + @TableId(type = IdType.AUTO) + private Integer id; + + private String taskID; + + private String jobID; + + private String recordID; + + private String recordSig; + + private String connectorName; + + private String connectorStage; + + private String position; + + private Date createTime; + + private static final long serialVersionUID = 1L; +} \ No newline at end of file diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshDataSourceMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshDataSourceMapper.java new file mode 100644 index 0000000000..c59e28428f --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshDataSourceMapper.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * for table 'event_mesh_data_source' db operation + * 2024-05-09 15:52:49 + * entity.db.web.server.admin.eventmesh.apache.org.EventMeshDataSource + */ +@Mapper +public interface EventMeshDataSourceMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java new file mode 100644 index 0000000000..c04c4e3748 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoExtMapper.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; + +import org.apache.ibatis.annotations.Insert; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * etx operator for table event_mesh_job_info + */ +@Mapper +public interface EventMeshJobInfoExtMapper extends BaseMapper { + + @Insert("") + @Transactional(rollbackFor = Exception.class) + int saveBatch(@Param("jobs") List jobInfoList); +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoMapper.java new file mode 100644 index 0000000000..39f8a4aed6 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshJobInfoMapper.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * for table 'event_mesh_job_info' db operation + * entity org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo + */ +@Mapper +public interface EventMeshJobInfoMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshMysqlPositionMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshMysqlPositionMapper.java new file mode 100644 index 0000000000..f0a0467d76 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshMysqlPositionMapper.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshMysqlPosition; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * for table `event_mesh_mysql_position` db operation + * 2024-05-14 17:15:03 + * entity.db.web.server.admin.eventmesh.apache.org.EventMeshMysqlPosition + */ +@Mapper +public interface EventMeshMysqlPositionMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshPositionReporterHistoryMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshPositionReporterHistoryMapper.java new file mode 100644 index 0000000000..adc6723b79 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshPositionReporterHistoryMapper.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshPositionReporterHistory; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * for table 'event_mesh_position_reporter_history' db operation + * 2024-05-14 17:15:03 + * entity.db.web.server.admin.eventmesh.apache.org.EventMeshPositionReporterHistory + */ +@Mapper +public interface EventMeshPositionReporterHistoryMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshRuntimeHeartbeatMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshRuntimeHeartbeatMapper.java new file mode 100644 index 0000000000..813769d2ab --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshRuntimeHeartbeatMapper.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * for table 'event_mesh_runtime_heartbeat' db operation + * 2024-05-14 17:15:03 + * entity.db.web.server.admin.eventmesh.apache.org.EventMeshRuntimeHeartbeat + */ +@Mapper +public interface EventMeshRuntimeHeartbeatMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshRuntimeHistoryMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshRuntimeHistoryMapper.java new file mode 100644 index 0000000000..5fc3a21f54 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshRuntimeHistoryMapper.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHistory; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * for table 'event_mesh_runtime_history' db operation + * 2024-05-14 17:15:03 + * entity.db.web.server.admin.eventmesh.apache.org.EventMeshRuntimeHistory + */ +@Mapper +public interface EventMeshRuntimeHistoryMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshTaskInfoMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshTaskInfoMapper.java new file mode 100644 index 0000000000..d1d472b8c4 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshTaskInfoMapper.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * event_mesh_task_info + * Entity org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo + */ +@Mapper +public interface EventMeshTaskInfoMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshVerifyMapper.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshVerifyMapper.java new file mode 100644 index 0000000000..b444d1e4b4 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/mapper/EventMeshVerifyMapper.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.mapper; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify; + +import org.apache.ibatis.annotations.Mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * event_mesh_verify + * Entity org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify + */ +@Mapper +public interface EventMeshVerifyMapper extends BaseMapper { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshDataSourceService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshDataSourceService.java new file mode 100644 index 0000000000..29e2b8122e --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshDataSourceService.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** +* +* for table 'event_mesh_data_source' db operation +* 2024-05-09 15:52:49 +*/ +public interface EventMeshDataSourceService extends IService { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshJobInfoExtService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshJobInfoExtService.java new file mode 100644 index 0000000000..22fc5ae299 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshJobInfoExtService.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; + +import java.util.List; + +/** + * ext operator for table event_mesh_job + */ +public interface EventMeshJobInfoExtService { + int batchSave(List jobs); +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshJobInfoService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshJobInfoService.java new file mode 100644 index 0000000000..572e451ceb --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshJobInfoService.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * event_mesh_job_info + */ +public interface EventMeshJobInfoService extends IService { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshMysqlPositionService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshMysqlPositionService.java new file mode 100644 index 0000000000..2bf34ab922 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshMysqlPositionService.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
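The job-info table is exposed both through the standard `IService` facade (`EventMeshJobInfoService`) and through the custom `EventMeshJobInfoExtService`, which fronts the hand-written batch insert. A sketch of how a caller might combine the two; the surrounding class is illustrative, not part of this patch:

```java
// Illustrative caller mixing the IService facade with the custom batch insert.
import java.util.List;

import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo;
import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoExtService;
import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoService;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class JobInfoAccessSketch {

    @Autowired
    private EventMeshJobInfoService jobInfoService;

    @Autowired
    private EventMeshJobInfoExtService jobInfoExtService;

    public EventMeshJobInfo loadJob(Integer id) {
        // Inherited from IService: single-row lookup by primary key.
        return jobInfoService.getById(id);
    }

    public int saveJobs(List<EventMeshJobInfo> jobs) {
        // Delegates to the batch insert declared on EventMeshJobInfoExtMapper.
        return jobInfoExtService.batchSave(jobs);
    }
}
```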
+ */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshMysqlPosition; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** +* for table 'event_mesh_mysql_position' db operation +* 2024-05-14 17:15:03 +*/ +public interface EventMeshMysqlPositionService extends IService { +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshPositionReporterHistoryService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshPositionReporterHistoryService.java new file mode 100644 index 0000000000..976eec94ea --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshPositionReporterHistoryService.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshPositionReporterHistory; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** +* for table 'event_mesh_position_reporter_history' db operation +* 2024-05-14 17:15:03 +*/ +public interface EventMeshPositionReporterHistoryService extends IService { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshRuntimeHeartbeatService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshRuntimeHeartbeatService.java new file mode 100644 index 0000000000..f0e5d1c61b --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshRuntimeHeartbeatService.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * for table 'event_mesh_runtime_heartbeat' db operation + * 2024-05-14 17:15:03 + */ +public interface EventMeshRuntimeHeartbeatService extends IService { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshRuntimeHistoryService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshRuntimeHistoryService.java new file mode 100644 index 0000000000..1f6db0e12e --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshRuntimeHistoryService.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHistory; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * for table 'event_mesh_runtime_history' db operation + * 2024-05-14 17:15:03 + */ +public interface EventMeshRuntimeHistoryService extends IService { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshTaskInfoService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshTaskInfoService.java new file mode 100644 index 0000000000..dc35cfe071 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshTaskInfoService.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * event_mesh_task_info + */ +public interface EventMeshTaskInfoService extends IService { + +} diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshVerifyService.java similarity index 71% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java rename to eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshVerifyService.java index 8239dfcb6e..97f2d7268e 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Transformer.java +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/EventMeshVerifyService.java @@ -1,32 +1,29 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.eventmesh.transformer; - -import com.fasterxml.jackson.core.JsonProcessingException; - -/** - * EventMesh transformer interface, specified transformer implementation includes: - * 1. Constant - * 2. Original - * 3. Template - */ -public interface Transformer { - - String transform(String json) throws JsonProcessingException; - -} +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.service; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * event_mesh_verify + */ +public interface EventMeshVerifyService extends IService { + +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshDataSourceServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshDataSourceServiceImpl.java new file mode 100644 index 0000000000..f703425c94 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshDataSourceServiceImpl.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshDataSourceMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshDataSourceService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * for table 'event_mesh_data_source' db operation + * 2024-05-09 15:52:49 + */ +@Service +public class EventMeshDataSourceServiceImpl extends ServiceImpl + implements EventMeshDataSourceService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshJobInfoExtServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshJobInfoExtServiceImpl.java new file mode 100644 index 0000000000..6cf0ebf6b2 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshJobInfoExtServiceImpl.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
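The `/reportVerify` endpoint shown earlier delegates to `VerifyBizService`, whose implementation is not part of this excerpt. One plausible shape, persisting the record through the `EventMeshVerifyService` declared above; this is a hedged sketch, not the project's actual implementation:

```java
// Hedged sketch: persists a verify report via EventMeshVerifyService.
import org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify;
import org.apache.eventmesh.admin.server.web.db.service.EventMeshVerifyService;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class VerifyPersistenceSketch {

    @Autowired
    private EventMeshVerifyService verifyService;

    public boolean saveRecord(String taskID, String jobID, String recordID, String recordSig) {
        EventMeshVerify verify = new EventMeshVerify();
        verify.setTaskID(taskID);
        verify.setJobID(jobID);
        verify.setRecordID(recordID);
        verify.setRecordSig(recordSig);
        // IService#save returns true when the insert succeeds.
        return verifyService.save(verify);
    }
}
```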
+ */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshJobInfoExtMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoExtService; + +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class EventMeshJobInfoExtServiceImpl implements EventMeshJobInfoExtService { + @Autowired + EventMeshJobInfoExtMapper mapper; + + @Override + public int batchSave(List jobs) { + return mapper.saveBatch(jobs); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshJobInfoServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshJobInfoServiceImpl.java new file mode 100644 index 0000000000..4613e0809d --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshJobInfoServiceImpl.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshJobInfoMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * event_mesh_job_info + */ +@Service +public class EventMeshJobInfoServiceImpl extends ServiceImpl + implements EventMeshJobInfoService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshMysqlPositionServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshMysqlPositionServiceImpl.java new file mode 100644 index 0000000000..353443b78d --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshMysqlPositionServiceImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
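EventMeshJobInfoExtServiceImpl above bypasses the generic MyBatis-Plus service layer and delegates batch inserts to a saveBatch statement declared on EventMeshJobInfoExtMapper (whose SQL is not part of this excerpt). A minimal usage sketch; the setter is assumed to mirror the getJobID() accessor used elsewhere in this patch:

    List<EventMeshJobInfo> jobs = new ArrayList<>();
    EventMeshJobInfo job = new EventMeshJobInfo();
    job.setJobID(UUID.randomUUID().toString()); // assumed Lombok-style setter
    jobs.add(job);
    int inserted = jobInfoExtService.batchSave(jobs); // returns the affected row count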
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshMysqlPosition; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshMysqlPositionMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshMysqlPositionService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +import lombok.extern.slf4j.Slf4j; + +/** + * for table 'event_mesh_mysql_position' db operation + * 2024-05-14 17:15:03 + */ +@Service +@Slf4j +public class EventMeshMysqlPositionServiceImpl extends ServiceImpl + implements EventMeshMysqlPositionService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshPositionReporterHistoryServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshPositionReporterHistoryServiceImpl.java new file mode 100644 index 0000000000..d546e412ba --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshPositionReporterHistoryServiceImpl.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshPositionReporterHistory; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshPositionReporterHistoryMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshPositionReporterHistoryService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * for table 'event_mesh_position_reporter_history' db operation + * 2024-05-14 17:15:03 + */ +@Service +public class EventMeshPositionReporterHistoryServiceImpl extends ServiceImpl + implements EventMeshPositionReporterHistoryService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshRuntimeHeartbeatServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshRuntimeHeartbeatServiceImpl.java new file mode 100644 index 0000000000..452569e3a6 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshRuntimeHeartbeatServiceImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshRuntimeHeartbeatMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshRuntimeHeartbeatService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +import lombok.extern.slf4j.Slf4j; + +/** + * for table 'event_mesh_runtime_heartbeat' db operation + * 2024-05-14 17:15:03 + */ +@Service +@Slf4j +public class EventMeshRuntimeHeartbeatServiceImpl extends ServiceImpl + implements EventMeshRuntimeHeartbeatService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshRuntimeHistoryServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshRuntimeHistoryServiceImpl.java new file mode 100644 index 0000000000..d39e868ce5 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshRuntimeHistoryServiceImpl.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHistory; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshRuntimeHistoryMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshRuntimeHistoryService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * for table 'event_mesh_runtime_history' db operation + * 2024-05-14 17:15:03 + */ +@Service +public class EventMeshRuntimeHistoryServiceImpl extends ServiceImpl + implements EventMeshRuntimeHistoryService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshTaskInfoServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshTaskInfoServiceImpl.java new file mode 100644 index 0000000000..9568b63671 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshTaskInfoServiceImpl.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshTaskInfoMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshTaskInfoService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * event_mesh_task_info + */ +@Service +public class EventMeshTaskInfoServiceImpl extends ServiceImpl + implements EventMeshTaskInfoService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java new file mode 100644 index 0000000000..5e49ba32ea --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/db/service/impl/EventMeshVerifyServiceImpl.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.db.service.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify; +import org.apache.eventmesh.admin.server.web.db.mapper.EventMeshVerifyMapper; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshVerifyService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +/** + * event_mesh_verify + */ +@Service +public class EventMeshVerifyServiceImpl extends ServiceImpl + implements EventMeshVerifyService { + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/BaseRequestHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/BaseRequestHandler.java new file mode 100644 index 0000000000..7f08f388c9 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/BaseRequestHandler.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
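The service classes above are thin MyBatis-Plus bindings: each interface extends IService for one entity, and the corresponding ServiceImpl wires in the mapper and inherits the generic CRUD operations, so the business services later in this patch never hand-write SQL for simple lookups. A sketch of typical usage (lambda column mapping assumed to follow standard MyBatis-Plus conventions; only getJobID() is confirmed by this excerpt):

    EventMeshJobInfo job = jobInfoService.getOne(
        Wrappers.<EventMeshJobInfo>lambdaQuery().eq(EventMeshJobInfo::getJobID, jobID));
    if (job != null) {
        jobInfoService.updateById(job); // inherited from ServiceImpl
    }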
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.handler; + +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.request.BaseRemoteRequest; +import org.apache.eventmesh.common.remote.response.BaseRemoteResponse; + +public abstract class BaseRequestHandler<T extends BaseRemoteRequest, S extends BaseRemoteResponse> { + + public BaseRemoteResponse handlerRequest(T request, Metadata metadata) { + return handler(request, metadata); + } + + protected abstract S handler(T request, Metadata metadata); +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/RequestHandlerFactory.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/RequestHandlerFactory.java new file mode 100644 index 0000000000..9375fb537e --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/RequestHandlerFactory.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
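BaseRequestHandler pins a request type and a response type per handler, and RequestHandlerFactory (the next file) discovers every concrete subclass and keys it by the simple name of that request type. A minimal sketch of a subclass; PingRequest is purely hypothetical and stands in for the real request types added later in this patch:

    @Component
    public class PingRequestHandler extends BaseRequestHandler<PingRequest, SimpleResponse> {

        @Override
        protected SimpleResponse handler(PingRequest request, Metadata metadata) {
            // validate the request, do the work, then answer
            return SimpleResponse.success();
        }
    }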
+ */ + +package org.apache.eventmesh.admin.server.web.handler; + +import org.apache.eventmesh.common.remote.request.BaseRemoteRequest; +import org.apache.eventmesh.common.remote.response.BaseRemoteResponse; + +import java.lang.reflect.ParameterizedType; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.stereotype.Component; + +@Component +public class RequestHandlerFactory implements ApplicationListener<ContextRefreshedEvent> { + + private final Map<String, BaseRequestHandler<BaseRemoteRequest, BaseRemoteResponse>> handlers = + new ConcurrentHashMap<>(); + + public BaseRequestHandler<BaseRemoteRequest, BaseRemoteResponse> getHandler(String type) { + return handlers.get(type); + } + + @Override + @SuppressWarnings({"rawtypes", "unchecked"}) + public void onApplicationEvent(ContextRefreshedEvent event) { + Map<String, BaseRequestHandler> beans = + event.getApplicationContext().getBeansOfType(BaseRequestHandler.class); + + for (BaseRequestHandler requestHandler : beans.values()) { + Class<?> clazz = requestHandler.getClass(); + boolean skip = false; + while (!clazz.getSuperclass().equals(BaseRequestHandler.class)) { + if (clazz.getSuperclass().equals(Object.class)) { + skip = true; + break; + } + clazz = clazz.getSuperclass(); + } + if (skip) { + continue; + } + + Class<?> c = (Class<?>) ((ParameterizedType) clazz.getGenericSuperclass()).getActualTypeArguments()[0]; + handlers.putIfAbsent(c.getSimpleName(), requestHandler); + } + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java new file mode 100644 index 0000000000..3392084c28 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchJobRequestHandler.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
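The factory walks each handler's class hierarchy until it reaches the direct BaseRequestHandler subclass, reads the first generic type argument, and registers the bean under that request type's simple class name. Callers therefore resolve handlers by request name, which is how AdminGrpcServer (later in this patch) dispatches payloads; a short sketch:

    // the key is the request's simple class name, e.g. "FetchJobRequest"
    BaseRequestHandler<BaseRemoteRequest, BaseRemoteResponse> handler =
        handlerFactory.getHandler(FetchJobRequest.class.getSimpleName());
    if (handler != null) {
        BaseRemoteResponse response = handler.handlerRequest(request, metadata);
    }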
+ */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.pojo.JobDetail; +import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.job.JobConnectorConfig; +import org.apache.eventmesh.common.remote.request.FetchJobRequest; +import org.apache.eventmesh.common.remote.response.FetchJobResponse; +import org.apache.eventmesh.common.utils.JsonUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class FetchJobRequestHandler extends BaseRequestHandler { + + @Autowired + JobInfoBizService jobInfoBizService; + + @Override + public FetchJobResponse handler(FetchJobRequest request, Metadata metadata) { + if (StringUtils.isBlank(request.getJobID())) { + return FetchJobResponse.failResponse(ErrorCode.BAD_REQUEST, "job id is empty"); + } + FetchJobResponse response = FetchJobResponse.successResponse(); + JobDetail detail = jobInfoBizService.getJobDetail(request.getJobID()); + if (detail == null) { + return response; + } + response.setId(detail.getJobID()); + JobConnectorConfig config = new JobConnectorConfig(); + config.setSourceConnectorConfig(JsonUtils.objectToMap(detail.getSourceDataSource().getConf())); + config.setSourceConnectorDesc(detail.getSourceConnectorDesc()); + config.setSinkConnectorConfig(JsonUtils.objectToMap(detail.getSinkDataSource().getConf())); + config.setSinkConnectorDesc(detail.getSinkConnectorDesc()); + response.setConnectorConfig(config); + response.setTransportType(detail.getTransportType()); + response.setState(detail.getState()); + response.setPosition(detail.getPositions()); + response.setType(detail.getJobType()); + return response; + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchPositionHandler.java new file mode 100644 index 0000000000..85ef0e6113 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/FetchPositionHandler.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
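One behavioral detail of FetchJobRequestHandler: a blank job id is rejected with failResponse(ErrorCode.BAD_REQUEST, ...), but an unknown job id returns successResponse() with no fields populated, so callers have to treat an unset id as "job not found". A hedged sketch (getters assumed to mirror the setters used above, and the map type assumed from JsonUtils.objectToMap):

    FetchJobResponse response = fetchJobRequestHandler.handler(request, metadata);
    if (response.getId() == null) {
        // success status but empty body: the requested job does not exist
    } else {
        Map<String, Object> sourceConfig = response.getConnectorConfig().getSourceConnectorConfig();
    }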
+ */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.web.db.DBThreadPool; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.service.position.PositionBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; +import org.apache.eventmesh.common.remote.response.FetchPositionResponse; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class FetchPositionHandler extends BaseRequestHandler { + + @Autowired + DBThreadPool executor; + + @Autowired + PositionBizService positionBizService; + + @Override + protected FetchPositionResponse handler(FetchPositionRequest request, Metadata metadata) { + if (request.getDataSourceType() == null) { + return FetchPositionResponse.failResponse(ErrorCode.BAD_REQUEST, "illegal data type, it's empty"); + } + if (StringUtils.isBlank(request.getJobID())) { + return FetchPositionResponse.failResponse(ErrorCode.BAD_REQUEST, "illegal job id, it's empty"); + } + return FetchPositionResponse.successResponse(positionBizService.getPosition(request, metadata)); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportHeartBeatHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportHeartBeatHandler.java new file mode 100644 index 0000000000..26a9b430b7 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportHeartBeatHandler.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
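FetchPositionHandler validates that both the data source type and the job id are present before delegating the lookup to PositionBizService.getPosition. A sketch of a well-formed call through the public entry point inherited from BaseRequestHandler (setters assumed to mirror the getters used above):

    FetchPositionRequest request = new FetchPositionRequest();
    request.setJobID("job-id");                // must be non-blank
    request.setDataSourceType(dataSourceType); // null is rejected with BAD_REQUEST
    BaseRemoteResponse response = fetchPositionHandler.handlerRequest(request, metadata);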
+ */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.web.db.DBThreadPool; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.service.heatbeat.RuntimeHeartbeatBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest; +import org.apache.eventmesh.common.remote.response.SimpleResponse; +import org.apache.eventmesh.common.utils.IPUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class ReportHeartBeatHandler extends BaseRequestHandler { + + @Autowired + RuntimeHeartbeatBizService heartbeatBizService; + + @Autowired + DBThreadPool executor; + + @Override + protected SimpleResponse handler(ReportHeartBeatRequest request, Metadata metadata) { + if (StringUtils.isBlank(request.getJobID()) || StringUtils.isBlank(request.getAddress())) { + log.info("request [{}] id or reporter address is empty", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "request id or reporter address is empty"); + } + executor.getExecutors().execute(() -> { + EventMeshRuntimeHeartbeat heartbeat = new EventMeshRuntimeHeartbeat(); + heartbeat.setJobID(request.getJobID()); + heartbeat.setReportTime(request.getReportedTimeStamp()); + heartbeat.setAdminAddr(IPUtils.getLocalAddress()); + heartbeat.setRuntimeAddr(request.getAddress()); + try { + if (!heartbeatBizService.saveOrUpdateByRuntimeAddress(heartbeat)) { + log.warn("save or update heartbeat request [{}] fail", request); + } + } catch (Exception e) { + log.warn("save or update heartbeat request [{}] fail", request, e); + } + }); + + return SimpleResponse.success(); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java new file mode 100644 index 0000000000..ea836ce7aa --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportJobRequestHandler.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
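ReportHeartBeatHandler acknowledges immediately and pushes the heartbeat upsert onto a DBThreadPool, so a slow database cannot stall the reporting runtime. DBThreadPool itself is not part of this excerpt; the only contract used is getExecutors().execute(...), so a stand-in consistent with that usage could look like the following (illustrative only, not the module's actual implementation):

    @Component
    public class DBThreadPool {

        // small dedicated pool for admin-server persistence work
        private final ThreadPoolExecutor executor = new ThreadPoolExecutor(
            2, 8, 60, TimeUnit.SECONDS, new LinkedBlockingQueue<>(1000));

        public ThreadPoolExecutor getExecutors() {
            return executor;
        }
    }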
+ */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.ReportJobRequest; +import org.apache.eventmesh.common.remote.response.SimpleResponse; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class ReportJobRequestHandler extends BaseRequestHandler<ReportJobRequest, SimpleResponse> { + + @Autowired + JobInfoBizService jobInfoBizService; + + @Override + public SimpleResponse handler(ReportJobRequest request, Metadata metadata) { + log.info("receive report job request:{}", request); + if (StringUtils.isBlank(request.getJobID())) { + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal job id, it's empty"); + } + EventMeshJobInfo jobInfo = jobInfoBizService.getJobInfo(request.getJobID()); + if (jobInfo == null) { + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal job id, not exist target job,jobID:" + request.getJobID()); + } + boolean result = jobInfoBizService.updateJobState(jobInfo.getJobID(), request.getState()); + if (result) { + return SimpleResponse.success(); + } else { + return SimpleResponse.fail(ErrorCode.INTERNAL_ERR, "update job failed."); + } + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java new file mode 100644 index 0000000000..7a30bef80a --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportPositionHandler.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.web.db.DBThreadPool; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.pojo.JobDetail; +import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.admin.server.web.service.position.PositionBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; +import org.apache.eventmesh.common.remote.response.SimpleResponse; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class ReportPositionHandler extends BaseRequestHandler<ReportPositionRequest, SimpleResponse> { + + @Autowired + private JobInfoBizService jobInfoBizService; + + @Autowired + private DBThreadPool executor; + + @Autowired + private PositionBizService positionBizService; + + @Override + protected SimpleResponse handler(ReportPositionRequest request, Metadata metadata) { + log.info("receive report position request:{}", request); + if (StringUtils.isBlank(request.getJobID())) { + log.info("request [{}] illegal job id", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal job id, it's empty"); + } + if (request.getDataSourceType() == null) { + log.info("request [{}] illegal data type", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal data type, it's empty"); + } + if (request.getRecordPositionList() == null || request.getRecordPositionList().isEmpty()) { + log.info("request [{}] illegal record position", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "illegal record position list, it's empty"); + } + + positionBizService.isValidatePositionRequest(request.getDataSourceType()); + + executor.getExecutors().execute(() -> { + try { + boolean reported = positionBizService.reportPosition(request, metadata); + if (reported) { + if (log.isDebugEnabled()) { + log.debug("handle runtime [{}] report data type [{}] job [{}] position [{}] success", + request.getAddress(), request.getDataSourceType(), request.getJobID(), + request.getRecordPositionList()); + } + } else { + log.warn("handle runtime [{}] report data type [{}] job [{}] position [{}] fail", + request.getAddress(), request.getDataSourceType(), request.getJobID(), + request.getRecordPositionList()); + } + } catch (Exception e) { + log.warn("handle position request fail, request [{}]", request, e); + } finally { + try { + JobDetail detail = jobInfoBizService.getJobDetail(request.getJobID()); + if (detail != null && !detail.getState().equals(request.getState()) && !jobInfoBizService.updateJobState(request.getJobID(), + request.getState())) { + log.warn("update job [{}] old state [{}] to [{}] fail", request.getJobID(), detail.getState(), request.getState()); + } + } catch (Exception e) { + log.warn("update job id [{}] type [{}] state [{}] fail", request.getJobID(), + request.getDataSourceType(), request.getState(), e); + } + } + }); + return SimpleResponse.success(); + } +} diff --git
a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportVerifyHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportVerifyHandler.java new file mode 100644 index 0000000000..9844f47c6a --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/handler/impl/ReportVerifyHandler.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.handler.impl; + +import org.apache.eventmesh.admin.server.AdminServerProperties; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.admin.server.web.service.verify.VerifyBizService; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.ReportVerifyRequest; +import org.apache.eventmesh.common.remote.response.SimpleResponse; + +import org.apache.commons.lang3.StringUtils; + +import java.util.List; +import java.util.Random; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Component; +import org.springframework.web.client.RestTemplate; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class ReportVerifyHandler extends BaseRequestHandler { + + @Autowired + private VerifyBizService verifyService; + + @Autowired + JobInfoBizService jobInfoBizService; + + @Autowired + private AdminServerProperties properties; + + @Override + protected SimpleResponse handler(ReportVerifyRequest request, Metadata metadata) { + if (StringUtils.isAnyBlank(request.getTaskID(), request.getJobID(), request.getRecordSig(), request.getRecordID(), + request.getConnectorStage())) { + log.info("report verify request [{}] illegal", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "request task id,job id, sign, record id or stage is none"); + } + + String jobID = request.getJobID(); + EventMeshJobInfo jobInfo = jobInfoBizService.getJobInfo(jobID); + if (jobInfo == null || StringUtils.isBlank(jobInfo.getFromRegion())) { + log.info("report verify job info [{}] illegal", request); + return SimpleResponse.fail(ErrorCode.BAD_REQUEST, "job info is null or fromRegion is blank,job id:" + jobID); + } + + String fromRegion = jobInfo.getFromRegion(); + String localRegion = properties.getRegion(); + log.info("report verify request from region:{},localRegion:{},request:{}", fromRegion, localRegion, request); + if 
(fromRegion.equalsIgnoreCase(localRegion)) { + return verifyService.reportVerifyRecord(request) ? SimpleResponse.success() : SimpleResponse.fail(ErrorCode.INTERNAL_ERR, "save verify " + + "request fail"); + } else { + log.info("start transfer report verify to from region admin server. from region:{}", fromRegion); + List adminServerList = properties.getAdminServerList().get(fromRegion); + if (adminServerList == null || adminServerList.isEmpty()) { + throw new RuntimeException("No admin server available for region: " + fromRegion); + } + String targetUrl = adminServerList.get(new Random().nextInt(adminServerList.size())) + "/eventmesh/admin/reportVerify"; + RestTemplate restTemplate = new RestTemplate(); + ResponseEntity response = restTemplate.postForEntity(targetUrl, request, String.class); + if (!response.getStatusCode().is2xxSuccessful()) { + return SimpleResponse.fail(ErrorCode.INTERNAL_ERR, + "save verify request fail,code:" + response.getStatusCode() + ",msg:" + response.getBody()); + } + return SimpleResponse.success(); + } + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java new file mode 100644 index 0000000000..0e2fa64878 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/JobDetail.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
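ReportVerifyHandler stores the verify record locally when the job's fromRegion matches the admin server's own region, and otherwise forwards the request to a randomly chosen admin server of the originating region. The forwarding path builds a new RestTemplate and a new Random on every call; a possible tightening (a sketch of an alternative, not part of this patch) is a shared client plus ThreadLocalRandom:

    private final RestTemplate restTemplate = new RestTemplate(); // thread-safe, reusable

    private String pickAdminServer(List<String> adminServerList) {
        return adminServerList.get(ThreadLocalRandom.current().nextInt(adminServerList.size()));
    }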
+ */ + +package org.apache.eventmesh.admin.server.web.pojo; + +import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.TransportType; +import org.apache.eventmesh.common.remote.datasource.DataSource; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.Date; +import java.util.List; + +import lombok.Data; + +@Data +public class JobDetail { + private Integer id; + + private String jobID; + + private String jobDesc; + + private String taskID; + + private TaskState state; + + private JobType jobType; + + private Date createTime; + + private Date updateTime; + + private String createUid; + + private String updateUid; + + // job request from region + private String fromRegion; + + // job actually running region + private String runningRegion; + + private DataSource sourceDataSource; + + private String sourceConnectorDesc; + + private DataSource sinkDataSource; + + private String sinkConnectorDesc; + + private TransportType transportType; + + private List positions; +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/TaskDetail.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/TaskDetail.java new file mode 100644 index 0000000000..86f5342f35 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/pojo/TaskDetail.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.pojo; + +/** + * Description: + */ +public class TaskDetail { +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/AdminGrpcServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/AdminGrpcServer.java new file mode 100644 index 0000000000..bc822ad6c3 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/AdminGrpcServer.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service; + +import org.apache.eventmesh.admin.server.AdminServerRuntimeException; +import org.apache.eventmesh.admin.server.web.handler.BaseRequestHandler; +import org.apache.eventmesh.admin.server.web.handler.RequestHandlerFactory; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.payload.PayloadUtil; +import org.apache.eventmesh.common.remote.request.BaseRemoteRequest; +import org.apache.eventmesh.common.remote.response.BaseRemoteResponse; +import org.apache.eventmesh.common.remote.response.SimpleResponse; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import io.grpc.stub.ServerCallStreamObserver; +import io.grpc.stub.StreamObserver; + +import lombok.extern.slf4j.Slf4j; + +@Service +@Slf4j +public class AdminGrpcServer extends AdminServiceGrpc.AdminServiceImplBase { + + @Autowired + RequestHandlerFactory handlerFactory; + + private Payload process(Payload value) { + if (value == null || StringUtils.isBlank(value.getMetadata().getType())) { + return PayloadUtil.from(SimpleResponse.fail(ErrorCode.BAD_REQUEST, "bad request: type not exists")); + } + try { + BaseRequestHandler<BaseRemoteRequest, BaseRemoteResponse> handler = handlerFactory.getHandler(value.getMetadata().getType()); + if (handler == null) { + return PayloadUtil.from(SimpleResponse.fail(ErrorCode.BAD_REQUEST, "not match any request handler")); + } + BaseRemoteResponse response = handler.handlerRequest((BaseRemoteRequest) PayloadUtil.parse(value), value.getMetadata()); + if (response == null) { + log.warn("received request type [{}] handler [{}], then reply empty response", value.getMetadata().getType(), + handler.getClass().getName()); + response = SimpleResponse.success(); + } + return PayloadUtil.from(response); + } catch (Exception e) { + log.warn("process payload {} fail", value.getMetadata().getType(), e); + if (e instanceof AdminServerRuntimeException) { + return PayloadUtil.from(SimpleResponse.fail(((AdminServerRuntimeException) e).getCode(), e.getMessage())); + } + return PayloadUtil.from(SimpleResponse.fail(ErrorCode.INTERNAL_ERR, "admin server internal err")); + } + } + + public StreamObserver<Payload> invokeBiStream(StreamObserver<Payload> responseObserver) { + return new StreamObserver<Payload>() { + @Override + public void onNext(Payload value) { + Payload payload = process(value); + if (payload == null) { + return; + } + responseObserver.onNext(payload); + } + + @Override + public void onError(Throwable t) { + if (responseObserver instanceof ServerCallStreamObserver) { + if (!((ServerCallStreamObserver<Payload>) responseObserver).isCancelled()) { + log.warn("admin gRPC server fail", t); + } + } + } + + @Override + public void onCompleted() { + responseObserver.onCompleted(); + } + }; + } + + public void invoke(Payload request, StreamObserver<Payload> responseObserver) { + responseObserver.onNext(process(request)); + responseObserver.onCompleted(); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/AdminServer.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/AdminServer.java new file mode 100644 index 0000000000..fd7582800d
--- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/AdminServer.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service; + +import org.apache.eventmesh.admin.server.AdminServerProperties; +import org.apache.eventmesh.admin.server.AdminServerRuntimeException; +import org.apache.eventmesh.common.ComponentLifeCycle; +import org.apache.eventmesh.common.Constants; +import org.apache.eventmesh.common.config.CommonConfiguration; +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.utils.IPUtils; +import org.apache.eventmesh.registry.RegisterServerInfo; +import org.apache.eventmesh.registry.RegistryFactory; +import org.apache.eventmesh.registry.RegistryService; + +import org.apache.commons.lang3.StringUtils; + +import javax.annotation.PostConstruct; + +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.ApplicationListener; +import org.springframework.stereotype.Service; + +import lombok.extern.slf4j.Slf4j; + +@Service +@Slf4j +public class AdminServer implements ComponentLifeCycle, ApplicationListener { + private final RegistryService registryService; + + private final RegisterServerInfo adminServeInfo; + + private final CommonConfiguration configuration; + + public AdminServer(AdminServerProperties properties) { + configuration = + ConfigService.getInstance().buildConfigInstance(CommonConfiguration.class); + if (configuration == null) { + throw new AdminServerRuntimeException(ErrorCode.STARTUP_CONFIG_MISS, "common configuration file miss"); + } + this.adminServeInfo = new RegisterServerInfo(); + + adminServeInfo.setHealth(true); + adminServeInfo.setAddress(IPUtils.getLocalAddress() + ":" + properties.getPort()); + String name = Constants.ADMIN_SERVER_REGISTRY_NAME; + if (StringUtils.isNotBlank(properties.getServiceName())) { + name = properties.getServiceName(); + } + adminServeInfo.setServiceName(name); + registryService = RegistryFactory.getInstance(configuration.getEventMeshRegistryPluginType()); + } + + @Override + @PostConstruct + public void start() { + if (configuration.isEventMeshRegistryPluginEnabled()) { + registryService.init(); + } + } + + @Override + public void stop() { + if (configuration.isEventMeshRegistryPluginEnabled()) { + registryService.unRegister(adminServeInfo); + try { + Thread.sleep(3000); + } catch (InterruptedException ignore) { + log.warn("interrupted when sleep"); + Thread.currentThread().interrupt(); + } + registryService.shutdown(); + } + } + + @Override + public void onApplicationEvent(ApplicationReadyEvent event) { + if 
(configuration.isEventMeshRegistryPluginEnabled()) { + log.info("application is started and registry plugin is enabled, it's will register admin self"); + registryService.register(adminServeInfo); + } + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java new file mode 100644 index 0000000000..4d2d670100 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/datasource/DataSourceBizService.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.datasource; + +import org.apache.eventmesh.admin.server.AdminServerRuntimeException; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshDataSourceService; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.request.CreateOrUpdateDataSourceReq; +import org.apache.eventmesh.common.utils.JsonUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class DataSourceBizService { + + @Autowired + private EventMeshDataSourceService dataSourceService; + + public EventMeshDataSource createDataSource(CreateOrUpdateDataSourceReq dataSource) { + EventMeshDataSource entity = new EventMeshDataSource(); + entity.setConfiguration(JsonUtils.toJSONString(dataSource.getConfig())); + entity.setConfigurationClass(dataSource.getConfigClass()); + entity.setDataType(dataSource.getType().name()); + entity.setCreateUid(dataSource.getOperator()); + entity.setUpdateUid(dataSource.getOperator()); + entity.setRegion(dataSource.getRegion()); + entity.setDescription(dataSource.getDesc()); + if (dataSourceService.save(entity)) { + return entity; + } + throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "save data source fail"); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/heatbeat/RuntimeHeartbeatBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/heatbeat/RuntimeHeartbeatBizService.java new file mode 100644 index 0000000000..95dff6e5b3 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/heatbeat/RuntimeHeartbeatBizService.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
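AdminGrpcServer is the single gRPC entry point: every request arrives as a Payload whose metadata type carries the request's simple class name, which is exactly the key RequestHandlerFactory registers handlers under, while AdminServer handles registry registration once the Spring context is ready. A hedged client-side sketch of the unary path; it assumes PayloadUtil.from(...) can wrap requests as well as responses, which this excerpt only shows for responses:

    ManagedChannel channel = ManagedChannelBuilder.forAddress(adminHost, adminPort)
        .usePlaintext()
        .build();
    AdminServiceGrpc.AdminServiceBlockingStub stub = AdminServiceGrpc.newBlockingStub(channel);

    FetchJobRequest request = new FetchJobRequest();
    request.setJobID("job-id"); // setter assumed, mirroring getJobID()
    Payload reply = stub.invoke(PayloadUtil.from(request)); // handled by AdminGrpcServer.invoke(...)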
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.heatbeat; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHistory; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshRuntimeHeartbeatService; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshRuntimeHistoryService; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.toolkit.Wrappers; + +import lombok.extern.slf4j.Slf4j; + +/** + * for table 'event_mesh_runtime_heartbeat' db operation 2024-05-14 17:15:03 + */ +@Service +@Slf4j +public class RuntimeHeartbeatBizService { + + @Autowired + EventMeshRuntimeHistoryService historyService; + + @Autowired + EventMeshRuntimeHeartbeatService heartbeatService; + + public boolean saveOrUpdateByRuntimeAddress(EventMeshRuntimeHeartbeat entity) { + EventMeshRuntimeHeartbeat old = heartbeatService.getOne(Wrappers.query().eq( + "runtimeAddr", + entity.getRuntimeAddr())); + if (old == null) { + return heartbeatService.save(entity); + } else { + if (Long.parseLong(old.getReportTime()) >= Long.parseLong(entity.getReportTime())) { + log.info("update heartbeat record ignore, current report time late than db, job [{}], remote [{}]", entity.getJobID(), + entity.getRuntimeAddr()); + return true; + } + try { + return heartbeatService.update(entity, Wrappers.update().eq("updateTime", + old.getUpdateTime())); + } finally { + if (old.getJobID() != null && !old.getJobID().equals(entity.getJobID())) { + EventMeshRuntimeHistory history = new EventMeshRuntimeHistory(); + history.setAddress(old.getAdminAddr()); + history.setJob(old.getJobID()); + try { + historyService.save(history); + } catch (Exception e) { + log.warn("save runtime job changed history fail", e); + } + + log.info("runtime [{}] changed job, old job [{}], now [{}]", entity.getRuntimeAddr(), old.getJobID(), + entity.getJobID()); + } + } + } + } +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java new file mode 100644 index 0000000000..76df629e69 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/job/JobInfoBizService.java @@ -0,0 +1,273 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
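RuntimeHeartbeatBizService implements a last-writer-wins upsert: a report whose reportTime is not newer than the stored row is ignored, the update is guarded by the previous row's updateTime so concurrent reporters do not silently overwrite each other, and a job change is recorded in the runtime history table. Note that the string keys passed to Wrappers.query()/update() are database column names; an equivalent lambda form (a sketch, assuming standard MyBatis-Plus column mapping on the entity) avoids that coupling:

    EventMeshRuntimeHeartbeat old = heartbeatService.getOne(
        Wrappers.<EventMeshRuntimeHeartbeat>lambdaQuery()
            .eq(EventMeshRuntimeHeartbeat::getRuntimeAddr, entity.getRuntimeAddr()));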
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.job; + +import org.apache.eventmesh.admin.server.AdminServerProperties; +import org.apache.eventmesh.admin.server.AdminServerRuntimeException; +import org.apache.eventmesh.admin.server.web.db.DBThreadPool; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshDataSource; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshRuntimeHeartbeat; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshDataSourceService; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoExtService; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshJobInfoService; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshRuntimeHeartbeatService; +import org.apache.eventmesh.admin.server.web.pojo.JobDetail; +import org.apache.eventmesh.admin.server.web.service.datasource.DataSourceBizService; +import org.apache.eventmesh.admin.server.web.service.position.PositionBizService; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.remote.JobState; +import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.TransportType; +import org.apache.eventmesh.common.remote.datasource.DataSource; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.common.remote.request.CreateOrUpdateDataSourceReq; +import org.apache.eventmesh.common.utils.JsonUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.time.Duration; +import java.util.LinkedList; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.TimeUnit; + +import javax.annotation.PostConstruct; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.toolkit.Wrappers; + +import lombok.extern.slf4j.Slf4j; + +/** + * for table 'event_mesh_job_info' db operation + */ +@Service +@Slf4j +public class JobInfoBizService { + + @Autowired + private EventMeshJobInfoService jobInfoService; + + @Autowired + private EventMeshJobInfoExtService jobInfoExtService; + + @Autowired + private DataSourceBizService dataSourceBizService; + + @Autowired + private EventMeshDataSourceService dataSourceService; + + @Autowired + private PositionBizService positionBizService; + + @Autowired + private AdminServerProperties properties; + + @Autowired + EventMeshRuntimeHeartbeatService heartbeatService; + + private final long heatBeatPeriod = Duration.ofMillis(5000).toMillis(); + + @Autowired + DBThreadPool executor; + + @PostConstruct + public void init() { + log.info("init check job 
info scheduled task."); + executor.getCheckExecutor().scheduleAtFixedRate(new Runnable() { + @Override + public void run() { + checkJobInfo(); + } + }, 10, 10, TimeUnit.SECONDS); + } + + public boolean updateJobState(String jobID, TaskState state) { + if (jobID == null || state == null) { + return false; + } + EventMeshJobInfo jobInfo = new EventMeshJobInfo(); + jobInfo.setJobState(state.name()); + return jobInfoService.update(jobInfo, Wrappers.update().eq("jobID", jobID).ne("jobState", JobState.DELETE.name())); + } + + public boolean updateJobState(String jobID, JobState state) { + if (jobID == null || state == null) { + return false; + } + EventMeshJobInfo jobInfo = new EventMeshJobInfo(); + jobInfo.setJobState(state.name()); + return jobInfoService.update(jobInfo, Wrappers.update().eq("jobID", jobID).ne("jobState", JobState.DELETE.name())); + } + + @Transactional + public List createJobs(List jobs) { + if (jobs == null || jobs.isEmpty() || jobs.stream().anyMatch(job -> StringUtils.isBlank(job.getTaskID()))) { + log.warn("when create jobs, task id is empty or jobs config is empty "); + return null; + } + List entityList = new LinkedList<>(); + + for (JobDetail job : jobs) { + // if running region not equal with admin region continue + if (!job.getRunningRegion().equals(properties.getRegion())) { + continue; + } + EventMeshJobInfo entity = new EventMeshJobInfo(); + entity.setJobState(TaskState.INIT.name()); + entity.setTaskID(job.getTaskID()); + entity.setJobType(job.getJobType().name()); + entity.setJobDesc(job.getJobDesc()); + String jobID = UUID.randomUUID().toString(); + entity.setJobID(jobID); + entity.setTransportType(job.getTransportType().name()); + entity.setCreateUid(job.getCreateUid()); + entity.setUpdateUid(job.getUpdateUid()); + entity.setFromRegion(job.getFromRegion()); + entity.setRunningRegion(job.getRunningRegion()); + CreateOrUpdateDataSourceReq source = new CreateOrUpdateDataSourceReq(); + source.setType(job.getTransportType().getSrc()); + source.setOperator(job.getCreateUid()); + source.setRegion(job.getSourceDataSource().getRegion()); + source.setDesc(job.getSourceConnectorDesc()); + Config sourceConfig = job.getSourceDataSource().getConf(); + source.setConfig(sourceConfig); + source.setConfigClass(job.getSourceDataSource().getConfClazz().getName()); + EventMeshDataSource createdSource = dataSourceBizService.createDataSource(source); + entity.setSourceData(createdSource.getId()); + + CreateOrUpdateDataSourceReq sink = new CreateOrUpdateDataSourceReq(); + sink.setType(job.getTransportType().getDst()); + sink.setOperator(job.getCreateUid()); + sink.setRegion(job.getSinkDataSource().getRegion()); + sink.setDesc(job.getSinkConnectorDesc()); + Config sinkConfig = job.getSinkDataSource().getConf(); + sink.setConfig(sinkConfig); + sink.setConfigClass(job.getSinkDataSource().getConfClazz().getName()); + EventMeshDataSource createdSink = dataSourceBizService.createDataSource(sink); + entity.setTargetData(createdSink.getId()); + + entityList.add(entity); + } + int changed = jobInfoExtService.batchSave(entityList); + if (changed != jobs.size()) { + throw new AdminServerRuntimeException(ErrorCode.INTERNAL_ERR, String.format("create [%d] jobs of not match expect [%d]", + changed, jobs.size())); + } + return entityList; + } + + + public JobDetail getJobDetail(String jobID) { + if (jobID == null) { + return null; + } + EventMeshJobInfo job = jobInfoService.getOne(Wrappers.query().eq("jobID", jobID)); + if (job == null) { + return null; + } + JobDetail detail = new JobDetail(); + 
detail.setTaskID(job.getTaskID()); + detail.setJobID(job.getJobID()); + EventMeshDataSource source = dataSourceService.getById(job.getSourceData()); + EventMeshDataSource target = dataSourceService.getById(job.getTargetData()); + if (source != null) { + if (!StringUtils.isBlank(source.getConfiguration())) { + try { + DataSource sourceDataSource = new DataSource(); + Class configClass = Class.forName(source.getConfigurationClass()); + sourceDataSource.setConf((Config) JsonUtils.parseObject(source.getConfiguration(), configClass)); + detail.setSourceDataSource(sourceDataSource); + } catch (Exception e) { + log.warn("parse source config id [{}] fail", job.getSourceData(), e); + throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "illegal source data source config"); + } + } + detail.setSourceConnectorDesc(source.getDescription()); + if (source.getDataType() != null) { + detail.setPositions(positionBizService.getPositionByJobID(job.getJobID(), + DataSourceType.getDataSourceType(source.getDataType()))); + + } + } + if (target != null) { + if (!StringUtils.isBlank(target.getConfiguration())) { + try { + DataSource sinkDataSource = new DataSource(); + Class configClass = Class.forName(target.getConfigurationClass()); + sinkDataSource.setConf((Config) JsonUtils.parseObject(target.getConfiguration(), configClass)); + detail.setSinkDataSource(sinkDataSource); + } catch (Exception e) { + log.warn("parse sink config id [{}] fail", job.getTargetData(), e); + throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "illegal target data sink config"); + } + } + detail.setSinkConnectorDesc(target.getDescription()); + } + + TaskState state = TaskState.fromIndex(job.getJobState()); + if (state == null) { + throw new AdminServerRuntimeException(ErrorCode.BAD_DB_DATA, "illegal job state in db"); + } + detail.setState(state); + detail.setTransportType(TransportType.getTransportType(job.getTransportType())); + detail.setJobType(JobType.fromIndex(job.getJobType())); + detail.setJobDesc(job.getJobDesc()); + return detail; + } + + public EventMeshJobInfo getJobInfo(String jobID) { + if (jobID == null) { + return null; + } + EventMeshJobInfo job = jobInfoService.getOne(Wrappers.query().eq("jobID", jobID)); + return job; + } + + public void checkJobInfo() { + List eventMeshJobInfoList = jobInfoService.list(Wrappers.query().eq("jobState", JobState.RUNNING.name())); + log.info("start checking job info, jobs to check: {}", eventMeshJobInfoList.size()); + for (EventMeshJobInfo jobInfo : eventMeshJobInfoList) { + String jobID = jobInfo.getJobID(); + if (StringUtils.isEmpty(jobID)) { + continue; + } + EventMeshRuntimeHeartbeat heartbeat = heartbeatService.getOne(Wrappers.query().eq("jobID", jobID)); + if (heartbeat == null) { + continue; + } + // if the last heartbeat update time is delayed by more than three periods, log a heartbeat delay warning + long currentTimeStamp = System.currentTimeMillis(); + if (currentTimeStamp - heartbeat.getUpdateTime().getTime() > 3 * heatBeatPeriod) { + log.warn("current job heartbeat is delayed. jobID:{}, currentTimeStamp:{}, last update time:{}", jobID, currentTimeStamp, + heartbeat.getUpdateTime()); + } + } + } + +} + + + + diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/IFetchPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/IFetchPositionHandler.java new file mode 100644 index 0000000000..2c039062f3 --- /dev/null +++
b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/IFetchPositionHandler.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.position; + +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; + +import java.util.List; + +/** + * IFetchPositionHandler + */ +public interface IFetchPositionHandler { + + List handler(FetchPositionRequest request, Metadata metadata); +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/IReportPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/IReportPositionHandler.java new file mode 100644 index 0000000000..75f392e395 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/IReportPositionHandler.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
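JobInfoBizService.checkJobInfo() above flags a RUNNING job whose latest heartbeat is older than three reporting periods (the patch uses a 5-second period via heatBeatPeriod). A minimal standalone sketch of that staleness rule, with the period hard-coded for illustration:

```java
import java.time.Duration;

// Standalone sketch of the staleness rule in JobInfoBizService.checkJobInfo():
// a RUNNING job is flagged when its last heartbeat update is older than three reporting periods.
public final class HeartbeatStalenessCheck {

    private static final long HEARTBEAT_PERIOD_MS = Duration.ofSeconds(5).toMillis();

    static boolean isDelayed(long lastUpdateMillis, long nowMillis) {
        return nowMillis - lastUpdateMillis > 3 * HEARTBEAT_PERIOD_MS;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(isDelayed(now - 10_000, now)); // false: within the 15s window
        System.out.println(isDelayed(now - 20_000, now)); // true: older than 3 periods
    }
}
```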
+ */ + +package org.apache.eventmesh.admin.server.web.service.position; + +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; + +/** + * IReportPositionHandler + */ +public interface IReportPositionHandler { + + boolean handler(ReportPositionRequest request, Metadata metadata); +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionBizService.java new file mode 100644 index 0000000000..c40fc9e7e5 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionBizService.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.position; + +import org.apache.eventmesh.admin.server.AdminServerRuntimeException; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; + +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import lombok.extern.slf4j.Slf4j; + +@Service +@Slf4j +public class PositionBizService { + + @Autowired + PositionHandlerFactory factory; + + // called isValidateReportRequest before call this + public List getPosition(FetchPositionRequest request, Metadata metadata) { + if (request == null) { + return null; + } + isValidatePositionRequest(request.getDataSourceType()); + IFetchPositionHandler handler = factory.getHandler(request.getDataSourceType()); + return handler.handler(request, metadata); + } + + public void isValidatePositionRequest(DataSourceType type) { + if (type == null) { + throw new AdminServerRuntimeException(ErrorCode.BAD_REQUEST, "data source type is null"); + } + IReportPositionHandler handler = factory.getHandler(type); + if (handler == null) { + throw new AdminServerRuntimeException(ErrorCode.BAD_REQUEST, + String.format("illegal data base type [%s], it not match any report position handler", type)); + } + } + + // called isValidateReportRequest before call this + public boolean reportPosition(ReportPositionRequest request, Metadata metadata) { + if (request == null) { + return false; + } + isValidatePositionRequest(request.getDataSourceType()); + IReportPositionHandler 
handler = factory.getHandler(request.getDataSourceType()); + return handler.handler(request, metadata); + } + + public List getPositionByJobID(String jobID, DataSourceType type) { + if (jobID == null || type == null) { + return null; + } + isValidatePositionRequest(type); + PositionHandler handler = factory.getHandler(type); + FetchPositionRequest request = new FetchPositionRequest(); + request.setJobID(jobID); + return handler.handler(request, null); + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionHandler.java new file mode 100644 index 0000000000..e09c1a3837 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionHandler.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.position; + +import org.apache.eventmesh.common.remote.datasource.DataSourceType; + +public abstract class PositionHandler implements IReportPositionHandler, IFetchPositionHandler { + + protected abstract DataSourceType getSourceType(); +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionHandlerFactory.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionHandlerFactory.java new file mode 100644 index 0000000000..c2065f80f4 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/PositionHandlerFactory.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.service.position; + +import org.apache.eventmesh.common.remote.datasource.DataSourceType; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class PositionHandlerFactory implements ApplicationListener { + + private final Map handlers = + new ConcurrentHashMap<>(); + + public PositionHandler getHandler(DataSourceType type) { + return handlers.get(type); + } + + @Override + public void onApplicationEvent(ContextRefreshedEvent event) { + Map beans = + event.getApplicationContext().getBeansOfType(PositionHandler.class); + + for (PositionHandler handler : beans.values()) { + DataSourceType type = handler.getSourceType(); + if (handlers.containsKey(type)) { + log.warn("data source type [{}] handler already exists", type); + continue; + } + handlers.put(type, handler); + } + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java new file mode 100644 index 0000000000..b8d536f388 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/HttpPositionHandler.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
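Adding support for another data source type only requires a Spring bean that extends PositionHandler; PositionHandlerFactory collects all such beans on ContextRefreshedEvent and indexes them by DataSourceType, skipping duplicates with a warning. The skeleton below is illustrative and not part of the patch; it assumes the classes above are on the classpath and reuses DataSourceType.HTTP purely as a placeholder return value (a real handler would declare its own type).

```java
import org.apache.eventmesh.admin.server.web.service.position.PositionHandler;
import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata;
import org.apache.eventmesh.common.remote.datasource.DataSourceType;
import org.apache.eventmesh.common.remote.offset.RecordPosition;
import org.apache.eventmesh.common.remote.request.FetchPositionRequest;
import org.apache.eventmesh.common.remote.request.ReportPositionRequest;

import java.util.Collections;
import java.util.List;

import org.springframework.stereotype.Component;

// Skeleton of an additional position handler; discovered automatically by PositionHandlerFactory.
@Component
public class DemoPositionHandler extends PositionHandler {

    @Override
    protected DataSourceType getSourceType() {
        // Placeholder only: HTTP is already claimed by HttpPositionHandler,
        // so the factory would log a warning and skip this bean.
        return DataSourceType.HTTP;
    }

    @Override
    public boolean handler(ReportPositionRequest request, Metadata metadata) {
        // persist the reported positions here
        return true;
    }

    @Override
    public List<RecordPosition> handler(FetchPositionRequest request, Metadata metadata) {
        // load persisted positions here
        return Collections.emptyList();
    }
}
```

PositionBizService then reaches a registered handler through factory.getHandler(type), as shown in the class above.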
+ */ + +package org.apache.eventmesh.admin.server.web.service.position.impl; + +import org.apache.eventmesh.admin.server.web.db.service.EventMeshPositionReporterHistoryService; +import org.apache.eventmesh.admin.server.web.service.position.PositionHandler; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class HttpPositionHandler extends PositionHandler { + + @Autowired + EventMeshPositionReporterHistoryService historyService; + + @Override + protected DataSourceType getSourceType() { + return DataSourceType.HTTP; + } + + @Override + public boolean handler(ReportPositionRequest request, Metadata metadata) { + log.info("receive http position report request:{}", request); + // mock wemq postion report store + return true; + } + + @Override + public List handler(FetchPositionRequest request, Metadata metadata) { + // mock http position fetch request + List recordPositionList = new ArrayList<>(); + return recordPositionList; + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/MysqlPositionHandler.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/MysqlPositionHandler.java new file mode 100644 index 0000000000..352ba57e96 --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/position/impl/MysqlPositionHandler.java @@ -0,0 +1,192 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.admin.server.web.service.position.impl; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshMysqlPosition; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshPositionReporterHistory; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshMysqlPositionService; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshPositionReporterHistoryService; +import org.apache.eventmesh.admin.server.web.service.position.PositionHandler; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordOffset; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordPartition; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; +import org.apache.eventmesh.common.utils.JsonUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.locks.LockSupport; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.stereotype.Component; + +import com.baomidou.mybatisplus.core.toolkit.Wrappers; + +import lombok.extern.slf4j.Slf4j; + +@Component +@Slf4j +public class MysqlPositionHandler extends PositionHandler { + private static final int RETRY_TIMES = 3; + + private final long retryPeriod = Duration.ofMillis(500).toNanos(); + + @Autowired + EventMeshMysqlPositionService positionService; + + @Autowired + EventMeshPositionReporterHistoryService historyService; + + @Override + protected DataSourceType getSourceType() { + return DataSourceType.MYSQL; + } + + private boolean isNotForward(EventMeshMysqlPosition now, EventMeshMysqlPosition old) { + if (StringUtils.isNotBlank(old.getJournalName()) && old.getJournalName().equals(now.getJournalName()) + && old.getPosition() >= now.getPosition()) { + log.info("job [{}] report position [{}] by runtime [{}] less than db position [{}] journal name [{}] by [{}]", + now.getJobID(), now.getPosition(), now.getAddress(), now.getJournalName(), old.getPosition(), old.getAddress()); + return true; + } + return false; + } + + public boolean saveOrUpdateByJob(EventMeshMysqlPosition position) { + for (int i = 0; i < RETRY_TIMES; i++) { + EventMeshMysqlPosition old = positionService.getOne(Wrappers.query().eq("jobId", + position.getJobID())); + if (old == null) { + try { + return positionService.save(position); + } catch (DuplicateKeyException e) { + log.warn("current insert position fail, it will retry in 500ms"); + LockSupport.parkNanos(retryPeriod); + continue; + } catch (Exception e) { + log.warn("insert position fail catch unknown exception", e); + return false; + } + } + + if (isNotForward(position, old)) { + return true; + } + try { + if (!positionService.update(position, Wrappers.update().eq("updateTime", + old.getUpdateTime()).eq("jobID", old.getJobID()))) { + log.warn("update position [{}] fail, maybe current update. 
it will retry in 500ms", position); + LockSupport.parkNanos(retryPeriod); + continue; + } + return true; + } finally { + if (old.getAddress() != null && !old.getAddress().equals(position.getAddress())) { + EventMeshPositionReporterHistory history = new EventMeshPositionReporterHistory(); + history.setRecord(JsonUtils.toJSONString(position)); + history.setJob(old.getJobID()); + history.setAddress(old.getAddress()); + log.info("job [{}] position reporter changed old [{}], now [{}]", position.getJobID(), old, position); + try { + historyService.save(history); + } catch (Exception e) { + log.warn("save job [{}] mysql position reporter changed history fail, now reporter [{}], old [{}]", position.getJobID(), + position.getAddress(), old.getAddress(), e); + } + } + } + } + return false; + } + + @Override + public boolean handler(ReportPositionRequest request, Metadata metadata) { + + try { + List recordPositionList = request.getRecordPositionList(); + RecordPosition recordPosition = recordPositionList.get(0); + if (recordPosition == null || recordPosition.getRecordPartition() == null || recordPosition.getRecordOffset() == null) { + log.warn("report mysql position, but record-partition/partition/offset is null"); + return false; + } + if (!(recordPosition.getRecordPartition() instanceof CanalRecordPartition)) { + log.warn("report mysql position, but record partition class [{}] not match [{}]", + recordPosition.getRecordPartition().getRecordPartitionClass(), CanalRecordPartition.class); + return false; + } + if (!(recordPosition.getRecordOffset() instanceof CanalRecordOffset)) { + log.warn("report mysql position, but record offset class [{}] not match [{}]", + recordPosition.getRecordOffset().getRecordOffsetClass(), CanalRecordOffset.class); + return false; + } + EventMeshMysqlPosition position = new EventMeshMysqlPosition(); + position.setJobID(request.getJobID()); + position.setAddress(request.getAddress()); + CanalRecordOffset offset = (CanalRecordOffset) recordPosition.getRecordOffset(); + if (offset != null) { + position.setPosition(offset.getOffset()); + position.setGtid(offset.getGtid()); + position.setCurrentGtid(offset.getCurrentGtid()); + } + CanalRecordPartition partition = (CanalRecordPartition) recordPosition.getRecordPartition(); + if (partition != null) { + position.setServerUUID(partition.getServerUUID()); + position.setTimestamp(partition.getTimeStamp()); + position.setJournalName(partition.getJournalName()); + } + if (!saveOrUpdateByJob(position)) { + log.warn("update job position fail [{}]", request); + return false; + } + return true; + } catch (Exception e) { + log.warn("save position job [{}] fail", request.getJobID(), e); + } + + return false; + } + + @Override + public List handler(FetchPositionRequest request, Metadata metadata) { + List positionList = positionService.list(Wrappers.query().eq("jobID", + request.getJobID())); + List recordPositionList = new ArrayList<>(); + for (EventMeshMysqlPosition position : positionList) { + CanalRecordPartition partition = new CanalRecordPartition(); + partition.setTimeStamp(position.getTimestamp()); + partition.setJournalName(position.getJournalName()); + partition.setServerUUID(position.getServerUUID()); + RecordPosition recordPosition = new RecordPosition(); + recordPosition.setRecordPartition(partition); + CanalRecordOffset offset = new CanalRecordOffset(); + offset.setOffset(position.getPosition()); + offset.setGtid(position.getGtid()); + offset.setCurrentGtid(position.getCurrentGtid()); + recordPosition.setRecordOffset(offset); + 
recordPositionList.add(recordPosition); + } + return recordPositionList; + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java new file mode 100644 index 0000000000..7bc16ba4ac --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/task/TaskBizService.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.task; + +import org.apache.eventmesh.admin.server.AdminServerProperties; +import org.apache.eventmesh.admin.server.web.Response; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshJobInfo; +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshTaskInfo; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshTaskInfoService; +import org.apache.eventmesh.admin.server.web.pojo.JobDetail; +import org.apache.eventmesh.admin.server.web.service.job.JobInfoBizService; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.datasource.DataSource; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.request.CreateTaskRequest; +import org.apache.eventmesh.common.remote.response.CreateTaskResponse; +import org.apache.eventmesh.common.utils.JsonUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.client.RestTemplate; + +@Service +public class TaskBizService { + + @Autowired + private EventMeshTaskInfoService taskInfoService; + + @Autowired + private JobInfoBizService jobInfoService; + + @Autowired + private AdminServerProperties properties; + + private static final String TYPE = "type"; + + private static final String DESC = "desc"; + + private static final String CONF_CLAZZ = "confClazz"; + + private static final String CONF = "conf"; + + private static final String REGION = "region"; + + @Transactional + public CreateTaskResponse createTask(CreateTaskRequest req) { + String taskID = req.getTaskId(); + if (StringUtils.isEmpty(taskID)) { + taskID = UUID.randomUUID().toString(); + req.setTaskId(taskID); + } + + String targetRegion = 
req.getTargetRegion(); + String remoteResponse = ""; + // not from other admin && target not equals with self region + if (!req.isFlag() && !properties.getRegion().equals(targetRegion)) { + List adminServerList = properties.getAdminServerList().get(targetRegion); + if (adminServerList == null || adminServerList.isEmpty()) { + throw new RuntimeException("No admin server available for region: " + targetRegion); + } + String targetUrl = adminServerList.get(new Random().nextInt(adminServerList.size())) + "/eventmesh/admin/createTask"; + + RestTemplate restTemplate = new RestTemplate(); + req.setFlag(true); + ResponseEntity response = restTemplate.postForEntity(targetUrl, req, String.class); + if (!response.getStatusCode().is2xxSuccessful()) { + throw new RuntimeException("Failed to create task on admin server: " + targetUrl); + } + remoteResponse = response.getBody(); + } + + String finalTaskID = taskID; + List jobs = req.getJobs().stream().map(x -> { + JobDetail job = null; + try { + job = parse(x); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + job.setTaskID(finalTaskID); + job.setCreateUid(req.getUid()); + job.setUpdateUid(req.getUid()); + return job; + }).collect(Collectors.toList()); + + EventMeshTaskInfo taskInfo = new EventMeshTaskInfo(); + taskInfo.setTaskID(finalTaskID); + taskInfo.setTaskName(req.getTaskName()); + taskInfo.setTaskDesc(req.getTaskDesc()); + taskInfo.setTaskState(TaskState.INIT.name()); + taskInfo.setCreateUid(req.getUid()); + taskInfo.setSourceRegion(req.getSourceRegion()); + taskInfo.setTargetRegion(req.getTargetRegion()); + List eventMeshJobInfoList = jobInfoService.createJobs(jobs); + taskInfoService.save(taskInfo); + return buildCreateTaskResponse(finalTaskID, eventMeshJobInfoList, remoteResponse); + } + + private JobDetail parse(CreateTaskRequest.JobDetail src) throws ClassNotFoundException { + JobDetail dst = new JobDetail(); + dst.setJobDesc(src.getJobDesc()); + dst.setTransportType(src.getTransportType()); + dst.setSourceConnectorDesc(src.getSourceConnectorDesc()); + try { + dst.setSourceDataSource(mapToDataSource(src.getSourceDataSource())); + dst.setSinkDataSource(mapToDataSource(src.getSinkDataSource())); + } catch (ClassNotFoundException e) { + throw new RuntimeException("Failed to map data source", e); + } + dst.setSinkConnectorDesc(src.getSinkConnectorDesc()); + // full/increase/check + dst.setJobType(src.getJobType()); + dst.setFromRegion(src.getFromRegion()); + dst.setRunningRegion(src.getRunningRegion()); + return dst; + } + + private DataSource mapToDataSource(Map dataMap) throws ClassNotFoundException { + DataSource dataSource = new DataSource(); + dataSource.setType(DataSourceType.fromString(dataMap.get(TYPE).toString())); + dataSource.setDesc((String) dataMap.get(DESC)); + dataSource.setConfClazz((Class) Class.forName(dataMap.get(CONF_CLAZZ).toString())); + dataSource.setConf(JsonUtils.parseObject(JsonUtils.toJSONString(dataMap.get(CONF)), dataSource.getConfClazz())); + dataSource.setRegion((String) dataMap.get(REGION)); + return dataSource; + } + + private CreateTaskResponse buildCreateTaskResponse(String taskId, List eventMeshJobInfoList, String remoteResponse) { + CreateTaskResponse createTaskResponse = new CreateTaskResponse(); + createTaskResponse.setTaskId(taskId); + List jobDetailList = new ArrayList<>(); + if (!eventMeshJobInfoList.isEmpty()) { + for (EventMeshJobInfo eventMeshJobInfo : eventMeshJobInfoList) { + CreateTaskRequest.JobDetail jobDetail = new CreateTaskRequest.JobDetail(); + 
jobDetail.setJobId(eventMeshJobInfo.getJobID()); + jobDetail.setRunningRegion(eventMeshJobInfo.getRunningRegion()); + jobDetailList.add(jobDetail); + } + } + if (!StringUtils.isEmpty(remoteResponse)) { + Response response = JsonUtils.parseObject(remoteResponse, Response.class); + CreateTaskResponse remoteCreateTaskResponse = JsonUtils.convertValue(response.getData(), CreateTaskResponse.class); + jobDetailList.addAll(remoteCreateTaskResponse.getJobIdList()); + } + createTaskResponse.setJobIdList(jobDetailList); + return createTaskResponse; + } +} diff --git a/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java new file mode 100644 index 0000000000..e4f08b30cc --- /dev/null +++ b/eventmesh-admin-server/src/main/java/org/apache/eventmesh/admin/server/web/service/verify/VerifyBizService.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.admin.server.web.service.verify; + +import org.apache.eventmesh.admin.server.web.db.entity.EventMeshVerify; +import org.apache.eventmesh.admin.server.web.db.service.EventMeshVerifyService; +import org.apache.eventmesh.common.remote.request.ReportVerifyRequest; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class VerifyBizService { + + @Autowired + private EventMeshVerifyService verifyService; + + public boolean reportVerifyRecord(ReportVerifyRequest request) { + EventMeshVerify verify = new EventMeshVerify(); + verify.setRecordID(request.getRecordID()); + verify.setRecordSig(request.getRecordSig()); + verify.setPosition(request.getPosition()); + verify.setTaskID(request.getTaskID()); + verify.setJobID(request.getJobID()); + verify.setConnectorName(request.getConnectorName()); + verify.setConnectorStage(request.getConnectorStage()); + return verifyService.save(verify); + } +} diff --git a/eventmesh-admin-server/src/main/resources/META-INF/spring.factories b/eventmesh-admin-server/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000..0d9e9bae6e --- /dev/null +++ b/eventmesh-admin-server/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
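TaskBizService.createTask() above either handles a task locally or forwards it once to a randomly chosen admin server of the target region; the request's flag field marks an already-forwarded request so the remote side does not forward it again. A standalone sketch of that routing decision (the names here are illustrative, not from the patch):

```java
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

// Standalone sketch of the routing rule in TaskBizService.createTask():
// forward only when the request has not been forwarded before (flag == false)
// and the target region differs from this admin server's own region.
public final class TaskRoutingSketch {

    static String pickRemoteAdmin(Map<String, List<String>> adminServersByRegion,
                                  String selfRegion, String targetRegion, boolean alreadyForwarded) {
        if (alreadyForwarded || selfRegion.equals(targetRegion)) {
            return null; // handle locally
        }
        List<String> candidates = adminServersByRegion.get(targetRegion);
        if (candidates == null || candidates.isEmpty()) {
            throw new IllegalStateException("No admin server available for region: " + targetRegion);
        }
        // the patch picks a random server; ThreadLocalRandom avoids creating a new Random per call
        return candidates.get(ThreadLocalRandom.current().nextInt(candidates.size())) + "/eventmesh/admin/createTask";
    }
}
```

In the service itself the selected URL is then called with RestTemplate.postForEntity() after req.setFlag(true), which is what prevents a forwarding loop between regions.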
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.eventmesh.admin.server.AdminServerProperties \ No newline at end of file diff --git a/eventmesh-common/build.gradle b/eventmesh-common/build.gradle index 07068fe4c1..21b6e63d44 100644 --- a/eventmesh-common/build.gradle +++ b/eventmesh-common/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def grpcVersion = '1.64.0' +def grpcVersion = '1.68.0' dependencies { api "com.google.guava:guava" @@ -48,6 +48,7 @@ dependencies { implementation "org.apache.httpcomponents:httpclient" implementation "io.netty:netty-all" + compileOnly 'com.mysql:mysql-connector-j' implementation "io.grpc:grpc-protobuf:${grpcVersion}" implementation "io.grpc:grpc-stub:${grpcVersion}" diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/AbstractComponent.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/AbstractComponent.java new file mode 100644 index 0000000000..375b6cb1d3 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/AbstractComponent.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common; + +import java.util.concurrent.atomic.AtomicBoolean; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public abstract class AbstractComponent implements ComponentLifeCycle { + private final AtomicBoolean started = new AtomicBoolean(false); + private final AtomicBoolean stopped = new AtomicBoolean(false); + + @Override + public void start() throws Exception { + if (!started.compareAndSet(false, true)) { + log.info("component [{}] has started", this.getClass()); + return; + } + log.info("component [{}] will start", this.getClass()); + run(); + log.info("component [{}] started successfully", this.getClass()); + } + + @Override + public void stop() throws Exception { + if (!stopped.compareAndSet(false, true)) { + log.info("component [{}] has stopped", this.getClass()); + return; + } + log.info("component [{}] will stop", this.getClass()); + shutdown(); + log.info("component [{}] stopped successfully", this.getClass()); + } + + protected abstract void run() throws Exception; + + protected abstract void shutdown() throws Exception; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/ComponentLifeCycle.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/ComponentLifeCycle.java new file mode 100644 index 0000000000..76fdd548d0 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/ComponentLifeCycle.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
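AbstractComponent above makes start() and stop() idempotent with AtomicBoolean guards, so run() and shutdown() execute at most once per component instance. A minimal subclass, assuming eventmesh-common is on the classpath:

```java
import org.apache.eventmesh.common.AbstractComponent;

// Minimal component built on the new lifecycle base class; repeated start()/stop()
// calls are logged and ignored by AbstractComponent.
public class DemoComponent extends AbstractComponent {

    @Override
    protected void run() throws Exception {
        // acquire resources, start threads, open ports, ...
    }

    @Override
    protected void shutdown() throws Exception {
        // release everything acquired in run()
    }

    public static void main(String[] args) throws Exception {
        DemoComponent component = new DemoComponent();
        component.start();
        component.start(); // second call is ignored
        component.stop();
    }
}
```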
+ */ + +package org.apache.eventmesh.common; + +/** + * LifeCycle of EventMesh Component + */ +public interface ComponentLifeCycle { + + void start() throws Exception; + + void stop() throws Exception; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/Constants.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/Constants.java index 867d50b43b..2460129e75 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/Constants.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/Constants.java @@ -200,9 +200,13 @@ public class Constants { public static final String GRPC = "GRPC"; + public static final String ADMIN = "ADMIN"; + public static final String OS_NAME_KEY = "os.name"; public static final String OS_WIN_PREFIX = "win"; public static final String DEFAULT = "default"; + + public static final String ADMIN_SERVER_REGISTRY_NAME = "DEFAULT_GROUP@@em_adm_server"; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/CommonConfiguration.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/CommonConfiguration.java index 2f38a372ce..04c4ae60ed 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/CommonConfiguration.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/CommonConfiguration.java @@ -106,6 +106,21 @@ public class CommonConfiguration { @ConfigField(field = "server.retry.plugin.type") private String eventMeshRetryPluginType = Constants.DEFAULT; + @ConfigField(field = "registry.plugin.server-addr", notEmpty = true) + private String registryAddr = ""; + + @ConfigField(field = "registry.plugin.type", notEmpty = true) + private String eventMeshRegistryPluginType = "nacos"; + + @ConfigField(field = "registry.plugin.username") + private String eventMeshRegistryPluginUsername = ""; + + @ConfigField(field = "registry.plugin.password") + private String eventMeshRegistryPluginPassword = ""; + + @ConfigField(field = "registry.plugin.enabled") + private boolean eventMeshRegistryPluginEnabled = false; + public void reload() { this.eventMeshWebhookOrigin = "eventmesh." + eventMeshIDC; diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java index 7c5b17d7e6..3f3f609a1f 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/ConfigService.java @@ -60,6 +60,9 @@ private ConfigService() { } public ConfigService setConfigPath(String configPath) { + if (StringUtils.isNotBlank(configPath) && !configPath.endsWith(File.separator)) { + configPath = configPath + File.separator; + } this.configPath = configPath; return this; } @@ -128,7 +131,7 @@ public T getConfig(ConfigInfo configInfo) throws IOException { } else { filePath = path.startsWith(FILE_PATH_PREFIX) ? 
path.substring(FILE_PATH_PREFIX.length()) : this.configPath + path; } - + filePath = normalizeFilePath(filePath); if (filePath.contains(".jar")) { try (final InputStream inputStream = getClass().getResourceAsStream(Objects.requireNonNull(resourceUrl))) { if (inputStream == null) { @@ -149,6 +152,15 @@ public T getConfig(ConfigInfo configInfo) throws IOException { return (T) object; } + private String normalizeFilePath(String filePath) { + if (System.getProperty("os.name").toLowerCase().contains("win")) { + if (filePath.startsWith("/")) { + filePath = filePath.substring(1); + } + } + return filePath; + } + private void populateConfig(Object object, Class clazz, Config config) throws NoSuchFieldException, IOException, IllegalAccessException { ConfigInfo configInfo = new ConfigInfo(); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/FileLoad.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/FileLoad.java index a0c81ea481..4f8c6687b8 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/FileLoad.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/FileLoad.java @@ -68,6 +68,7 @@ class PropertiesFileLoad implements FileLoad { private final Convert convert = new Convert(); @SuppressWarnings("unchecked") + @Override public T getConfig(ConfigInfo configInfo) throws IOException { final Properties properties = new Properties(); if (StringUtils.isNotBlank(configInfo.getResourceUrl())) { diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/Config.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Config.java similarity index 93% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/Config.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Config.java index 330aa93932..d4bdee7778 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/Config.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Config.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.openconnect.api.config; +package org.apache.eventmesh.common.config.connector; public abstract class Config { } diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/Constants.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java similarity index 73% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/Constants.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java index 59794e562a..817efb6d3a 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/Constants.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/Constants.java @@ -15,7 +15,7 @@ * limitations under the License. 
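The ConfigService changes above append a trailing separator to the configured base path and strip a leading '/' on Windows so that classpath-style paths resolve as files. A standalone sketch of the same normalization, with the OS name passed in explicitly for illustration:

```java
import java.io.File;

// Standalone sketch of the path handling added to ConfigService: ensure the base path
// ends with the platform separator and drop a leading '/' on Windows.
public final class ConfigPathSketch {

    static String withTrailingSeparator(String configPath) {
        if (configPath != null && !configPath.isEmpty() && !configPath.endsWith(File.separator)) {
            return configPath + File.separator;
        }
        return configPath;
    }

    static String normalizeForOs(String filePath, String osName) {
        if (osName.toLowerCase().contains("win") && filePath.startsWith("/")) {
            return filePath.substring(1);
        }
        return filePath;
    }

    public static void main(String[] args) {
        System.out.println(withTrailingSeparator("/data/eventmesh/conf"));                        // adds the separator
        System.out.println(normalizeForOs("/C:/eventmesh/conf/app.properties", "Windows 11"));    // strips the leading '/'
    }
}
```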
*/ -package org.apache.eventmesh.openconnect.api.config; +package org.apache.eventmesh.common.config.connector; public class Constants { @@ -30,4 +30,21 @@ public class Constants { public static final int DEFAULT_ATTEMPT = 3; public static final int DEFAULT_PORT = 8080; + + // ======================== Source Constants ======================== + /** + * Default capacity + */ + public static final int DEFAULT_CAPACITY = 1024; + + /** + * Default poll batch size + */ + public static final int DEFAULT_POLL_BATCH_SIZE = 10; + + /** + * Default poll timeout (unit: ms) + */ + public static final long DEFAULT_POLL_TIMEOUT = 5000L; + } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java new file mode 100644 index 0000000000..cf3f06be91 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PollConfig.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector; + +import lombok.Data; + +/** + * Source Poll Config + */ +@Data +public class PollConfig { + + /** + * Capacity of the poll queue + */ + private int capacity = Constants.DEFAULT_CAPACITY; + + /** + * Max batch size of the poll + */ + private int maxBatchSize = Constants.DEFAULT_POLL_BATCH_SIZE; + + /** + * Max wait time of the poll + */ + private long maxWaitTime = Constants.DEFAULT_POLL_TIMEOUT; + +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/PubSubConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PubSubConfig.java similarity index 95% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/PubSubConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PubSubConfig.java index 6f5c9cd5f4..be83d51127 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/PubSubConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/PubSubConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.openconnect.api.config; +package org.apache.eventmesh.common.config.connector; import lombok.Data; diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/SinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SinkConfig.java similarity index 94% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/SinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SinkConfig.java index 527d02e04d..4ef68291d3 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/SinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SinkConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.openconnect.api.config; +package org.apache.eventmesh.common.config.connector; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/SourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java similarity index 80% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/SourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java index 2942be936c..f7bc42970c 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/config/SourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/SourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.openconnect.api.config; +package org.apache.eventmesh.common.config.connector; -import org.apache.eventmesh.openconnect.offsetmgmt.api.config.OffsetStorageConfig; +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; import lombok.Data; import lombok.EqualsAndHashCode; @@ -30,4 +30,7 @@ public abstract class SourceConfig extends Config { private OffsetStorageConfig offsetStorageConfig; + // Polling configuration, e.g. capacity, batch size, wait time, etc. + private PollConfig pollConfig = new PollConfig(); + } diff --git a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/config/DingDingSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/dingtalk/DingDingSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/config/DingDingSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/dingtalk/DingDingSinkConfig.java index aa4245219f..3482d5665c 100644 --- a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/config/DingDingSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/dingtalk/DingDingSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
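SourceConfig now embeds a PollConfig with defaults of 1024 queue capacity, a 10-record batch and a 5000 ms wait. The sketch below shows one way a source connector might honor those values when draining its internal queue; it assumes Lombok-generated getters on PollConfig (the class is annotated with @Data) and uses String as a placeholder record type.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.apache.eventmesh.common.config.connector.PollConfig;

// Hedged sketch: draining an internal queue while honoring the PollConfig defaults
// (capacity 1024, max batch 10, max wait 5000 ms).
public class PollLoopSketch {

    private final PollConfig pollConfig = new PollConfig();
    private final BlockingQueue<String> queue = new LinkedBlockingQueue<>(pollConfig.getCapacity());

    public List<String> poll() throws InterruptedException {
        List<String> batch = new ArrayList<>(pollConfig.getMaxBatchSize());
        long deadline = System.currentTimeMillis() + pollConfig.getMaxWaitTime();
        while (batch.size() < pollConfig.getMaxBatchSize()) {
            long remaining = deadline - System.currentTimeMillis();
            if (remaining <= 0) {
                break; // return whatever was collected within the wait window
            }
            String record = queue.poll(remaining, TimeUnit.MILLISECONDS);
            if (record == null) {
                break;
            }
            batch.add(record);
        }
        return batch;
    }
}
```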
*/ -package org.apache.eventmesh.connector.dingtalk.sink.config; +package org.apache.eventmesh.common.config.connector.dingtalk; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/dingtalk/SinkConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/dingtalk/SinkConnectorConfig.java index 2211fd1adc..1dfcd76640 100644 --- a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/dingtalk/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.dingtalk.sink.config; +package org.apache.eventmesh.common.config.connector.dingtalk; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/config/FileSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/FileSinkConfig.java similarity index 89% rename from eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/config/FileSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/FileSinkConfig.java index abdae52770..7de6daa51e 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/config/FileSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/FileSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.file.sink.config; +package org.apache.eventmesh.common.config.connector.file; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/config/FileSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/FileSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/config/FileSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/FileSourceConfig.java index b969544403..06bc4c1745 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/config/FileSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/FileSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.file.source.config; +package org.apache.eventmesh.common.config.connector.file; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/SinkConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/SinkConnectorConfig.java index a8cf958f7f..ee42a17759 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.file.sink.config; +package org.apache.eventmesh.common.config.connector.file; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/SourceConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/SourceConnectorConfig.java index 6376a7fb4a..786d8fa743 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/file/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.file.source.config; +package org.apache.eventmesh.common.config.connector.file; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpRetryConfig.java similarity index 90% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpRetryConfig.java index 0bceac7d47..319732a875 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpRetryConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpRetryConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; import lombok.Data; @@ -24,8 +24,8 @@ public class HttpRetryConfig { // maximum number of retries, default 2, minimum 0 private int maxRetries = 2; - // retry interval, default 2000ms - private int interval = 2000; + // retry interval, default 1000ms + private int interval = 1000; // Default value is false, indicating that only requests with network-level errors will be retried. // If set to true, all failed requests will be retried, including network-level errors and non-2xx responses. diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSinkConfig.java similarity index 89% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSinkConfig.java index 3dd0c2b6a5..3c429f3355 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/config/HttpSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/config/HttpSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSourceConfig.java index bee870cb1a..476dfb10de 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/config/HttpSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
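// Reviewer note, not part of the patch: a sketch of the retry semantics implied by
// HttpRetryConfig after the default interval change from 2000 ms to 1000 ms. The send()
// call is a placeholder, and the getters are assumed to come from Lombok @Data as elsewhere.
import org.apache.eventmesh.common.config.connector.http.HttpRetryConfig;

class HttpRetrySketch {

    // maxRetries = 2 means one initial attempt plus at most two retries.
    boolean sendWithRetry(HttpRetryConfig retryConfig) throws InterruptedException {
        for (int attempt = 0; attempt <= retryConfig.getMaxRetries(); attempt++) {
            if (send()) {
                return true;
            }
            if (attempt < retryConfig.getMaxRetries()) {
                Thread.sleep(retryConfig.getInterval());  // default 1000 ms between attempts
            }
        }
        return false;
    }

    private boolean send() {
        // placeholder for the actual HTTP request
        return false;
    }
}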
*/ -package org.apache.eventmesh.connector.http.source.config; +package org.apache.eventmesh.common.config.connector.http; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpWebhookConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpWebhookConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpWebhookConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpWebhookConfig.java index f15bac4568..96b9e09826 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/HttpWebhookConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/HttpWebhookConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SinkConnectorConfig.java similarity index 84% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SinkConnectorConfig.java index 9bb338cceb..ccebe5a998 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SinkConnectorConfig.java @@ -15,9 +15,8 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.http.sink.config; +package org.apache.eventmesh.common.config.connector.http; -import io.vertx.core.http.HttpClientOptions; import lombok.Data; @@ -29,19 +28,19 @@ public class SinkConnectorConfig { private String[] urls; // keepAlive, default true - private boolean keepAlive = HttpClientOptions.DEFAULT_KEEP_ALIVE; + private boolean keepAlive = true; // timeunit: ms, default 60000ms - private int keepAliveTimeout = HttpClientOptions.DEFAULT_KEEP_ALIVE_TIMEOUT * 1000; // Keep units consistent + private int keepAliveTimeout = 60 * 1000; // Keep units consistent // timeunit: ms, default 5000ms, recommended scope: 5000ms - 10000ms private int connectionTimeout = 5000; // timeunit: ms, default 5000ms - private int idleTimeout; + private int idleTimeout = 5000; // maximum number of HTTP/1 connections a client will pool, default 5 - private int maxConnectionPoolSize = HttpClientOptions.DEFAULT_MAX_POOL_SIZE; + private int maxConnectionPoolSize = 5; // retry config private HttpRetryConfig retryConfig = new HttpRetryConfig(); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java new file mode 100644 index 0000000000..282f883332 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/http/SourceConnectorConfig.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.http; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Data; + +@Data +public class SourceConnectorConfig { + + private String connectorName; + + private String path = "/"; + + private int port; + + // timeunit: ms, default 5000ms + private int idleTimeout = 5000; + + /** + *
+     * The maximum size allowed for form attributes when Content-Type is application/x-www-form-urlencoded or multipart/form-data.
+     *
+     * Default is 1MB (1024 * 1024 bytes).
+     *
+     * If you receive a "size exceed allowed maximum capacity" error, you can increase this value.
+     *
+     * Note: This applies only when handling form data submissions.
+ */ + private int maxFormAttributeSize = 1024 * 1024; + + // protocol, default Common + private String protocol = "Common"; + + // extra config, e.g. GitHub secret + private Map extraConfig = new HashMap<>(); + + // data consistency enabled, default true + private boolean dataConsistencyEnabled = false; +} diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/config/KnativeSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/KnativeSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/config/KnativeSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/KnativeSinkConfig.java index 85bb38a90b..aff7c275a5 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/config/KnativeSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/KnativeSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.knative.sink.config; +package org.apache.eventmesh.common.config.connector.knative; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/config/KnativeSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/KnativeSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/config/KnativeSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/KnativeSourceConfig.java index 8ef36a8d78..644161d915 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/config/KnativeSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/KnativeSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.knative.source.config; +package org.apache.eventmesh.common.config.connector.knative; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/SinkConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/SinkConnectorConfig.java index 5694c643f8..076bd68756 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.knative.sink.config; +package org.apache.eventmesh.common.config.connector.knative; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/SourceConnectorConfig.java index 8d21c86b7b..98e7815520 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/knative/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.knative.source.config; +package org.apache.eventmesh.common.config.connector.knative; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/config/LarkSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/lark/LarkSinkConfig.java similarity index 89% rename from eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/config/LarkSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/lark/LarkSinkConfig.java index a97ece91b0..a9235129a0 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/config/LarkSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/lark/LarkSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.lark.sink.config; +package org.apache.eventmesh.common.config.connector.lark; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/lark/SinkConnectorConfig.java similarity index 56% rename from eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/lark/SinkConnectorConfig.java index cde3aa6737..a4895b8b2e 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/lark/SinkConnectorConfig.java @@ -15,13 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.lark.sink.config; - -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; - -import org.apache.commons.lang3.StringUtils; - -import com.lark.oapi.service.im.v1.enums.ReceiveIdTypeEnum; +package org.apache.eventmesh.common.config.connector.lark; import lombok.Data; @@ -53,8 +47,6 @@ public class SinkConnectorConfig { /** * When sinking CouldEvent to lark, choose to call - * {@link org.apache.eventmesh.connector.lark.sink.ImServiceHandler#sink(ConnectRecord)} - * or {@link org.apache.eventmesh.connector.lark.sink.ImServiceHandler#sinkAsync(ConnectRecord)} */ private String sinkAsync = "true"; @@ -62,20 +54,4 @@ public class SinkConnectorConfig { private String retryDelayInMills = "1000"; - public void validateSinkConfiguration() { - // validate blank - if (StringUtils.isAnyBlank(appId, appSecret, receiveId)) { - throw new IllegalArgumentException("appId or appSecret or receiveId is blank,please check it."); - } - - // validate receiveIdType - if (!StringUtils.containsAny(receiveIdType, ReceiveIdTypeEnum.CHAT_ID.getValue(), - ReceiveIdTypeEnum.EMAIL.getValue(), - ReceiveIdTypeEnum.OPEN_ID.getValue(), - ReceiveIdTypeEnum.USER_ID.getValue(), - ReceiveIdTypeEnum.UNION_ID.getValue())) { - throw new IllegalArgumentException( - String.format("sinkConnectorConfig.receiveIdType=[%s], Invalid.", receiveIdType)); - } - } } diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/config/KafkaSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/KafkaSinkConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/config/KafkaSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/KafkaSinkConfig.java index 9bb79551db..973eed11ff 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/config/KafkaSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/KafkaSinkConfig.java @@ -15,12 +15,14 @@ * limitations under the License. 
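// Reviewer note, not part of the patch: the validateSinkConfiguration() logic removed above
// depends on the Lark SDK and commons-lang3, which eventmesh-common should not pull in.
// One option (an assumption about where the check moves, not something this patch adds) is an
// equivalent validator kept in the lark connector module:
import org.apache.eventmesh.common.config.connector.lark.SinkConnectorConfig;

import org.apache.commons.lang3.StringUtils;

import com.lark.oapi.service.im.v1.enums.ReceiveIdTypeEnum;

final class LarkSinkConfigValidator {

    private LarkSinkConfigValidator() {
    }

    static void validate(SinkConnectorConfig config) {
        // validate blank
        if (StringUtils.isAnyBlank(config.getAppId(), config.getAppSecret(), config.getReceiveId())) {
            throw new IllegalArgumentException("appId or appSecret or receiveId is blank, please check it.");
        }
        // validate receiveIdType
        if (!StringUtils.containsAny(config.getReceiveIdType(),
            ReceiveIdTypeEnum.CHAT_ID.getValue(),
            ReceiveIdTypeEnum.EMAIL.getValue(),
            ReceiveIdTypeEnum.OPEN_ID.getValue(),
            ReceiveIdTypeEnum.USER_ID.getValue(),
            ReceiveIdTypeEnum.UNION_ID.getValue())) {
            throw new IllegalArgumentException(
                String.format("sinkConnectorConfig.receiveIdType=[%s], Invalid.", config.getReceiveIdType()));
        }
    }
}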
*/ -package org.apache.eventmesh.connector.kafka.sink.config; +package org.apache.eventmesh.common.config.connector.mq.kafka; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class KafkaSinkConfig extends SinkConfig { diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/config/KafkaSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/KafkaSourceConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/config/KafkaSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/KafkaSourceConfig.java index 4319ec96d7..bf44a82710 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/config/KafkaSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/KafkaSourceConfig.java @@ -15,12 +15,14 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.kafka.source.config; +package org.apache.eventmesh.common.config.connector.mq.kafka; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class KafkaSourceConfig extends SourceConfig { diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SinkConnectorConfig.java similarity index 96% rename from eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SinkConnectorConfig.java index a240bf4f49..e7584319cb 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.kafka.sink.config; +package org.apache.eventmesh.common.config.connector.mq.kafka; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java index 2d2f52f85c..eb7406f664 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/kafka/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.kafka.source.config; +package org.apache.eventmesh.common.config.connector.mq.kafka; import lombok.Data; @@ -32,5 +32,4 @@ public class SourceConnectorConfig { private String enableAutoCommit = "false"; private String sessionTimeoutMS = "10000"; private String maxPollRecords = "1000"; - private int pollTimeOut = 100; } diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/config/PulsarSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/PulsarSinkConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/config/PulsarSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/PulsarSinkConfig.java index 8d7bfe384b..8cbfd5fb2c 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/config/PulsarSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/PulsarSinkConfig.java @@ -15,12 +15,14 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.pulsar.sink.config; +package org.apache.eventmesh.common.config.connector.mq.pulsar; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class PulsarSinkConfig extends SinkConfig { diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/config/PulsarSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/PulsarSourceConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/config/PulsarSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/PulsarSourceConfig.java index a80c2a0e50..43eb2ca854 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/config/PulsarSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/PulsarSourceConfig.java @@ -15,12 +15,14 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.pulsar.source.config; +package org.apache.eventmesh.common.config.connector.mq.pulsar; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class PulsarSourceConfig extends SourceConfig { diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/SinkConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/SinkConnectorConfig.java index e1ebba09cb..b66f1a5324 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.pulsar.sink.config; +package org.apache.eventmesh.common.config.connector.mq.pulsar; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/SourceConnectorConfig.java index bb8ff46875..9f8fbce2d2 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/pulsar/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.pulsar.source.config; +package org.apache.eventmesh.common.config.connector.mq.pulsar; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/config/RabbitMQSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/RabbitMQSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/config/RabbitMQSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/RabbitMQSinkConfig.java index 1c4ec66d47..8dcb8ad50c 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/config/RabbitMQSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/RabbitMQSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.rabbitmq.sink.config; +package org.apache.eventmesh.common.config.connector.mq.rabbitmq; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/config/RabbitMQSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/RabbitMQSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/config/RabbitMQSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/RabbitMQSourceConfig.java index 55ce402ae7..c268c1005a 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/config/RabbitMQSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/RabbitMQSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.rabbitmq.source.config; +package org.apache.eventmesh.common.config.connector.mq.rabbitmq; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/SinkConnectorConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/SinkConnectorConfig.java index 358964bdc0..a6f633f943 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/SinkConnectorConfig.java @@ -15,9 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.rabbitmq.sink.config; - -import com.rabbitmq.client.BuiltinExchangeType; +package org.apache.eventmesh.common.config.connector.mq.rabbitmq; import lombok.Data; @@ -36,7 +34,7 @@ public class SinkConnectorConfig { private String virtualHost; - private BuiltinExchangeType exchangeType; + private String exchangeType; private String exchangeName; diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/SourceConnectorConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/SourceConnectorConfig.java index d83149f9ca..29a041338f 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rabbitmq/SourceConnectorConfig.java @@ -15,9 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.rabbitmq.source.config; - -import com.rabbitmq.client.BuiltinExchangeType; +package org.apache.eventmesh.common.config.connector.mq.rabbitmq; import lombok.Data; @@ -36,7 +34,7 @@ public class SourceConnectorConfig { private String virtualHost; - private BuiltinExchangeType exchangeType; + private String exchangeType; private String exchangeName; diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/config/RocketMQSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/RocketMQSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/config/RocketMQSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/RocketMQSinkConfig.java index bc4191c9bf..d7c08e8b79 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/config/RocketMQSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/RocketMQSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.rocketmq.sink.config; +package org.apache.eventmesh.common.config.connector.mq.rocketmq; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/config/RocketMQSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/RocketMQSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/config/RocketMQSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/RocketMQSourceConfig.java index 43a74fabd4..5c531d91c7 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/config/RocketMQSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/RocketMQSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
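// Reviewer note, not part of the patch: exchangeType is now a plain String in both the rabbitmq
// sink and source configs, so the connector runtime presumably converts it back to the client
// enum where it is used. One possible conversion, shown only as an assumption:
import com.rabbitmq.client.BuiltinExchangeType;

final class ExchangeTypeResolver {

    private ExchangeTypeResolver() {
    }

    // e.g. "TOPIC" or "topic" -> BuiltinExchangeType.TOPIC
    static BuiltinExchangeType resolve(String exchangeType) {
        return BuiltinExchangeType.valueOf(exchangeType.trim().toUpperCase());
    }
}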
*/ -package org.apache.eventmesh.connector.rocketmq.source.config; +package org.apache.eventmesh.common.config.connector.mq.rocketmq; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/SinkConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/SinkConnectorConfig.java index 034dd7ce0c..93472f7a4c 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.rocketmq.sink.config; +package org.apache.eventmesh.common.config.connector.mq.rocketmq; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/SourceConnectorConfig.java index 1dc8a0d5c9..641d1873bb 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/mq/rocketmq/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.rocketmq.source.config; +package org.apache.eventmesh.common.config.connector.mq.rocketmq; import lombok.Data; diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/config/OffsetStorageConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/offset/OffsetStorageConfig.java similarity index 82% rename from eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/config/OffsetStorageConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/offset/OffsetStorageConfig.java index 30a56a3d2f..60448d3691 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/config/OffsetStorageConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/offset/OffsetStorageConfig.java @@ -15,7 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.openconnect.offsetmgmt.api.config; +package org.apache.eventmesh.common.config.connector.offset; + +import org.apache.eventmesh.common.remote.datasource.DataSourceType; import java.util.Map; @@ -29,4 +31,8 @@ public class OffsetStorageConfig { private String offsetStorageAddr; private Map extensions; + + private DataSourceType dataSourceType; + + private DataSourceType dataSinkType; } diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/config/OpenFunctionSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/OpenFunctionSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/config/OpenFunctionSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/OpenFunctionSinkConfig.java index 6e4fbe3dc4..151ff68834 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/config/OpenFunctionSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/OpenFunctionSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.openfunction.sink.config; +package org.apache.eventmesh.common.config.connector.openfunction; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/config/OpenFunctionSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/OpenFunctionSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/config/OpenFunctionSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/OpenFunctionSourceConfig.java index f3f5b52756..e3a6123ed9 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/config/OpenFunctionSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/OpenFunctionSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.openfunction.source.config; +package org.apache.eventmesh.common.config.connector.openfunction; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/SinkConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/SinkConnectorConfig.java index 77c1b7c701..b8bc937390 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.openfunction.sink.config; +package org.apache.eventmesh.common.config.connector.openfunction; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/SourceConnectorConfig.java index 93fc9739fc..09dbdea14c 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/openfunction/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.openfunction.source.config; +package org.apache.eventmesh.common.config.connector.openfunction; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/config/PravegaSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/PravegaSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/config/PravegaSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/PravegaSinkConfig.java index 739c6539b5..a94766c20c 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/config/PravegaSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/PravegaSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.pravega.sink.config; +package org.apache.eventmesh.common.config.connector.pravega; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/config/PravegaSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/PravegaSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/config/PravegaSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/PravegaSourceConfig.java index 87fbd3d573..105d3474ee 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/config/PravegaSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/PravegaSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.pravega.source.config; +package org.apache.eventmesh.common.config.connector.pravega; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/SinkConnectorConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/SinkConnectorConfig.java index c254dee3b8..7f56ea57a8 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.pravega.sink.config; +package org.apache.eventmesh.common.config.connector.pravega; import java.net.URI; diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/SourceConnectorConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/SourceConnectorConfig.java index 1ff54d1c1c..da0f8c5f13 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/pravega/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.pravega.source.config; +package org.apache.eventmesh.common.config.connector.pravega; import java.net.URI; diff --git a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/config/PrometheusSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/prometheus/PrometheusSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/config/PrometheusSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/prometheus/PrometheusSourceConfig.java index 292b7e62f1..3393e4a193 100644 --- a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/config/PrometheusSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/prometheus/PrometheusSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.prometheus.source.config; +package org.apache.eventmesh.common.config.connector.prometheus; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/prometheus/SourceConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/prometheus/SourceConnectorConfig.java index 7e8b7ba93e..1df3fe18cb 100644 --- a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/prometheus/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.prometheus.source.config; +package org.apache.eventmesh.common.config.connector.prometheus; import lombok.Data; diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/JdbcConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/JdbcConfig.java new file mode 100644 index 0000000000..fc784fc187 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/JdbcConfig.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb; + +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; + +import java.util.Set; + +import lombok.Data; + +@Data +public class JdbcConfig { + private String url; + + private String dbAddress; + + private int dbPort; + + private String userName; + + private String passWord; + + private Set databases; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalMySQLType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalMySQLType.java new file mode 100644 index 0000000000..b5107ccbf3 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalMySQLType.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
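// Reviewer note, not part of the patch: a sketch of how the new JdbcConfig fields could be
// combined into a MySQL JDBC URL when the url field is left empty. The buildMysqlUrl helper
// and the jdbc:mysql URL shape are illustrative assumptions, not part of this change.
import org.apache.eventmesh.common.config.connector.rdb.JdbcConfig;

final class JdbcUrlSketch {

    private JdbcUrlSketch() {
    }

    static String buildMysqlUrl(JdbcConfig config, String database) {
        if (config.getUrl() != null && !config.getUrl().isEmpty()) {
            return config.getUrl();
        }
        // e.g. jdbc:mysql://127.0.0.1:3306/eventmesh
        return "jdbc:mysql://" + config.getDbAddress() + ":" + config.getDbPort() + "/" + database;
    }
}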
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import java.util.HashMap; +import java.util.Map; + +import com.mysql.cj.MysqlType; + +public enum CanalMySQLType { + BIT("BIT"), + TINYINT("TINYINT"), + SMALLINT("SMALLINT"), + MEDIUMINT("MEDIUMINT"), + INT("INT"), + BIGINT("BIGINT"), + DECIMAL("DECIMAL"), + FLOAT("FLOAT"), + DOUBLE("DOUBLE"), + DATE("DATE"), + DATETIME("DATETIME"), + TIMESTAMP("TIMESTAMP"), + TIME("TIME"), + YEAR("YEAR"), + CHAR("CHAR"), + VARCHAR("VARCHAR"), + BINARY("BINARY"), + VARBINARY("VARBINARY"), + TINYBLOB("TINYBLOB"), + BLOB("BLOB"), + MEDIUMBLOB("MEDIUMBLOB"), + LONGBLOB("LONGBLOB"), + TINYTEXT("TINYTEXT"), + TEXT("TEXT"), + MEDIUMTEXT("MEDIUMTEXT"), + LONGTEXT("LONGTEXT"), + ENUM("ENUM"), + SET("SET"), + JSON("JSON"), + GEOMETRY("GEOMETRY"), + // MysqlType not include the following type + POINT("POINT"), + LINESTRING("LINESTRING"), + POLYGON("POLYGON"), + MULTIPOINT("MULTIPOINT"), + GEOMETRY_COLLECTION("GEOMETRYCOLLECTION"), + GEOM_COLLECTION("GEOMCOLLECTION"), + MULTILINESTRING("MULTILINESTRING"), + MULTIPOLYGON("MULTIPOLYGON"); + + private final String codeKey; + private final MysqlType mysqlType; + + CanalMySQLType(String codeKey) { + this.codeKey = codeKey; + this.mysqlType = MysqlType.getByName(codeKey); + } + + private static final Map<String, CanalMySQLType> TYPES = new HashMap<>(); + + static { + CanalMySQLType[] values = values(); + for (CanalMySQLType tableType : values) { + TYPES.put(tableType.codeKey, tableType); + } + } + + public String genPrepareStatement4Insert() { + switch (this) { + case GEOMETRY: + case GEOM_COLLECTION: + case GEOMETRY_COLLECTION: + return "ST_GEOMFROMTEXT(?)"; + case POINT: + return "ST_PointFromText(?)"; + case LINESTRING: + return "ST_LineStringFromText(?)"; + case POLYGON: + return "ST_PolygonFromText(?)"; + case MULTIPOINT: + return "ST_MultiPointFromText(?)"; + case MULTILINESTRING: + return "ST_MultiLineStringFromText(?)"; + case MULTIPOLYGON: + return "ST_MultiPolygonFromText(?)"; + default: + return "?"; + } + } + + public static CanalMySQLType valueOfCode(String code) { + CanalMySQLType type = TYPES.get(code.toUpperCase()); + if (type != null) { + return type; + } + switch (MysqlType.getByName(code)) { + case BOOLEAN: + case TINYINT: + case TINYINT_UNSIGNED: + return TINYINT; + case SMALLINT: + case SMALLINT_UNSIGNED: + return SMALLINT; + case INT: + case INT_UNSIGNED: + return INT; + case BIGINT: + case BIGINT_UNSIGNED: + return BIGINT; + case MEDIUMINT: + case MEDIUMINT_UNSIGNED: + return MEDIUMINT; + case DECIMAL: + case DECIMAL_UNSIGNED: + return DECIMAL; + case FLOAT: + case FLOAT_UNSIGNED: + return FLOAT; + case DOUBLE: + case DOUBLE_UNSIGNED: + return DOUBLE; + case BIT: + return BIT; + case BINARY: + return BINARY; + case VARBINARY: + return VARBINARY; + case TINYBLOB: + return TINYBLOB; + case MEDIUMBLOB: + return MEDIUMBLOB; + case LONGBLOB: + return LONGBLOB; + case BLOB: + return BLOB; + case CHAR: + return CHAR; + case VARCHAR: + return VARCHAR; + case TINYTEXT: + return TINYTEXT; + case MEDIUMTEXT: + return MEDIUMTEXT; + case LONGTEXT: + return LONGTEXT; + case TEXT: + return TEXT; + case DATE: + return DATE; + case TIME: + return TIME; + case TIMESTAMP: + return TIMESTAMP; + case DATETIME: + return DATETIME; + case YEAR: + return YEAR; + case JSON: + return JSON; + case ENUM: + return ENUM; + case SET: + return SET; + case GEOMETRY: + return GEOMETRY; + case NULL: + case UNKNOWN: + default: + throw new UnsupportedOperationException("Unsupported mysql columnType " + code); + } + } + + public MysqlType
getMysqlType() { + return mysqlType; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java new file mode 100644 index 0000000000..c535c7f52a --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkConfig.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.config.connector.SinkConfig; + +import java.util.Map; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSinkConfig extends SinkConfig { + + // used to convert canal full/increment/check connector config + private Map<String, Object> sinkConfig; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java new file mode 100644 index 0000000000..f1d78a65dc --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkFullConfig.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
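The CanalMySQLType enum above normalizes MySQL column type names: valueOfCode first consults its own lookup table and then falls back to mysql-connector-j's MysqlType, collapsing unsigned and alias types onto a canonical constant, while genPrepareStatement4Insert yields the placeholder (or spatial wrapper function) to use in generated INSERT statements. A small usage sketch; the exact alias handling is delegated to MysqlType.getByName, so the printed results are indicative:

```java
import org.apache.eventmesh.common.config.connector.rdb.canal.CanalMySQLType;

public class CanalMySQLTypeExample {

    public static void main(String[] args) {
        // Direct hit in the internal lookup table (case-insensitive).
        CanalMySQLType varchar = CanalMySQLType.valueOfCode("varchar");
        System.out.println(varchar);                                            // VARCHAR

        // Unsigned variants fall through to MysqlType and are collapsed
        // onto the signed constant.
        CanalMySQLType bigint = CanalMySQLType.valueOfCode("BIGINT UNSIGNED");
        System.out.println(bigint);                                             // BIGINT

        // Spatial columns need a wrapping function in generated INSERT statements.
        System.out.println(CanalMySQLType.POINT.genPrepareStatement4Insert());   // ST_PointFromText(?)
        System.out.println(CanalMySQLType.VARCHAR.genPrepareStatement4Insert()); // ?
    }
}
```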
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.config.connector.SinkConfig; + +import lombok.Data; +import lombok.EqualsAndHashCode; + + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSinkFullConfig extends SinkConfig { + private SinkConnectorConfig sinkConnectorConfig; + private String zeroDate; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java new file mode 100644 index 0000000000..32112a769b --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSinkIncrementConfig.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.remote.job.SyncMode; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSinkIncrementConfig extends CanalSinkConfig { + + // batchSize + private Integer batchSize = 50; + + // enable batch + private Boolean useBatch = true; + + // sink thread size for single channel + private Integer poolSize = 5; + + // sync mode: field/row + private SyncMode syncMode; + + private boolean isGTIDMode = true; + + private boolean isMariaDB = true; + + // skip sink process exception + private Boolean skipException = false; + + public SinkConnectorConfig sinkConnectorConfig; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java new file mode 100644 index 0000000000..db17fbe75d --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceConfig.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
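CanalSinkConfig carries its settings as an untyped map that, per its own comment, is later converted into the concrete full/increment/check config such as CanalSinkFullConfig or CanalSinkIncrementConfig. A hedged sketch of one way that conversion could be done with Jackson; the actual conversion helper used by the connector runtime is not shown in this patch and may differ:

```java
import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig;
import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkFullConfig;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CanalSinkConfigConversion {

    private static final ObjectMapper MAPPER = new ObjectMapper()
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    // Turns the untyped sinkConfig map carried by CanalSinkConfig into the
    // concrete full-sync config. Jackson's convertValue is used here purely
    // for illustration.
    public static CanalSinkFullConfig toFullConfig(CanalSinkConfig config) {
        return MAPPER.convertValue(config.getSinkConfig(), CanalSinkFullConfig.class);
    }
}
```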
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.config.connector.SourceConfig; + +import java.util.Map; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSourceConfig extends SourceConfig { + + // used to convert canal full/increment/check connector config + private Map<String, Object> sourceConfig; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java new file mode 100644 index 0000000000..15398b303a --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceFullConfig.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSourceFullConfig extends SourceConfig { + private SourceConnectorConfig sourceConnectorConfig; + private List<RecordPosition> startPosition; + private int parallel; + private int flushSize; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java new file mode 100644 index 0000000000..94fe007b5f --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/CanalSourceIncrementConfig.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.remote.job.SyncConsistency; +import org.apache.eventmesh.common.remote.job.SyncMode; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalSourceIncrementConfig extends CanalSourceConfig { + + private String destination; + + private Long canalInstanceId; + + private String desc; + + private boolean ddlSync = true; + + private boolean filterTableError = false; + + private Long slaveId; + + private Short clientId; + + private String serverUUID; + + private boolean isMariaDB = true; + + private boolean isGTIDMode = true; + + private Integer batchSize = 10000; + + private Long batchTimeout = -1L; + + private String tableFilter; + + private String fieldFilter; + + private List<RecordPosition> recordPositions; + + // ================================= channel parameter + // ================================ + + // enable remedy + private Boolean enableRemedy = false; + + // sync mode: field/row + private SyncMode syncMode; + + // sync consistency + private SyncConsistency syncConsistency; + + // ================================= system parameter + // ================================ + + // Column name of the bidirectional synchronization mark + private String needSyncMarkTableColumnName = "needSync"; + + // Column value of the bidirectional synchronization mark + private String needSyncMarkTableColumnValue = "needSync"; + + private SourceConnectorConfig sourceConnectorConfig; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/JobRdbFullPosition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/JobRdbFullPosition.java new file mode 100644 index 0000000000..08f88e1d24 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/JobRdbFullPosition.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
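CanalSourceIncrementConfig above bundles the canal instance identity, GTID/MariaDB flags, batch tuning, and table/field filters. A minimal construction sketch using the Lombok-generated setters (setter names for the is-prefixed boolean fields assume Lombok's usual prefix stripping); all values are placeholders:

```java
import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceIncrementConfig;

public class IncrementConfigExample {

    public static void main(String[] args) {
        CanalSourceIncrementConfig config = new CanalSourceIncrementConfig();
        config.setDestination("demo-destination");
        config.setServerUUID("3e11fa47-71ca-11e1-9e33-c80aa9429562"); // placeholder UUID
        config.setGTIDMode(true);   // Lombok setter for the isGTIDMode flag
        config.setMariaDB(false);   // Lombok setter for the isMariaDB flag
        config.setBatchSize(2000);
        config.setBatchTimeout(1000L);
        config.setTableFilter("demo_db\\.orders"); // canal-style filter, placeholder
        System.out.println(config);
    }
}
```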
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import java.math.BigDecimal; + +import lombok.Data; +import lombok.ToString; + +@Data +@ToString +public class JobRdbFullPosition { + private String jobId; + private String schema; + private String tableName; + private String primaryKeyRecords; + private long maxCount; + private boolean finished; + private BigDecimal percent; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbColumnDefinition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbColumnDefinition.java new file mode 100644 index 0000000000..94c0135c3e --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbColumnDefinition.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import java.sql.JDBCType; + +import lombok.Data; + +@Data +public class RdbColumnDefinition { + protected String name; + protected JDBCType jdbcType; +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbDBDefinition.java similarity index 78% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbDBDefinition.java index 873a0d1922..ab3ed336f8 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbDBDefinition.java @@ -15,18 +15,17 @@ * limitations under the License. 
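JobRdbFullPosition above tracks where a full sync stands for one table. One plausible way to keep its percent and finished fields up to date as rows are processed; the real connector logic is not part of this patch:

```java
import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition;

import java.math.BigDecimal;
import java.math.RoundingMode;

public class FullPositionExample {

    // Processed rows over the estimated max count, kept as a BigDecimal percentage.
    public static JobRdbFullPosition update(JobRdbFullPosition position, long processed) {
        if (position.getMaxCount() > 0) {
            BigDecimal percent = BigDecimal.valueOf(processed)
                .multiply(BigDecimal.valueOf(100))
                .divide(BigDecimal.valueOf(position.getMaxCount()), 2, RoundingMode.DOWN);
            position.setPercent(percent);
            position.setFinished(processed >= position.getMaxCount());
        }
        return position;
    }
}
```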
*/ -package org.apache.eventmesh.connector.http.source.config; +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import java.util.Set; import lombok.Data; +/** + * Description: as class name + */ @Data -public class SourceConnectorConfig { - - private String connectorName; - - private String path; - - private int port; - - private int idleTimeout; +public class RdbDBDefinition { + private String schemaName; + private Set<RdbTableDefinition> tables; } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbTableDefinition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbTableDefinition.java new file mode 100644 index 0000000000..c281035578 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/RdbTableDefinition.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import lombok.Data; + +/** + * Description: as class name + */ +@Data +public class RdbTableDefinition { + protected String schemaName; + protected String tableName; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/SinkConnectorConfig.java new file mode 100644 index 0000000000..761cdba4bb --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/SinkConnectorConfig.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.config.connector.rdb.JdbcConfig; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +/** + * Configuration parameters for a sink connector.
+ */ +@Data +@EqualsAndHashCode(callSuper = true) +public class SinkConnectorConfig extends JdbcConfig { + private String connectorName; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/SourceConnectorConfig.java new file mode 100644 index 0000000000..9a95696a0d --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/SourceConnectorConfig.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal; + +import org.apache.eventmesh.common.config.connector.rdb.JdbcConfig; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +/** + * Represents the configuration for a database connector. + */ +@Data +@EqualsAndHashCode(callSuper = true) +public class SourceConnectorConfig extends JdbcConfig { + private String connectorName; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/Constants.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/Constants.java new file mode 100644 index 0000000000..8c51c7255b --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/Constants.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
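Because the canal SourceConnectorConfig and SinkConnectorConfig above both extend the new JdbcConfig, connection handling can be written once against the base type. A small sketch; the URL format is an illustrative assumption, not taken from the connector code:

```java
import org.apache.eventmesh.common.config.connector.rdb.JdbcConfig;

public final class JdbcUrlBuilder {

    private JdbcUrlBuilder() {
    }

    // Works for both the canal SourceConnectorConfig and SinkConnectorConfig,
    // since this patch gives them JdbcConfig as a shared base class.
    public static String mysqlUrl(JdbcConfig config) {
        if (config.getUrl() != null && !config.getUrl().isEmpty()) {
            return config.getUrl();
        }
        return "jdbc:mysql://" + config.getDbAddress() + ":" + config.getDbPort()
            + "?useSSL=false&characterEncoding=UTF-8";
    }
}
```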
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal.mysql; + +public class Constants { + public static final String MySQLQuot = "`"; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/MySQLColumnDef.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/MySQLColumnDef.java new file mode 100644 index 0000000000..cdc9adf33f --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/MySQLColumnDef.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.config.connector.rdb.canal.mysql; + +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalMySQLType; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbColumnDefinition; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class MySQLColumnDef extends RdbColumnDefinition { + private CanalMySQLType type; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/MySQLTableDef.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/MySQLTableDef.java new file mode 100644 index 0000000000..cdd3652378 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/canal/mysql/MySQLTableDef.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.config.connector.rdb.canal.mysql; + +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; + +import java.util.Map; +import java.util.Set; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +/** + * Description: + */ +@Data +@EqualsAndHashCode(callSuper = true) +public class MySQLTableDef extends RdbTableDefinition { + private Set<String> primaryKeys; + private Map<String, MySQLColumnDef> columnDefinitions; +} diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcConfig.java similarity index 96% rename from eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcConfig.java index d40801854c..1b46a76c99 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.jdbc.config; +package org.apache.eventmesh.common.config.connector.rdb.jdbc; import java.util.Properties; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/config/JdbcSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcSinkConfig.java similarity index 89% rename from eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/config/JdbcSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcSinkConfig.java index 2a5af32cad..83711c68be 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/config/JdbcSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.jdbc.sink.config; +package org.apache.eventmesh.common.config.connector.rdb.jdbc; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/JdbcSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcSourceConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/JdbcSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcSourceConfig.java index b330c331bf..553a0581a2 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/JdbcSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/JdbcSourceConfig.java @@ -15,9 +15,9 @@ * limitations under the License.
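MySQLTableDef and MySQLColumnDef above describe a table's primary keys and column types, and Constants.MySQLQuot supplies the backtick used to quote identifiers. A rough sketch of how that metadata could drive INSERT generation, reusing CanalMySQLType's placeholder logic; the actual SQL builder in the sink connector is not shown in this patch:

```java
import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants;
import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef;
import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef;

import java.util.StringJoiner;

public class InsertSqlSketch {

    // Identifiers are quoted with Constants.MySQLQuot, one placeholder per column,
    // and spatial types are wrapped via CanalMySQLType#genPrepareStatement4Insert.
    public static String insertSql(MySQLTableDef table) {
        StringJoiner columns = new StringJoiner(", ");
        StringJoiner values = new StringJoiner(", ");
        for (MySQLColumnDef column : table.getColumnDefinitions().values()) {
            columns.add(Constants.MySQLQuot + column.getName() + Constants.MySQLQuot);
            values.add(column.getType().genPrepareStatement4Insert());
        }
        return "INSERT INTO " + Constants.MySQLQuot + table.getSchemaName() + Constants.MySQLQuot
            + "." + Constants.MySQLQuot + table.getTableName() + Constants.MySQLQuot
            + " (" + columns + ") VALUES (" + values + ")";
    }
}
```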
*/ -package org.apache.eventmesh.connector.jdbc.source.config; +package org.apache.eventmesh.common.config.connector.rdb.jdbc; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/MysqlConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/MysqlConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/MysqlConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/MysqlConfig.java index 032921350f..ede507e1be 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/MysqlConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/MysqlConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.jdbc.source.config; +package org.apache.eventmesh.common.config.connector.rdb.jdbc; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/SinkConnectorConfig.java similarity index 90% rename from eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/SinkConnectorConfig.java index e971b7b000..afbd51d143 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/SinkConnectorConfig.java @@ -15,9 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.jdbc.sink.config; - -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; +package org.apache.eventmesh.common.config.connector.rdb.jdbc; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/SourceConnectorConfig.java similarity index 95% rename from eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/SourceConnectorConfig.java index a27dbfedb4..a10d8e09d0 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/jdbc/SourceConnectorConfig.java @@ -15,9 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.jdbc.source.config; - -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; +package org.apache.eventmesh.common.config.connector.rdb.jdbc; import java.util.List; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/config/MongodbSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/MongodbSinkConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/config/MongodbSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/MongodbSinkConfig.java index 6df203bd6a..7d019ba1ff 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/config/MongodbSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/MongodbSinkConfig.java @@ -15,12 +15,14 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.mongodb.sink.config; +package org.apache.eventmesh.common.config.connector.rdb.mongodb; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class MongodbSinkConfig extends SinkConfig { diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/config/MongodbSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/MongodbSourceConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/config/MongodbSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/MongodbSourceConfig.java index d67ad6c277..00dca10b29 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/config/MongodbSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/MongodbSourceConfig.java @@ -15,12 +15,14 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.mongodb.source.config; +package org.apache.eventmesh.common.config.connector.rdb.mongodb; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class MongodbSourceConfig extends SourceConfig { diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/SinkConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/SinkConnectorConfig.java index 087b2e48ce..58ae5ffc42 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.mongodb.sink.config; +package org.apache.eventmesh.common.config.connector.rdb.mongodb; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/SourceConnectorConfig.java index 5c48645a43..8ace602f80 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/rdb/mongodb/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.mongodb.source.config; +package org.apache.eventmesh.common.config.connector.rdb.mongodb; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/config/RedisSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/RedisSinkConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/config/RedisSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/RedisSinkConfig.java index 62cddbf312..27070343d4 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/config/RedisSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/RedisSinkConfig.java @@ -15,12 +15,14 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.redis.sink.config; +package org.apache.eventmesh.common.config.connector.redis; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class RedisSinkConfig extends SinkConfig { diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/config/RedisSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/RedisSourceConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/config/RedisSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/RedisSourceConfig.java index f01a556c9f..5b04e6a820 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/config/RedisSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/RedisSourceConfig.java @@ -15,12 +15,14 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.redis.source.config; +package org.apache.eventmesh.common.config.connector.redis; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class RedisSourceConfig extends SourceConfig { diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/SinkConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/SinkConnectorConfig.java index 34fb0326a4..ebf53adc3b 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
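The MongoDB and Redis configs above, and the S3 config below, also gain @EqualsAndHashCode(callSuper = true) as part of the move, so Lombok includes the inherited SinkConfig/SourceConfig fields in equals and hashCode instead of comparing only the subclass fields. The self-contained demo below (not EventMesh classes) shows what that changes:

```java
import lombok.Data;
import lombok.EqualsAndHashCode;

public class CallSuperDemo {

    @Data
    static class BaseConfig {
        private String endpoint;
    }

    @Data
    @EqualsAndHashCode(callSuper = true)
    static class ChildConfig extends BaseConfig {
        private String topic;
    }

    public static void main(String[] args) {
        ChildConfig a = new ChildConfig();
        a.setEndpoint("http://host-a");
        a.setTopic("demo");

        ChildConfig b = new ChildConfig();
        b.setEndpoint("http://host-b");
        b.setTopic("demo");

        // false: with callSuper = true the inherited endpoint field participates in equals()
        System.out.println(a.equals(b));
    }
}
```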
*/ -package org.apache.eventmesh.connector.redis.sink.config; +package org.apache.eventmesh.common.config.connector.redis; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/SourceConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/SourceConnectorConfig.java index 9913e16320..b363924ecb 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/redis/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.redis.source.config; +package org.apache.eventmesh.common.config.connector.redis; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/config/S3SourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/s3/S3SourceConfig.java similarity index 83% rename from eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/config/S3SourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/s3/S3SourceConfig.java index 0f4bec5d56..7691b6e235 100644 --- a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/config/S3SourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/s3/S3SourceConfig.java @@ -15,12 +15,14 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.s3.source.config; +package org.apache.eventmesh.common.config.connector.s3; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; +import lombok.EqualsAndHashCode; +@EqualsAndHashCode(callSuper = true) @Data public class S3SourceConfig extends SourceConfig { diff --git a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/config/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/s3/SourceConnectorConfig.java similarity index 96% rename from eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/config/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/s3/SourceConnectorConfig.java index 2c9cb5a80c..fdc4531255 100644 --- a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/config/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/s3/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.s3.source.config; +package org.apache.eventmesh.common.config.connector.s3; import java.util.Map; diff --git a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/slack/SinkConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/slack/SinkConnectorConfig.java index 41884a94a1..4267beddff 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/slack/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.slack.sink.config; +package org.apache.eventmesh.common.config.connector.slack; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/config/SlackSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/slack/SlackSinkConfig.java similarity index 89% rename from eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/config/SlackSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/slack/SlackSinkConfig.java index 016cd9ae97..969306ae6e 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/config/SlackSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/slack/SlackSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.slack.sink.config; +package org.apache.eventmesh.common.config.connector.slack; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SinkConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SinkConnectorConfig.java index a71c4ab66c..0ded9886dc 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.spring.sink.connector; +package org.apache.eventmesh.common.config.connector.spring; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SourceConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SourceConnectorConfig.java similarity index 93% rename from eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SourceConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SourceConnectorConfig.java index 242869087f..c31f8c9885 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SourceConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SourceConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.spring.source.connector; +package org.apache.eventmesh.common.config.connector.spring; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/config/SpringSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SpringSinkConfig.java similarity index 83% rename from eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/config/SpringSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SpringSinkConfig.java index aff0d8a0f3..14f1f4bbe5 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/config/SpringSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SpringSinkConfig.java @@ -15,10 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.spring.sink.config; +package org.apache.eventmesh.common.config.connector.spring; -import org.apache.eventmesh.connector.spring.sink.connector.SinkConnectorConfig; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/config/SpringSourceConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SpringSourceConfig.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/config/SpringSourceConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SpringSourceConfig.java index 191e9b1196..e99dfc594c 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/config/SpringSourceConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/spring/SpringSourceConfig.java @@ -15,10 +15,9 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.spring.source.config; +package org.apache.eventmesh.common.config.connector.spring; -import org.apache.eventmesh.connector.spring.source.connector.SourceConnectorConfig; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wechat/SinkConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wechat/SinkConnectorConfig.java index e575e65c67..95ca5e64c2 100644 --- a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wechat/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.wechat.sink.config; +package org.apache.eventmesh.common.config.connector.wechat; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/config/WeChatSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wechat/WeChatSinkConfig.java similarity index 88% rename from eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/config/WeChatSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wechat/WeChatSinkConfig.java index a77e1c77b5..e968202c4c 100644 --- a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/config/WeChatSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wechat/WeChatSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.wechat.sink.config; +package org.apache.eventmesh.common.config.connector.wechat; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/config/SinkConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wecom/SinkConnectorConfig.java similarity index 94% rename from eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/config/SinkConnectorConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wecom/SinkConnectorConfig.java index 013d5a8bb7..b32b5b7d36 100644 --- a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/config/SinkConnectorConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wecom/SinkConnectorConfig.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.wecom.sink.config; +package org.apache.eventmesh.common.config.connector.wecom; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/config/WeComSinkConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wecom/WeComSinkConfig.java similarity index 89% rename from eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/config/WeComSinkConfig.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wecom/WeComSinkConfig.java index 8af43bdbe8..5177baa448 100644 --- a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/config/WeComSinkConfig.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/config/connector/wecom/WeComSinkConfig.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.wecom.sink.config; +package org.apache.eventmesh.common.config.connector.wecom; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/enums/ComponentType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/enums/ComponentType.java new file mode 100644 index 0000000000..a48e3fe013 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/enums/ComponentType.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.enums; + +public enum ComponentType { + CONNECTOR("connector"), + FUNCTION("function"), + MESH("mesh"); + + public String name; + + ComponentType(String name) { + this.name = name; + } + + public String componentTypeName() { + return this.name; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/enums/ConnectorStage.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/enums/ConnectorStage.java new file mode 100644 index 0000000000..90265fba4a --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/enums/ConnectorStage.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.enums; + +public enum ConnectorStage { + SOURCE, + SINK +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/AdminServiceGrpc.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/AdminServiceGrpc.java new file mode 100644 index 0000000000..df5f3ef338 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/AdminServiceGrpc.java @@ -0,0 +1,373 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.protocol.grpc.adminserver; + +import static io.grpc.MethodDescriptor.generateFullMethodName; + +/** + * AdminServiceGrpc + */ +@javax.annotation.Generated( + value = "by gRPC proto compiler (version 1.40.0)", + comments = "Source: event_mesh_admin_service.proto") +@io.grpc.stub.annotations.GrpcGenerated +public final class AdminServiceGrpc { + + private AdminServiceGrpc() { + } + + public static final String SERVICE_NAME = "AdminService"; + + // Static method descriptors that strictly reflect the proto. 
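For orientation, a minimal sketch (not part of the generated file) of how a caller might use these stubs once they are published in eventmesh-common; the localhost:8081 target and the "heartbeat" type string are assumed values, not anything this patch defines:

    io.grpc.ManagedChannel channel = io.grpc.ManagedChannelBuilder
            .forAddress("localhost", 8081)   // assumed admin-server address
            .usePlaintext()
            .build();
    AdminServiceGrpc.AdminServiceBlockingStub stub = AdminServiceGrpc.newBlockingStub(channel);
    Payload reply = stub.invoke(Payload.newBuilder()
            .setMetadata(Metadata.newBuilder().setType("heartbeat").build())   // placeholder type
            .build());
    channel.shutdown();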
+    private static volatile io.grpc.MethodDescriptor<Payload, Payload> getInvokeBiStreamMethod;
+
+    @io.grpc.stub.annotations.RpcMethod(
+        fullMethodName = SERVICE_NAME + '/' + "invokeBiStream",
+        requestType = Payload.class,
+        responseType = Payload.class,
+        methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
+    public static io.grpc.MethodDescriptor<Payload, Payload> getInvokeBiStreamMethod() {
+        io.grpc.MethodDescriptor<Payload, Payload> getInvokeBiStreamMethod;
+        if ((getInvokeBiStreamMethod = AdminServiceGrpc.getInvokeBiStreamMethod) == null) {
+            synchronized (AdminServiceGrpc.class) {
+                if ((getInvokeBiStreamMethod = AdminServiceGrpc.getInvokeBiStreamMethod) == null) {
+                    AdminServiceGrpc.getInvokeBiStreamMethod = getInvokeBiStreamMethod =
+                        io.grpc.MethodDescriptor.<Payload, Payload>newBuilder()
+                            .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
+                            .setFullMethodName(generateFullMethodName(SERVICE_NAME, "invokeBiStream"))
+                            .setSampledToLocalTracing(true)
+                            .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                                Payload.getDefaultInstance()))
+                            .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                                Payload.getDefaultInstance()))
+                            .setSchemaDescriptor(new AdminServiceMethodDescriptorSupplier("invokeBiStream"))
+                            .build();
+                }
+            }
+        }
+        return getInvokeBiStreamMethod;
+    }
+
+    private static volatile io.grpc.MethodDescriptor<Payload, Payload> getInvokeMethod;
+
+    @io.grpc.stub.annotations.RpcMethod(
+        fullMethodName = SERVICE_NAME + '/' + "invoke",
+        requestType = Payload.class,
+        responseType = Payload.class,
+        methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+    public static io.grpc.MethodDescriptor<Payload, Payload> getInvokeMethod() {
+        io.grpc.MethodDescriptor<Payload, Payload> getInvokeMethod;
+        if ((getInvokeMethod = AdminServiceGrpc.getInvokeMethod) == null) {
+            synchronized (AdminServiceGrpc.class) {
+                if ((getInvokeMethod = AdminServiceGrpc.getInvokeMethod) == null) {
+                    AdminServiceGrpc.getInvokeMethod = getInvokeMethod =
+                        io.grpc.MethodDescriptor.<Payload, Payload>newBuilder()
+                            .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+                            .setFullMethodName(generateFullMethodName(SERVICE_NAME, "invoke"))
+                            .setSampledToLocalTracing(true)
+                            .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                                Payload.getDefaultInstance()))
+                            .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                                Payload.getDefaultInstance()))
+                            .setSchemaDescriptor(new AdminServiceMethodDescriptorSupplier("invoke"))
+                            .build();
+                }
+            }
+        }
+        return getInvokeMethod;
+    }
+
+    /**
+     * Creates a new async stub that supports all call types for the service
+     */
+    public static AdminServiceStub newStub(io.grpc.Channel channel) {
+        io.grpc.stub.AbstractStub.StubFactory<AdminServiceStub> factory =
+            new io.grpc.stub.AbstractStub.StubFactory<AdminServiceStub>() {
+                @Override
+                public AdminServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+                    return new AdminServiceStub(channel, callOptions);
+                }
+            };
+        return AdminServiceStub.newStub(factory, channel);
+    }
+
+    /**
+     * Creates a new blocking-style stub that supports unary and streaming output calls on the service
+     */
+    public static AdminServiceBlockingStub newBlockingStub(
+        io.grpc.Channel channel) {
+        io.grpc.stub.AbstractStub.StubFactory<AdminServiceBlockingStub> factory =
+            new io.grpc.stub.AbstractStub.StubFactory<AdminServiceBlockingStub>() {
+                @Override
+                public AdminServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+                    return new AdminServiceBlockingStub(channel, callOptions);
+                }
+            };
+        return AdminServiceBlockingStub.newStub(factory, channel);
+    }
+
+    /**
+     * Creates a new ListenableFuture-style stub that supports unary calls on the service
+     */
+    public static
AdminServiceFutureStub newFutureStub( + io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @Override + public AdminServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new AdminServiceFutureStub(channel, callOptions); + } + }; + return AdminServiceFutureStub.newStub(factory, channel); + } + + /** + * + */ + public static abstract class AdminServiceImplBase implements io.grpc.BindableService { + + /** + * + */ + public io.grpc.stub.StreamObserver invokeBiStream( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(getInvokeBiStreamMethod(), responseObserver); + } + + /** + * + */ + public void invoke(Payload request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getInvokeMethod(), responseObserver); + } + + @Override + public final io.grpc.ServerServiceDefinition bindService() { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getInvokeBiStreamMethod(), + io.grpc.stub.ServerCalls.asyncBidiStreamingCall( + new MethodHandlers< + Payload, + Payload>( + this, METHODID_INVOKE_BI_STREAM))) + .addMethod( + getInvokeMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + Payload, + Payload>( + this, METHODID_INVOKE))) + .build(); + } + } + + /** + * + */ + public static final class AdminServiceStub extends io.grpc.stub.AbstractAsyncStub { + + private AdminServiceStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @Override + protected AdminServiceStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new AdminServiceStub(channel, callOptions); + } + + /** + * + */ + public io.grpc.stub.StreamObserver invokeBiStream( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ClientCalls.asyncBidiStreamingCall( + getChannel().newCall(getInvokeBiStreamMethod(), getCallOptions()), responseObserver); + } + + /** + * + */ + public void invoke(Payload request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getInvokeMethod(), getCallOptions()), request, responseObserver); + } + } + + /** + * + */ + public static final class AdminServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub { + + private AdminServiceBlockingStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @Override + protected AdminServiceBlockingStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new AdminServiceBlockingStub(channel, callOptions); + } + + /** + * + */ + public Payload invoke(Payload request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getInvokeMethod(), getCallOptions(), request); + } + } + + /** + * + */ + public static final class AdminServiceFutureStub extends io.grpc.stub.AbstractFutureStub { + + private AdminServiceFutureStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @Override + protected AdminServiceFutureStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new AdminServiceFutureStub(channel, callOptions); + } + + /** + * + */ + public com.google.common.util.concurrent.ListenableFuture invoke( + Payload request) { + return 
io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getInvokeMethod(), getCallOptions()), request); + } + } + + private static final int METHODID_INVOKE = 0; + private static final int METHODID_INVOKE_BI_STREAM = 1; + + private static final class MethodHandlers implements + io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + + private final AdminServiceImplBase serviceImpl; + private final int methodId; + + MethodHandlers(AdminServiceImplBase serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @Override + @SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_INVOKE: + serviceImpl.invoke((Payload) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @Override + @SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_INVOKE_BI_STREAM: + return (io.grpc.stub.StreamObserver) serviceImpl.invokeBiStream( + (io.grpc.stub.StreamObserver) responseObserver); + default: + throw new AssertionError(); + } + } + } + + private static abstract class AdminServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { + + AdminServiceBaseDescriptorSupplier() { + } + + @Override + public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return EventMeshAdminService.getDescriptor(); + } + + @Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("AdminService"); + } + } + + private static final class AdminServiceFileDescriptorSupplier + extends AdminServiceBaseDescriptorSupplier { + + AdminServiceFileDescriptorSupplier() { + } + } + + private static final class AdminServiceMethodDescriptorSupplier + extends AdminServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + + private final String methodName; + + AdminServiceMethodDescriptorSupplier(String methodName) { + this.methodName = methodName; + } + + @Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (AdminServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new AdminServiceFileDescriptorSupplier()) + .addMethod(getInvokeBiStreamMethod()) + .addMethod(getInvokeMethod()) + .build(); + } + } + } + return result; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/EventMeshAdminService.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/EventMeshAdminService.java new file mode 100644 index 0000000000..d67ed4159a --- /dev/null +++ 
b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/EventMeshAdminService.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.protocol.grpc.adminserver;// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: event_mesh_admin_service.proto + +public final class EventMeshAdminService { + + private EventMeshAdminService() { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + + static final com.google.protobuf.Descriptors.Descriptor + internal_static_Metadata_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_Metadata_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_Metadata_HeadersEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_Metadata_HeadersEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_Payload_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_Payload_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + + static { + String[] descriptorData = { + "\n\036event_mesh_admin_service.proto\032\031google" + + "/protobuf/any.proto\"q\n\010Metadata\022\014\n\004type\030" + + "\003 \001(\t\022\'\n\007headers\030\007 \003(\0132\026.Metadata.Header" + + "sEntry\032.\n\014HeadersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005v" + + "alue\030\002 \001(\t:\0028\001\"4\n\007Payload\022\033\n\010metadata\030\002 " + + "\001(\0132\t.Metadata\022\014\n\004body\030\003 \001(\0142Z\n\014AdminSer" + + "vice\022*\n\016invokeBiStream\022\010.Payload\032\010.Paylo" + + "ad\"\000(\0010\001\022\036\n\006invoke\022\010.Payload\032\010.Payload\"\000" + + "B\002P\001b\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.protobuf.AnyProto.getDescriptor(), + }); + internal_static_Metadata_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Metadata_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Metadata_descriptor, + new String[] {"Type", "Headers",}); + 
internal_static_Metadata_HeadersEntry_descriptor = + internal_static_Metadata_descriptor.getNestedTypes().get(0); + internal_static_Metadata_HeadersEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Metadata_HeadersEntry_descriptor, + new String[] {"Key", "Value",}); + internal_static_Payload_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Payload_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Payload_descriptor, + new String[] {"Metadata", "Body",}); + com.google.protobuf.AnyProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/Metadata.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/Metadata.java new file mode 100644 index 0000000000..f25754f288 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/Metadata.java @@ -0,0 +1,962 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.protocol.grpc.adminserver;// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: event_mesh_admin_service.proto + +/** + * Protobuf type {@code Metadata} + */ +public final class Metadata extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:Metadata) + MetadataOrBuilder { + + private static final long serialVersionUID = 0L; + + // Use Metadata.newBuilder() to construct. 
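Concretely, a minimal sketch of building one of these messages with its string-to-string headers map; the type string and header keys below are placeholders, not values defined by this patch:

    Metadata meta = Metadata.newBuilder()
            .setType("reportHeartbeat")                   // placeholder type
            .putHeaders("runtimeInstanceId", "runtime-1") // headers is a map<string, string>
            .putHeaders("jobId", "job-1")
            .build();
    String jobId = meta.getHeadersOrDefault("jobId", "");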
+ private Metadata(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Metadata() { + type_ = ""; + } + + @Override + @SuppressWarnings({"unused"}) + protected Object newInstance( + UnusedPrivateParameter unused) { + return new Metadata(); + } + + @Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + + private Metadata( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 26: { + String s = input.readStringRequireUtf8(); + + type_ = s; + break; + } + case 58: { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + headers_ = com.google.protobuf.MapField.newMapField( + HeadersDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000001; + } + com.google.protobuf.MapEntry + headers__ = input.readMessage( + HeadersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + headers_.getMutableMap().put( + headers__.getKey(), headers__.getValue()); + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return EventMeshAdminService.internal_static_Metadata_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 7: + return internalGetHeaders(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + + @Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return EventMeshAdminService.internal_static_Metadata_fieldAccessorTable + .ensureFieldAccessorsInitialized( + Metadata.class, Builder.class); + } + + public static final int TYPE_FIELD_NUMBER = 3; + private volatile Object type_; + + /** + * string type = 3; + * + * @return The type. + */ + @Override + public String getType() { + Object ref = type_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + type_ = s; + return s; + } + } + + /** + * string type = 3; + * + * @return The bytes for type. 
+ */ + @Override + public com.google.protobuf.ByteString + getTypeBytes() { + Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int HEADERS_FIELD_NUMBER = 7; + + private static final class HeadersDefaultEntryHolder { + + static final com.google.protobuf.MapEntry< + String, String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + EventMeshAdminService.internal_static_Metadata_HeadersEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + + private com.google.protobuf.MapField< + String, String> headers_; + + private com.google.protobuf.MapField + internalGetHeaders() { + if (headers_ == null) { + return com.google.protobuf.MapField.emptyMapField( + HeadersDefaultEntryHolder.defaultEntry); + } + return headers_; + } + + public int getHeadersCount() { + return internalGetHeaders().getMap().size(); + } + + /** + * map<string, string> headers = 7; + */ + + @Override + public boolean containsHeaders( + String key) { + if (key == null) { + throw new NullPointerException(); + } + return internalGetHeaders().getMap().containsKey(key); + } + + /** + * Use {@link #getHeadersMap()} instead. + */ + @Override + @Deprecated + public java.util.Map getHeaders() { + return getHeadersMap(); + } + + /** + * map<string, string> headers = 7; + */ + @Override + + public java.util.Map getHeadersMap() { + return internalGetHeaders().getMap(); + } + + /** + * map<string, string> headers = 7; + */ + @Override + + public String getHeadersOrDefault( + String key, + String defaultValue) { + if (key == null) { + throw new NullPointerException(); + } + java.util.Map map = + internalGetHeaders().getMap(); + return map.containsKey(key) ? 
map.get(key) : defaultValue; + } + + /** + * map<string, string> headers = 7; + */ + @Override + + public String getHeadersOrThrow( + String key) { + if (key == null) { + throw new NullPointerException(); + } + java.util.Map map = + internalGetHeaders().getMap(); + if (!map.containsKey(key)) { + throw new IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + + @Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) { + return true; + } + if (isInitialized == 0) { + return false; + } + + memoizedIsInitialized = 1; + return true; + } + + @Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, type_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetHeaders(), + HeadersDefaultEntryHolder.defaultEntry, + 7); + unknownFields.writeTo(output); + } + + @Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) { + return size; + } + + size = 0; + if (!getTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, type_); + } + for (java.util.Map.Entry entry + : internalGetHeaders().getMap().entrySet()) { + com.google.protobuf.MapEntry + headers__ = HeadersDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, headers__); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof Metadata)) { + return super.equals(obj); + } + Metadata other = (Metadata) obj; + + if (!getType() + .equals(other.getType())) { + return false; + } + if (!internalGetHeaders().equals( + other.internalGetHeaders())) { + return false; + } + if (!unknownFields.equals(other.unknownFields)) { + return false; + } + return true; + } + + @Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + if (!internalGetHeaders().getMap().isEmpty()) { + hash = (37 * hash) + HEADERS_FIELD_NUMBER; + hash = (53 * hash) + internalGetHeaders().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static Metadata parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Metadata parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Metadata parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Metadata parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public 
static Metadata parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Metadata parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Metadata parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + + public static Metadata parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static Metadata parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static Metadata parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + + public static Metadata parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + + public static Metadata parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(Metadata prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + + /** + * Protobuf type {@code Metadata} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:Metadata) + MetadataOrBuilder { + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return EventMeshAdminService.internal_static_Metadata_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 7: + return internalGetHeaders(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 7: + return internalGetMutableHeaders(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + + @Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return EventMeshAdminService.internal_static_Metadata_fieldAccessorTable + .ensureFieldAccessorsInitialized( + Metadata.class, Builder.class); + } + + // Construct using Metadata.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + + @Override + public Builder clear() { + super.clear(); + type_ = ""; + + internalGetMutableHeaders().clear(); + return this; + } + + @Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return EventMeshAdminService.internal_static_Metadata_descriptor; + } + + @Override + public Metadata getDefaultInstanceForType() { + return Metadata.getDefaultInstance(); + } + + @Override + public Metadata build() { + Metadata result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @Override + public Metadata buildPartial() { + Metadata result = new Metadata(this); + int from_bitField0_ = bitField0_; + result.type_ = type_; + result.headers_ = internalGetHeaders(); + result.headers_.makeImmutable(); + onBuilt(); + return result; + } + + @Override + public Builder clone() { + return super.clone(); + } + + @Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return super.setField(field, value); + } + + @Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return super.setRepeatedField(field, index, value); + } + + @Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return super.addRepeatedField(field, value); + } + + 
@Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof Metadata) { + return mergeFrom((Metadata) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(Metadata other) { + if (other == Metadata.getDefaultInstance()) { + return this; + } + if (!other.getType().isEmpty()) { + type_ = other.type_; + onChanged(); + } + internalGetMutableHeaders().mergeFrom( + other.internalGetHeaders()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @Override + public final boolean isInitialized() { + return true; + } + + @Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Metadata parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (Metadata) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private Object type_ = ""; + + /** + * string type = 3; + * + * @return The type. + */ + public String getType() { + Object ref = type_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + type_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string type = 3; + * + * @return The bytes for type. + */ + public com.google.protobuf.ByteString + getTypeBytes() { + Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string type = 3; + * + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType( + String value) { + if (value == null) { + throw new NullPointerException(); + } + + type_ = value; + onChanged(); + return this; + } + + /** + * string type = 3; + * + * @return This builder for chaining. + */ + public Builder clearType() { + + type_ = getDefaultInstance().getType(); + onChanged(); + return this; + } + + /** + * string type = 3; + * + * @param value The bytes for type to set. + * @return This builder for chaining. 
+ */ + public Builder setTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + type_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + String, String> headers_; + + private com.google.protobuf.MapField + internalGetHeaders() { + if (headers_ == null) { + return com.google.protobuf.MapField.emptyMapField( + HeadersDefaultEntryHolder.defaultEntry); + } + return headers_; + } + + private com.google.protobuf.MapField + internalGetMutableHeaders() { + onChanged(); + ; + if (headers_ == null) { + headers_ = com.google.protobuf.MapField.newMapField( + HeadersDefaultEntryHolder.defaultEntry); + } + if (!headers_.isMutable()) { + headers_ = headers_.copy(); + } + return headers_; + } + + public int getHeadersCount() { + return internalGetHeaders().getMap().size(); + } + + /** + * map<string, string> headers = 7; + */ + + @Override + public boolean containsHeaders( + String key) { + if (key == null) { + throw new NullPointerException(); + } + return internalGetHeaders().getMap().containsKey(key); + } + + /** + * Use {@link #getHeadersMap()} instead. + */ + @Override + @Deprecated + public java.util.Map getHeaders() { + return getHeadersMap(); + } + + /** + * map<string, string> headers = 7; + */ + @Override + + public java.util.Map getHeadersMap() { + return internalGetHeaders().getMap(); + } + + /** + * map<string, string> headers = 7; + */ + @Override + + public String getHeadersOrDefault( + String key, + String defaultValue) { + if (key == null) { + throw new NullPointerException(); + } + java.util.Map map = + internalGetHeaders().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + + /** + * map<string, string> headers = 7; + */ + @Override + + public String getHeadersOrThrow( + String key) { + if (key == null) { + throw new NullPointerException(); + } + java.util.Map map = + internalGetHeaders().getMap(); + if (!map.containsKey(key)) { + throw new IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearHeaders() { + internalGetMutableHeaders().getMutableMap() + .clear(); + return this; + } + + /** + * map<string, string> headers = 7; + */ + + public Builder removeHeaders( + String key) { + if (key == null) { + throw new NullPointerException(); + } + internalGetMutableHeaders().getMutableMap() + .remove(key); + return this; + } + + /** + * Use alternate mutation accessors instead. 
+ */ + @Deprecated + public java.util.Map + getMutableHeaders() { + return internalGetMutableHeaders().getMutableMap(); + } + + /** + * map<string, string> headers = 7; + */ + public Builder putHeaders( + String key, + String value) { + if (key == null) { + throw new NullPointerException(); + } + if (value == null) { + throw new NullPointerException(); + } + internalGetMutableHeaders().getMutableMap() + .put(key, value); + return this; + } + + /** + * map<string, string> headers = 7; + */ + + public Builder putAllHeaders( + java.util.Map values) { + internalGetMutableHeaders().getMutableMap() + .putAll(values); + return this; + } + + @Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:Metadata) + } + + // @@protoc_insertion_point(class_scope:Metadata) + private static final Metadata DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new Metadata(); + } + + public static Metadata getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @Override + public Metadata parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Metadata(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @Override + public Metadata getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/MetadataOrBuilder.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/MetadataOrBuilder.java new file mode 100644 index 0000000000..7afef491f8 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/MetadataOrBuilder.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.protocol.grpc.adminserver;// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: event_mesh_admin_service.proto + +public interface MetadataOrBuilder extends + // @@protoc_insertion_point(interface_extends:Metadata) + com.google.protobuf.MessageOrBuilder { + + /** + * string type = 3; + * + * @return The type. 
+ */ + String getType(); + + /** + * string type = 3; + * + * @return The bytes for type. + */ + com.google.protobuf.ByteString + getTypeBytes(); + + /** + * map<string, string> headers = 7; + */ + int getHeadersCount(); + + /** + * map<string, string> headers = 7; + */ + boolean containsHeaders( + String key); + + /** + * Use {@link #getHeadersMap()} instead. + */ + @Deprecated + java.util.Map + getHeaders(); + + /** + * map<string, string> headers = 7; + */ + java.util.Map + getHeadersMap(); + + /** + * map<string, string> headers = 7; + */ + + String getHeadersOrDefault( + String key, + String defaultValue); + + /** + * map<string, string> headers = 7; + */ + + String getHeadersOrThrow( + String key); +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/Payload.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/Payload.java new file mode 100644 index 0000000000..a0067099a0 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/Payload.java @@ -0,0 +1,893 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.protocol.grpc.adminserver;// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: event_mesh_admin_service.proto + +/** + * Protobuf type {@code Payload} + */ +public final class Payload extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:Payload) + PayloadOrBuilder { + + private static final long serialVersionUID = 0L; + + // Use Payload.newBuilder() to construct. 
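As a rough sketch of the envelope this class provides, assuming a StringValue body as a stand-in (this patch does not define the concrete request/response messages carried inside the google.protobuf.Any):

    Payload request = Payload.newBuilder()
            .setMetadata(Metadata.newBuilder().setType("reportHeartbeat").build()) // placeholder type
            .setBody(com.google.protobuf.Any.pack(com.google.protobuf.StringValue.of("ping")))
            .build();
    // A receiver can test request.getBody().is(SomeMessage.class) and then call
    // unpack(SomeMessage.class), which throws InvalidProtocolBufferException on a type mismatch.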
+ private Payload(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Payload() { + } + + @Override + @SuppressWarnings({"unused"}) + protected Object newInstance( + UnusedPrivateParameter unused) { + return new Payload(); + } + + @Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + + private Payload( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 18: { + Metadata.Builder subBuilder = null; + if (metadata_ != null) { + subBuilder = metadata_.toBuilder(); + } + metadata_ = input.readMessage(Metadata.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(metadata_); + metadata_ = subBuilder.buildPartial(); + } + + break; + } + case 26: { + com.google.protobuf.Any.Builder subBuilder = null; + if (body_ != null) { + subBuilder = body_.toBuilder(); + } + body_ = input.readMessage(com.google.protobuf.Any.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(body_); + body_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return EventMeshAdminService.internal_static_Payload_descriptor; + } + + @Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return EventMeshAdminService.internal_static_Payload_fieldAccessorTable + .ensureFieldAccessorsInitialized( + Payload.class, Builder.class); + } + + public static final int METADATA_FIELD_NUMBER = 2; + private Metadata metadata_; + + /** + * .Metadata metadata = 2; + * + * @return Whether the metadata field is set. + */ + @Override + public boolean hasMetadata() { + return metadata_ != null; + } + + /** + * .Metadata metadata = 2; + * + * @return The metadata. + */ + @Override + public Metadata getMetadata() { + return metadata_ == null ? Metadata.getDefaultInstance() : metadata_; + } + + /** + * .Metadata metadata = 2; + */ + @Override + public MetadataOrBuilder getMetadataOrBuilder() { + return getMetadata(); + } + + public static final int BODY_FIELD_NUMBER = 3; + private com.google.protobuf.Any body_; + + /** + * .google.protobuf.Any body = 3; + * + * @return Whether the body field is set. + */ + @Override + public boolean hasBody() { + return body_ != null; + } + + /** + * .google.protobuf.Any body = 3; + * + * @return The body. + */ + @Override + public com.google.protobuf.Any getBody() { + return body_ == null ? 
com.google.protobuf.Any.getDefaultInstance() : body_; + } + + /** + * .google.protobuf.Any body = 3; + */ + @Override + public com.google.protobuf.AnyOrBuilder getBodyOrBuilder() { + return getBody(); + } + + private byte memoizedIsInitialized = -1; + + @Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) { + return true; + } + if (isInitialized == 0) { + return false; + } + + memoizedIsInitialized = 1; + return true; + } + + @Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (metadata_ != null) { + output.writeMessage(2, getMetadata()); + } + if (body_ != null) { + output.writeMessage(3, getBody()); + } + unknownFields.writeTo(output); + } + + @Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) { + return size; + } + + size = 0; + if (metadata_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getMetadata()); + } + if (body_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getBody()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof Payload)) { + return super.equals(obj); + } + Payload other = (Payload) obj; + + if (hasMetadata() != other.hasMetadata()) { + return false; + } + if (hasMetadata()) { + if (!getMetadata() + .equals(other.getMetadata())) { + return false; + } + } + if (hasBody() != other.hasBody()) { + return false; + } + if (hasBody()) { + if (!getBody() + .equals(other.getBody())) { + return false; + } + } + if (!unknownFields.equals(other.unknownFields)) { + return false; + } + return true; + } + + @Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasMetadata()) { + hash = (37 * hash) + METADATA_FIELD_NUMBER; + hash = (53 * hash) + getMetadata().hashCode(); + } + if (hasBody()) { + hash = (37 * hash) + BODY_FIELD_NUMBER; + hash = (53 * hash) + getBody().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static Payload parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Payload parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Payload parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Payload parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Payload parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Payload parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + 
} + + public static Payload parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + + public static Payload parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static Payload parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static Payload parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + + public static Payload parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + + public static Payload parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(Payload prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + + /** + * Protobuf type {@code Payload} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:Payload) + PayloadOrBuilder { + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return EventMeshAdminService.internal_static_Payload_descriptor; + } + + @Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return EventMeshAdminService.internal_static_Payload_fieldAccessorTable + .ensureFieldAccessorsInitialized( + Payload.class, Builder.class); + } + + // Construct using Payload.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + + @Override + public Builder clear() { + super.clear(); + if (metadataBuilder_ == null) { + metadata_ = null; + } else { + metadata_ = null; + metadataBuilder_ = null; + } + if (bodyBuilder_ == null) { + body_ = null; + } else { + body_ = null; + bodyBuilder_ = null; + } + return this; + } + + @Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return EventMeshAdminService.internal_static_Payload_descriptor; + } + + @Override + public Payload getDefaultInstanceForType() { + return Payload.getDefaultInstance(); + } + + @Override + public Payload build() { + Payload result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @Override + public Payload buildPartial() { + Payload result = new Payload(this); + if (metadataBuilder_ == null) { + result.metadata_ = metadata_; + } else { + result.metadata_ = metadataBuilder_.build(); + } + if (bodyBuilder_ == null) { + result.body_ = body_; + } else { + result.body_ = bodyBuilder_.build(); + } + onBuilt(); + return result; + } + + @Override + public Builder clone() { + return super.clone(); + } + + @Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return super.setField(field, value); + } + + @Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return super.setRepeatedField(field, index, value); + } + + @Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return super.addRepeatedField(field, value); + } + + @Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof Payload) { + return mergeFrom((Payload) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(Payload other) { + if (other == Payload.getDefaultInstance()) { + 
return this; + } + if (other.hasMetadata()) { + mergeMetadata(other.getMetadata()); + } + if (other.hasBody()) { + mergeBody(other.getBody()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @Override + public final boolean isInitialized() { + return true; + } + + @Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Payload parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (Payload) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private Metadata metadata_; + private com.google.protobuf.SingleFieldBuilderV3< + Metadata, Metadata.Builder, MetadataOrBuilder> metadataBuilder_; + + /** + * .Metadata metadata = 2; + * + * @return Whether the metadata field is set. + */ + public boolean hasMetadata() { + return metadataBuilder_ != null || metadata_ != null; + } + + /** + * .Metadata metadata = 2; + * + * @return The metadata. + */ + public Metadata getMetadata() { + if (metadataBuilder_ == null) { + return metadata_ == null ? Metadata.getDefaultInstance() : metadata_; + } else { + return metadataBuilder_.getMessage(); + } + } + + /** + * .Metadata metadata = 2; + */ + public Builder setMetadata(Metadata value) { + if (metadataBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + metadata_ = value; + onChanged(); + } else { + metadataBuilder_.setMessage(value); + } + + return this; + } + + /** + * .Metadata metadata = 2; + */ + public Builder setMetadata( + Metadata.Builder builderForValue) { + if (metadataBuilder_ == null) { + metadata_ = builderForValue.build(); + onChanged(); + } else { + metadataBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + + /** + * .Metadata metadata = 2; + */ + public Builder mergeMetadata(Metadata value) { + if (metadataBuilder_ == null) { + if (metadata_ != null) { + metadata_ = + Metadata.newBuilder(metadata_).mergeFrom(value).buildPartial(); + } else { + metadata_ = value; + } + onChanged(); + } else { + metadataBuilder_.mergeFrom(value); + } + + return this; + } + + /** + * .Metadata metadata = 2; + */ + public Builder clearMetadata() { + if (metadataBuilder_ == null) { + metadata_ = null; + onChanged(); + } else { + metadata_ = null; + metadataBuilder_ = null; + } + + return this; + } + + /** + * .Metadata metadata = 2; + */ + public Metadata.Builder getMetadataBuilder() { + + onChanged(); + return getMetadataFieldBuilder().getBuilder(); + } + + /** + * .Metadata metadata = 2; + */ + public MetadataOrBuilder getMetadataOrBuilder() { + if (metadataBuilder_ != null) { + return metadataBuilder_.getMessageOrBuilder(); + } else { + return metadata_ == null ? 
+ Metadata.getDefaultInstance() : metadata_; + } + } + + /** + * .Metadata metadata = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + Metadata, Metadata.Builder, MetadataOrBuilder> + getMetadataFieldBuilder() { + if (metadataBuilder_ == null) { + metadataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + Metadata, Metadata.Builder, MetadataOrBuilder>( + getMetadata(), + getParentForChildren(), + isClean()); + metadata_ = null; + } + return metadataBuilder_; + } + + private com.google.protobuf.Any body_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder> bodyBuilder_; + + /** + * .google.protobuf.Any body = 3; + * + * @return Whether the body field is set. + */ + public boolean hasBody() { + return bodyBuilder_ != null || body_ != null; + } + + /** + * .google.protobuf.Any body = 3; + * + * @return The body. + */ + public com.google.protobuf.Any getBody() { + if (bodyBuilder_ == null) { + return body_ == null ? com.google.protobuf.Any.getDefaultInstance() : body_; + } else { + return bodyBuilder_.getMessage(); + } + } + + /** + * .google.protobuf.Any body = 3; + */ + public Builder setBody(com.google.protobuf.Any value) { + if (bodyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + body_ = value; + onChanged(); + } else { + bodyBuilder_.setMessage(value); + } + + return this; + } + + /** + * .google.protobuf.Any body = 3; + */ + public Builder setBody( + com.google.protobuf.Any.Builder builderForValue) { + if (bodyBuilder_ == null) { + body_ = builderForValue.build(); + onChanged(); + } else { + bodyBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + + /** + * .google.protobuf.Any body = 3; + */ + public Builder mergeBody(com.google.protobuf.Any value) { + if (bodyBuilder_ == null) { + if (body_ != null) { + body_ = + com.google.protobuf.Any.newBuilder(body_).mergeFrom(value).buildPartial(); + } else { + body_ = value; + } + onChanged(); + } else { + bodyBuilder_.mergeFrom(value); + } + + return this; + } + + /** + * .google.protobuf.Any body = 3; + */ + public Builder clearBody() { + if (bodyBuilder_ == null) { + body_ = null; + onChanged(); + } else { + body_ = null; + bodyBuilder_ = null; + } + + return this; + } + + /** + * .google.protobuf.Any body = 3; + */ + public com.google.protobuf.Any.Builder getBodyBuilder() { + + onChanged(); + return getBodyFieldBuilder().getBuilder(); + } + + /** + * .google.protobuf.Any body = 3; + */ + public com.google.protobuf.AnyOrBuilder getBodyOrBuilder() { + if (bodyBuilder_ != null) { + return bodyBuilder_.getMessageOrBuilder(); + } else { + return body_ == null ? 
+ com.google.protobuf.Any.getDefaultInstance() : body_; + } + } + + /** + * .google.protobuf.Any body = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder> + getBodyFieldBuilder() { + if (bodyBuilder_ == null) { + bodyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder>( + getBody(), + getParentForChildren(), + isClean()); + body_ = null; + } + return bodyBuilder_; + } + + @Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:Payload) + } + + // @@protoc_insertion_point(class_scope:Payload) + private static final Payload DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new Payload(); + } + + public static Payload getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @Override + public Payload parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Payload(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @Override + public Payload getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/PayloadOrBuilder.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/PayloadOrBuilder.java new file mode 100644 index 0000000000..a50a340e1b --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/adminserver/PayloadOrBuilder.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.protocol.grpc.adminserver;// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: event_mesh_admin_service.proto + +public interface PayloadOrBuilder extends + // @@protoc_insertion_point(interface_extends:Payload) + com.google.protobuf.MessageOrBuilder { + + /** + * .Metadata metadata = 2; + * + * @return Whether the metadata field is set. 
+ */ + boolean hasMetadata(); + + /** + * .Metadata metadata = 2; + * + * @return The metadata. + */ + Metadata getMetadata(); + + /** + * .Metadata metadata = 2; + */ + MetadataOrBuilder getMetadataOrBuilder(); + + /** + * .google.protobuf.Any body = 3; + * + * @return Whether the body field is set. + */ + boolean hasBody(); + + /** + * .google.protobuf.Any body = 3; + * + * @return The body. + */ + com.google.protobuf.Any getBody(); + + /** + * .google.protobuf.Any body = 3; + */ + com.google.protobuf.AnyOrBuilder getBodyOrBuilder(); +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/proto/event_mesh_admin_service.proto b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/proto/event_mesh_admin_service.proto new file mode 100644 index 0000000000..fa7095840f --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/protocol/grpc/proto/event_mesh_admin_service.proto @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax="proto3"; + +import "google/protobuf/any.proto"; + +option java_multiple_files=true; + +message Metadata { + string type = 3; + map<string, string> headers = 7; +} + + +message Payload { + Metadata metadata = 2; + google.protobuf.Any body = 3; +} + +service AdminService { + rpc invokeBiStream(stream Payload) returns (stream Payload){} + rpc invoke(Payload) returns (Payload){} +} + diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java new file mode 100644 index 0000000000..da9daffe9c --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/JobState.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + package org.apache.eventmesh.common.remote; + + import java.util.HashMap; + import java.util.Map; + + import lombok.ToString; + + @ToString + public enum JobState { + INIT, RUNNING, COMPLETE, DELETE, FAIL; + private static final JobState[] STATES_NUM_INDEX = JobState.values(); + private static final Map<String, JobState> STATES_NAME_INDEX = new HashMap<>(); + + static { + for (JobState jobState : STATES_NUM_INDEX) { + STATES_NAME_INDEX.put(jobState.name(), jobState); + } + } + + public static JobState fromIndex(Integer index) { + if (index == null || index < 0 || index >= STATES_NUM_INDEX.length) { + return null; + } + + return STATES_NUM_INDEX[index]; + } + + public static JobState fromIndex(String index) { + if (index == null || index.isEmpty()) { + return null; + } + + return STATES_NAME_INDEX.get(index); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TaskState.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TaskState.java new file mode 100644 index 0000000000..606339c443 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TaskState.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote; + +import java.util.HashMap; +import java.util.Map; + +import lombok.ToString; + +@ToString +public enum TaskState { + INIT, STARTED, RUNNING, PAUSE, COMPLETE, DELETE, FAIL; + private static final TaskState[] STATES_NUM_INDEX = TaskState.values(); + private static final Map<String, TaskState> STATES_NAME_INDEX = new HashMap<>(); + static { + for (TaskState taskState : STATES_NUM_INDEX) { + STATES_NAME_INDEX.put(taskState.name(), taskState); + } + } + + public static TaskState fromIndex(Integer index) { + if (index == null || index < 0 || index >= STATES_NUM_INDEX.length) { + return null; + } + + return STATES_NUM_INDEX[index]; + } + + public static TaskState fromIndex(String index) { + if (index == null || index.isEmpty()) { + return null; + } + + return STATES_NAME_INDEX.get(index); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java new file mode 100644 index 0000000000..6b43598398 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/TransportType.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote; + +import org.apache.eventmesh.common.remote.datasource.DataSourceType; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Getter; + +@Getter +public enum TransportType { + MYSQL_MYSQL(DataSourceType.MYSQL, DataSourceType.MYSQL), + REDIS_REDIS(DataSourceType.REDIS, DataSourceType.REDIS), + ROCKETMQ_ROCKETMQ(DataSourceType.ROCKETMQ, DataSourceType.ROCKETMQ), + MYSQL_HTTP(DataSourceType.MYSQL, DataSourceType.HTTP), + ROCKETMQ_HTTP(DataSourceType.ROCKETMQ, DataSourceType.HTTP), + HTTP_MYSQL(DataSourceType.HTTP, DataSourceType.MYSQL), + HTTP_REDIS(DataSourceType.HTTP, DataSourceType.REDIS), + HTTP_ROCKETMQ(DataSourceType.HTTP, DataSourceType.ROCKETMQ), + REDIS_MQ(DataSourceType.REDIS, DataSourceType.ROCKETMQ), + HTTP_HTTP(DataSourceType.HTTP, DataSourceType.HTTP), + ; + private static final Map INDEX_TYPES = new HashMap<>(); + private static final TransportType[] TYPES = TransportType.values(); + private static final String SEPARATOR = "@"; + + static { + for (TransportType type : TYPES) { + INDEX_TYPES.put(type.name(), type); + } + } + + private final DataSourceType src; + + private final DataSourceType dst; + + TransportType(DataSourceType src, DataSourceType dst) { + this.src = src; + this.dst = dst; + } + + + public static TransportType getTransportType(String index) { + if (index == null || index.isEmpty()) { + return null; + } + return INDEX_TYPES.get(index); + } + + public static TransportType getTransportType(Integer index) { + if (index == null || index < 0 || index >= TYPES.length) { + return null; + } + return TYPES[index]; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java new file mode 100644 index 0000000000..afda984805 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSource.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.datasource; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +import lombok.Data; + +@Data +public class DataSource { + + private DataSourceType type; + + private String desc; + @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) + @JsonSubTypes({ + @JsonSubTypes.Type(value = CanalSourceConfig.class, name = "CanalSourceConfig"), + @JsonSubTypes.Type(value = CanalSinkConfig.class, name = "CanalSinkConfig") + }) + private Config conf; + + private Class confClazz; + + private String region; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceClassify.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceClassify.java new file mode 100644 index 0000000000..8cb01c9204 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceClassify.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.datasource; + +public enum DataSourceClassify { + // relationship db + RDB, + MQ, + CACHE, + TUNNEL; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceConf.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceConf.java new file mode 100644 index 0000000000..9701a9fa11 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceConf.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.datasource; + + +public abstract class DataSourceConf { + public abstract Class getConfClass(); +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceDriverType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceDriverType.java new file mode 100644 index 0000000000..4429bee5a9 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceDriverType.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.datasource; + +public enum DataSourceDriverType { + MYSQL, + REDIS, + ROCKETMQ, + HTTP; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java new file mode 100644 index 0000000000..8c40971e7b --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/datasource/DataSourceType.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.datasource; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Getter; +import lombok.ToString; + +@Getter +@ToString +public enum DataSourceType { + MYSQL("MySQL", DataSourceDriverType.MYSQL, DataSourceClassify.RDB), + REDIS("Redis", DataSourceDriverType.REDIS, DataSourceClassify.CACHE), + ROCKETMQ("RocketMQ", DataSourceDriverType.ROCKETMQ, DataSourceClassify.MQ), + HTTP("HTTP", DataSourceDriverType.HTTP, DataSourceClassify.TUNNEL); + private static final Map INDEX_TYPES = new HashMap<>(); + private static final DataSourceType[] TYPES = DataSourceType.values(); + static { + for (DataSourceType type : TYPES) { + INDEX_TYPES.put(type.name(), type); + } + } + + private final String name; + private final DataSourceDriverType driverType; + private final DataSourceClassify classify; + + DataSourceType(String name, DataSourceDriverType driverType, DataSourceClassify classify) { + this.name = name; + this.driverType = driverType; + this.classify = classify; + } + + public static DataSourceType getDataSourceType(String index) { + if (index == null || index.isEmpty()) { + return null; + } + return INDEX_TYPES.get(index); + } + + public static DataSourceType getDataSourceType(Integer index) { + if (index == null || index < 0 || index >= TYPES.length) { + return null; + } + return TYPES[index]; + } + + public static DataSourceType fromString(String type) { + for (DataSourceType dataSourceType : DataSourceType.values()) { + if (dataSourceType.name().equalsIgnoreCase(type)) { + return dataSourceType; + } + } + throw new IllegalArgumentException("No enum constant for type: " + type); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/ErrorCode.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/ErrorCode.java new file mode 100644 index 0000000000..f24971acc7 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/ErrorCode.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.exception; + +public class ErrorCode { + + public static final int SUCCESS = 0; + public static final int BAD_REQUEST = 4001; + public static final int BAD_DB_DATA = 4002; + + public static final int INTERNAL_ERR = 5000; + public static final int STARTUP_CONFIG_MISS = 5001; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/PayloadFormatException.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/PayloadFormatException.java new file mode 100644 index 0000000000..affa6b8bbf --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/PayloadFormatException.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.exception; + +public class PayloadFormatException extends RemoteRuntimeException { + + public PayloadFormatException(int code, String desc) { + super(code, desc); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/RemoteRuntimeException.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/RemoteRuntimeException.java new file mode 100644 index 0000000000..708a558d37 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/exception/RemoteRuntimeException.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.exception; + +public class RemoteRuntimeException extends RuntimeException { + + protected final int code; + protected final String message; + + public RemoteRuntimeException(int code, String message) { + this.code = code; + this.message = message; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/JobConnectorConfig.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/JobConnectorConfig.java new file mode 100644 index 0000000000..14e8178cf3 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/JobConnectorConfig.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.job; + +import java.util.Map; + +import lombok.Data; + +/** + * Description: + */ +@Data +public class JobConnectorConfig { + private Map sourceConnectorConfig; + + private String sourceConnectorDesc; + + private Map sinkConnectorConfig; + + private String sinkConnectorDesc; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/JobType.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/JobType.java new file mode 100644 index 0000000000..83d2f56964 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/JobType.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + package org.apache.eventmesh.common.remote.job; + + import java.util.HashMap; + import java.util.Map; + + public enum JobType { + FULL, + INCREASE, + CHECK; + + private static final JobType[] STATES_NUM_INDEX = JobType.values(); + private static final Map<String, JobType> STATES_NAME_INDEX = new HashMap<>(); + static { + for (JobType jobType : STATES_NUM_INDEX) { + STATES_NAME_INDEX.put(jobType.name(), jobType); + } + } + + public static JobType fromIndex(Integer index) { + if (index == null || index < 0 || index >= STATES_NUM_INDEX.length) { + return null; + } + + return STATES_NUM_INDEX[index]; + } + + public static JobType fromIndex(String index) { + if (index == null || index.isEmpty()) { + return null; + } + + return STATES_NAME_INDEX.get(index); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/SyncConsistency.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/SyncConsistency.java new file mode 100644 index 0000000000..a5aec2aa38 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/SyncConsistency.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.job; + +public enum SyncConsistency { + /** + * based on media + */ + MEDIA("M"), + /** + * based on store + */ + STORE("S"), + /** + * Based on the current change value, eventual consistency + */ + BASE("B"); + + private String value; + + SyncConsistency(String value) { + this.value = value; + } + + public static SyncConsistency valuesOf(String value) { + SyncConsistency[] modes = values(); + for (SyncConsistency mode : modes) { + if (mode.value.equalsIgnoreCase(value)) { + return mode; + } + } + return null; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public boolean isMedia() { + return this.equals(SyncConsistency.MEDIA); + } + + public boolean isStore() { + return this.equals(SyncConsistency.STORE); + } + + public boolean isBase() { + return this.equals(SyncConsistency.BASE); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/SyncMode.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/SyncMode.java new file mode 100644 index 0000000000..0f2f9bdfcb --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/job/SyncMode.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.job; + +public enum SyncMode { + /** + * row + */ + ROW("R"), + /** + * field + */ + FIELD("F"); + + private String value; + + SyncMode(String value) { + this.value = value; + } + + public static SyncMode valuesOf(String value) { + SyncMode[] modes = values(); + for (SyncMode mode : modes) { + if (mode.value.equalsIgnoreCase(value)) { + return mode; + } + } + return null; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public boolean isRow() { + return this.equals(SyncMode.ROW); + } + + public boolean isField() { + return this.equals(SyncMode.FIELD); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordOffset.java new file mode 100644 index 0000000000..f78585ca15 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordOffset.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset; + +public abstract class RecordOffset { + + public abstract Class getRecordOffsetClass(); + + public RecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordPartition.java new file mode 100644 index 0000000000..00e4c30e48 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordPartition.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset; + +public abstract class RecordPartition { + + public abstract Class getRecordPartitionClass(); + + public RecordPartition() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordPosition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordPosition.java new file mode 100644 index 0000000000..5f45390b73 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/RecordPosition.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.offset; + +import org.apache.eventmesh.common.remote.offset.S3.S3RecordOffset; +import org.apache.eventmesh.common.remote.offset.S3.S3RecordPartition; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordPartition; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordOffset; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordPartition; +import org.apache.eventmesh.common.remote.offset.file.FileRecordOffset; +import org.apache.eventmesh.common.remote.offset.file.FileRecordPartition; +import org.apache.eventmesh.common.remote.offset.kafka.KafkaRecordOffset; +import org.apache.eventmesh.common.remote.offset.kafka.KafkaRecordPartition; +import org.apache.eventmesh.common.remote.offset.pulsar.PulsarRecordOffset; +import org.apache.eventmesh.common.remote.offset.pulsar.PulsarRecordPartition; +import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordOffset; +import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordPartition; + +import java.util.Objects; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +public class RecordPosition { + + @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) + @JsonSubTypes({ + @JsonSubTypes.Type(value = CanalRecordPartition.class, name = "CanalRecordPartition"), + @JsonSubTypes.Type(value = CanalFullRecordPartition.class, name = "CanalFullRecordPartition"), + @JsonSubTypes.Type(value = FileRecordPartition.class, name = "FileRecordPartition"), + @JsonSubTypes.Type(value = S3RecordPartition.class, name = "S3RecordPartition"), + @JsonSubTypes.Type(value = KafkaRecordPartition.class, name = "KafkaRecordPartition"), + @JsonSubTypes.Type(value = PulsarRecordPartition.class, name = "PulsarRecordPartition"), + @JsonSubTypes.Type(value = RocketMQRecordPartition.class, name = "RocketMQRecordPartition"), + }) + private RecordPartition recordPartition; + + private Class recordPartitionClazz; + + @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) + @JsonSubTypes({ + @JsonSubTypes.Type(value = CanalRecordOffset.class, name = "CanalRecordOffset"), + @JsonSubTypes.Type(value = CanalFullRecordOffset.class, name = "CanalFullRecordOffset"), + @JsonSubTypes.Type(value = FileRecordOffset.class, name = "FileRecordOffset"), + @JsonSubTypes.Type(value = S3RecordOffset.class, name = "S3RecordOffset"), + @JsonSubTypes.Type(value = KafkaRecordOffset.class, name = "KafkaRecordOffset"), + @JsonSubTypes.Type(value = PulsarRecordOffset.class, name = "PulsarRecordOffset"), + @JsonSubTypes.Type(value = RocketMQRecordOffset.class, name = "RocketMQRecordOffset"), + }) + private RecordOffset recordOffset; + + private Class recordOffsetClazz; + + public RecordPosition() { + + } + + public RecordPosition( + RecordPartition recordPartition, RecordOffset recordOffset) { + this.recordPartition = recordPartition; + this.recordOffset = recordOffset; + this.recordPartitionClazz = recordPartition.getRecordPartitionClass(); + this.recordOffsetClazz = recordOffset.getRecordOffsetClass(); + } + + public RecordPartition getRecordPartition() { + return recordPartition; + } + + public void setRecordPartition(RecordPartition recordPartition) { + this.recordPartition = recordPartition; + if (recordPartition == null) { + this.recordPartitionClazz = null; + return; + } + this.recordPartitionClazz = recordPartition.getRecordPartitionClass(); + } + + public RecordOffset 
getRecordOffset() { + return recordOffset; + } + + public void setRecordOffset(RecordOffset recordOffset) { + this.recordOffset = recordOffset; + if (recordOffset == null) { + this.recordOffsetClazz = null; + return; + } + this.recordOffsetClazz = recordOffset.getRecordOffsetClass(); + } + + public Class getRecordPartitionClazz() { + return recordPartitionClazz; + } + + public Class getRecordOffsetClazz() { + return recordOffsetClazz; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof RecordPosition)) { + return false; + } + RecordPosition position = (RecordPosition) o; + return recordPartition.equals(position.recordPartition) && recordOffset.equals(position.recordOffset); + } + + @Override + public int hashCode() { + return Objects.hash(recordPartition, recordOffset); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/S3/S3RecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/S3/S3RecordOffset.java new file mode 100644 index 0000000000..9f38e4b2c6 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/S3/S3RecordOffset.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.S3; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class S3RecordOffset extends RecordOffset { + + private Long offset; + + @Override + public Class getRecordOffsetClass() { + return S3RecordOffset.class; + } + + public S3RecordOffset() { + + } +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/S3/S3RecordPartition.java similarity index 58% rename from eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordPartition.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/S3/S3RecordPartition.java index 959a6b9b3c..3e42a4d093 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordPartition.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/S3/S3RecordPartition.java @@ -15,32 +15,33 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.openconnect.offsetmgmt.api.data; +package org.apache.eventmesh.common.remote.offset.S3; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; -import java.util.HashMap; -import java.util.Map; import java.util.Objects; -public class RecordPartition { +import lombok.Data; +import lombok.ToString; - /** - * if pull message from mq key=topic, - * value=topicName key=brokerName, - * value=brokerName key=queueId, - * value=queueId - */ - private Map partition = new HashMap<>(); - public RecordPartition() { +@Data +@ToString +public class S3RecordPartition extends RecordPartition { - } + private String region; + + private String bucket; - public RecordPartition(Map partition) { - this.partition = partition; + private String fileName; + + @Override + public Class getRecordPartitionClass() { + return S3RecordPartition.class; } - public Map getPartition() { - return partition; + public S3RecordPartition() { + super(); } @Override @@ -48,15 +49,15 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof RecordPartition)) { + if (o == null || getClass() != o.getClass()) { return false; } - RecordPartition partition1 = (RecordPartition) o; - return Objects.equals(partition, partition1.partition); + S3RecordPartition that = (S3RecordPartition) o; + return Objects.equals(fileName, that.fileName); } @Override public int hashCode() { - return Objects.hash(partition); + return Objects.hash(fileName); } } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalFullRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalFullRecordOffset.java new file mode 100644 index 0000000000..a0a077b7f5 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalFullRecordOffset.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.offset.canal; + +import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@Data +@EqualsAndHashCode(callSuper = true) +@ToString +public class CanalFullRecordOffset extends RecordOffset { + private JobRdbFullPosition position; + + @Override + public Class getRecordOffsetClass() { + return CanalFullRecordOffset.class; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalFullRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalFullRecordPartition.java new file mode 100644 index 0000000000..73626fa78f --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalFullRecordPartition.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.canal; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + + +@Data +@ToString +@EqualsAndHashCode(callSuper = true) +public class CanalFullRecordPartition extends RecordPartition { + + @Override + public Class getRecordPartitionClass() { + return CanalFullRecordPartition.class; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalRecordOffset.java new file mode 100644 index 0000000000..d0f2053f4d --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalRecordOffset.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.offset.canal; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class CanalRecordOffset extends RecordOffset { + + private Long offset; + + // mysql instance gtid range + private String gtid; + + private String currentGtid; + + @Override + public Class getRecordOffsetClass() { + return CanalRecordOffset.class; + } + + public CanalRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalRecordPartition.java new file mode 100644 index 0000000000..ded82306e3 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/canal/CanalRecordPartition.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.canal; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import java.util.Objects; + +import lombok.Data; +import lombok.ToString; + + +@Data +@ToString +public class CanalRecordPartition extends RecordPartition { + + private String serverUUID; + + private String journalName; + + private Long timeStamp; + + @Override + public Class getRecordPartitionClass() { + return CanalRecordPartition.class; + } + + public CanalRecordPartition() { + super(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CanalRecordPartition that = (CanalRecordPartition) o; + return Objects.equals(journalName, that.journalName) && Objects.equals(timeStamp, that.timeStamp); + } + + @Override + public int hashCode() { + return Objects.hash(journalName, timeStamp); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/file/FileRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/file/FileRecordOffset.java new file mode 100644 index 0000000000..cda293cb8c --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/file/FileRecordOffset.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.file; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class FileRecordOffset extends RecordOffset { + + private Long offset; + + @Override + public Class getRecordOffsetClass() { + return FileRecordOffset.class; + } + + public FileRecordOffset() { + + } +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/file/FileRecordPartition.java similarity index 61% rename from eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffset.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/file/FileRecordPartition.java index fee70de27e..1a6dddad44 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffset.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/file/FileRecordPartition.java @@ -15,30 +15,29 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.openconnect.offsetmgmt.api.data; +package org.apache.eventmesh.common.remote.offset.file; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; -import java.util.HashMap; -import java.util.Map; import java.util.Objects; -public class RecordOffset { +import lombok.Data; +import lombok.ToString; - /** - * if pull message from mq key=queueOffset, - * value=queueOffset value - */ - private Map offset = new HashMap<>(); - public RecordOffset() { +@Data +@ToString +public class FileRecordPartition extends RecordPartition { - } + private String fileName; - public RecordOffset(Map offset) { - this.offset = offset; + @Override + public Class getRecordPartitionClass() { + return FileRecordPartition.class; } - public Map getOffset() { - return offset; + public FileRecordPartition() { + super(); } @Override @@ -46,16 +45,15 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof RecordOffset)) { + if (o == null || getClass() != o.getClass()) { return false; } - RecordOffset offset1 = (RecordOffset) o; - return Objects.equals(offset, offset1.offset); + FileRecordPartition that = (FileRecordPartition) o; + return Objects.equals(fileName, that.fileName); } @Override public int hashCode() { - return Objects.hash(offset); + return Objects.hash(fileName); } - } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/http/HttpRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/http/HttpRecordOffset.java new file mode 100644 index 0000000000..f5084c755f --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/http/HttpRecordOffset.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
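// Illustrative sketch (not part of the patch): for file sources the partition above is
// identified by its file name only, and the offset is the read position inside that file.
// The file name used here is hypothetical.
import org.apache.eventmesh.common.remote.offset.file.FileRecordOffset;
import org.apache.eventmesh.common.remote.offset.file.FileRecordPartition;

class FilePositionSketch {

    public static void main(String[] args) {
        FileRecordPartition partition = new FileRecordPartition();
        partition.setFileName("demo-source-0.log"); // hypothetical file name

        FileRecordOffset offset = new FileRecordOffset();
        offset.setOffset(1024L);

        // FileRecordPartition#equals/hashCode are based on fileName alone, so checkpoints
        // for the same file collapse onto a single partition key.
        System.out.println(partition + " @ " + offset.getOffset());
    }
}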
+ */ + +package org.apache.eventmesh.common.remote.offset.http; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import java.util.Map; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class HttpRecordOffset extends RecordOffset { + + private Map offsetMap; + + @Override + public Class getRecordOffsetClass() { + return HttpRecordOffset.class; + } + + public HttpRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/http/HttpRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/http/HttpRecordPartition.java new file mode 100644 index 0000000000..453b3b501e --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/http/HttpRecordPartition.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.http; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class HttpRecordPartition extends RecordPartition { + + @Override + public Class getRecordPartitionClass() { + return HttpRecordPartition.class; + } + + public HttpRecordPartition() { + super(); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/jdbc/JdbcRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/jdbc/JdbcRecordOffset.java new file mode 100644 index 0000000000..a97a90e658 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/jdbc/JdbcRecordOffset.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.offset.jdbc; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class JdbcRecordOffset extends RecordOffset { + + private Long offset; + + @Override + public Class getRecordOffsetClass() { + return JdbcRecordOffset.class; + } + + public JdbcRecordOffset() { + + } +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordPosition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/jdbc/JdbcRecordPartition.java similarity index 57% rename from eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordPosition.java rename to eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/jdbc/JdbcRecordPartition.java index 0b836ab607..1eb6937a87 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordPosition.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/jdbc/JdbcRecordPartition.java @@ -15,28 +15,29 @@ * limitations under the License. */ -package org.apache.eventmesh.openconnect.offsetmgmt.api.data; +package org.apache.eventmesh.common.remote.offset.jdbc; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; import java.util.Objects; -public class RecordPosition { +import lombok.Data; +import lombok.ToString; - private final RecordPartition recordPartition; - private final RecordOffset recordOffset; +@Data +@ToString +public class JdbcRecordPartition extends RecordPartition { - public RecordPosition( - RecordPartition recordPartition, RecordOffset recordOffset) { - this.recordPartition = recordPartition; - this.recordOffset = recordOffset; - } + private String fileName; - public RecordPartition getPartition() { - return recordPartition; + @Override + public Class getRecordPartitionClass() { + return JdbcRecordPartition.class; } - public RecordOffset getOffset() { - return recordOffset; + public JdbcRecordPartition() { + super(); } @Override @@ -44,15 +45,15 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof RecordPosition)) { + if (o == null || getClass() != o.getClass()) { return false; } - RecordPosition position = (RecordPosition) o; - return recordPartition.equals(position.recordPartition) && recordOffset.equals(position.recordOffset); + JdbcRecordPartition that = (JdbcRecordPartition) o; + return Objects.equals(fileName, that.fileName); } @Override public int hashCode() { - return Objects.hash(recordPartition, recordOffset); + return Objects.hash(fileName); } } diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/kafka/KafkaRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/kafka/KafkaRecordOffset.java new file mode 100644 index 0000000000..134ef06fe4 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/kafka/KafkaRecordOffset.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.kafka; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class KafkaRecordOffset extends RecordOffset { + + private Long offset; + + @Override + public Class getRecordOffsetClass() { + return KafkaRecordOffset.class; + } + + public KafkaRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/kafka/KafkaRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/kafka/KafkaRecordPartition.java new file mode 100644 index 0000000000..77dec82267 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/kafka/KafkaRecordPartition.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.offset.kafka; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import java.util.Objects; + +import lombok.Data; +import lombok.ToString; + + +@Data +@ToString +public class KafkaRecordPartition extends RecordPartition { + + private String topic; + + private Integer partition; + + @Override + public Class getRecordPartitionClass() { + return KafkaRecordPartition.class; + } + + public KafkaRecordPartition() { + super(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + KafkaRecordPartition that = (KafkaRecordPartition) o; + return Objects.equals(topic, that.topic) && Objects.equals(partition, that.partition); + } + + @Override + public int hashCode() { + return Objects.hash(topic, partition); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/prometheus/PrometheusRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/prometheus/PrometheusRecordOffset.java new file mode 100644 index 0000000000..acb5b3ce02 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/prometheus/PrometheusRecordOffset.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.prometheus; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class PrometheusRecordOffset extends RecordOffset { + + private Long offset; + + @Override + public Class getRecordOffsetClass() { + return PrometheusRecordOffset.class; + } + + public PrometheusRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/prometheus/PrometheusRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/prometheus/PrometheusRecordPartition.java new file mode 100644 index 0000000000..74302504c2 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/prometheus/PrometheusRecordPartition.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
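// Illustrative sketch (not part of the patch): because KafkaRecordPartition above
// implements equals/hashCode over (topic, partition), it can be used directly as a map
// key when tracking one committed KafkaRecordOffset per topic-partition. Topic name and
// numbers are made up.
import org.apache.eventmesh.common.remote.offset.kafka.KafkaRecordOffset;
import org.apache.eventmesh.common.remote.offset.kafka.KafkaRecordPartition;

import java.util.HashMap;
import java.util.Map;

class KafkaOffsetTrackingSketch {

    public static void main(String[] args) {
        Map<KafkaRecordPartition, KafkaRecordOffset> committed = new HashMap<>();

        KafkaRecordPartition partition = new KafkaRecordPartition();
        partition.setTopic("demo-topic"); // hypothetical topic
        partition.setPartition(0);

        KafkaRecordOffset offset = new KafkaRecordOffset();
        offset.setOffset(42L);

        committed.put(partition, offset); // keyed by (topic, partition)
        System.out.println(committed);
    }
}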
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.prometheus; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class PrometheusRecordPartition extends RecordPartition { + + @Override + public Class getRecordPartitionClass() { + return PrometheusRecordPartition.class; + } + + public PrometheusRecordPartition() { + super(); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/pulsar/PulsarRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/pulsar/PulsarRecordOffset.java new file mode 100644 index 0000000000..bbe3d43803 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/pulsar/PulsarRecordOffset.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.pulsar; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class PulsarRecordOffset extends RecordOffset { + + /** + * if pull message from mq + * key=queueOffset, + * value=queueOffset value + */ + private Long queueOffset; + + @Override + public Class getRecordOffsetClass() { + return PulsarRecordOffset.class; + } + + public PulsarRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/pulsar/PulsarRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/pulsar/PulsarRecordPartition.java new file mode 100644 index 0000000000..0c152b50b3 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/pulsar/PulsarRecordPartition.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.pulsar; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import java.util.Objects; + +import lombok.Data; +import lombok.ToString; + + +@Data +@ToString +public class PulsarRecordPartition extends RecordPartition { + + private String topic; + + private Long queueId; + + + @Override + public Class getRecordPartitionClass() { + return PulsarRecordPartition.class; + } + + public PulsarRecordPartition() { + super(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PulsarRecordPartition that = (PulsarRecordPartition) o; + return Objects.equals(topic, that.topic) && Objects.equals(queueId, + that.queueId); + } + + @Override + public int hashCode() { + return Objects.hash(topic, queueId); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/rocketmq/RocketMQRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/rocketmq/RocketMQRecordOffset.java new file mode 100644 index 0000000000..56094c9e72 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/rocketmq/RocketMQRecordOffset.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.offset.rocketmq; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class RocketMQRecordOffset extends RecordOffset { + + /** + * if pull message from mq + * key=queueOffset, + * value=queueOffset value + */ + private Long queueOffset; + + @Override + public Class getRecordOffsetClass() { + return RocketMQRecordOffset.class; + } + + public RocketMQRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/rocketmq/RocketMQRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/rocketmq/RocketMQRecordPartition.java new file mode 100644 index 0000000000..0963af6f59 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/rocketmq/RocketMQRecordPartition.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.rocketmq; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import java.util.Objects; + +import lombok.Data; +import lombok.ToString; + + +@Data +@ToString +public class RocketMQRecordPartition extends RecordPartition { + + /** + * key=topic,value=topicName key=brokerName,value=brokerName key=queueId,value=queueId + */ + + private String broker; + + private String topic; + + private String queueId; + + + @Override + public Class getRecordPartitionClass() { + return RocketMQRecordPartition.class; + } + + public RocketMQRecordPartition() { + super(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RocketMQRecordPartition that = (RocketMQRecordPartition) o; + return Objects.equals(broker, that.broker) && Objects.equals(topic, that.topic) && Objects.equals(queueId, + that.queueId); + } + + @Override + public int hashCode() { + return Objects.hash(broker, topic, queueId); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/spring/SpringRecordOffset.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/spring/SpringRecordOffset.java new file mode 100644 index 0000000000..d0916c5175 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/spring/SpringRecordOffset.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
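// Illustrative sketch (not part of the patch): a RocketMQ queue is addressed by the
// (broker, topic, queueId) triple declared above, while the offset is the queue offset
// within it. Broker/topic names are made up.
import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordOffset;
import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordPartition;

class RocketMQPositionSketch {

    public static void main(String[] args) {
        RocketMQRecordPartition partition = new RocketMQRecordPartition();
        partition.setBroker("broker-a");   // hypothetical broker name
        partition.setTopic("demo-topic");
        partition.setQueueId("3");

        RocketMQRecordOffset offset = new RocketMQRecordOffset();
        offset.setQueueOffset(128L);

        // equals/hashCode cover all three partition fields, so queues on different brokers
        // never collide even when topic and queueId match.
        System.out.println(partition + " @ " + offset.getQueueOffset());
    }
}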
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.spring; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class SpringRecordOffset extends RecordOffset { + + private Long offset; + + @Override + public Class getRecordOffsetClass() { + return SpringRecordOffset.class; + } + + public SpringRecordOffset() { + + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/spring/SpringRecordPartition.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/spring/SpringRecordPartition.java new file mode 100644 index 0000000000..4b536da139 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/offset/spring/SpringRecordPartition.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.offset.spring; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + + +@EqualsAndHashCode(callSuper = true) +@Data +@ToString +public class SpringRecordPartition extends RecordPartition { + + @Override + public Class getRecordPartitionClass() { + return SpringRecordPartition.class; + } + + public SpringRecordPartition() { + super(); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/IPayload.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/IPayload.java new file mode 100644 index 0000000000..aca27ffc21 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/IPayload.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.common.remote.payload;
+
+/**
+ * IPayload
+ */
+public interface IPayload {
+
+}
diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/PayloadFactory.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/PayloadFactory.java
new file mode 100644
index 0000000000..74e4880443
--- /dev/null
+++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/PayloadFactory.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.common.remote.payload;
+
+import java.lang.reflect.Modifier;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class PayloadFactory {
+
+    private PayloadFactory() {
+    }
+
+    private static class PayloadFactoryHolder {
+
+        private static final PayloadFactory INSTANCE = new PayloadFactory();
+    }
+
+    public static PayloadFactory getInstance() {
+        return PayloadFactoryHolder.INSTANCE;
+    }
+
+    private final Map<String, Class<?>> registryPayload = new ConcurrentHashMap<>();
+
+    private boolean initialized = false;
+
+    public void init() {
+        scan();
+    }
+
+    private synchronized void scan() {
+        if (initialized) {
+            return;
+        }
+        ServiceLoader<IPayload> payloads = ServiceLoader.load(IPayload.class);
+        for (IPayload payload : payloads) {
+            register(payload.getClass().getSimpleName(), payload.getClass());
+        }
+        initialized = true;
+    }
+
+    public void register(String type, Class<?> clazz) {
+        if (Modifier.isAbstract(clazz.getModifiers())) {
+            return;
+        }
+        if (registryPayload.containsKey(type)) {
+            throw new RuntimeException(String.format("Fail to register, type:%s ,clazz:%s ", type, clazz.getName()));
+        }
+        registryPayload.put(type, clazz);
+    }
+
+    public Class<?> getClassByType(String type) {
+        return registryPayload.get(type);
+    }
+}
diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/PayloadUtil.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/PayloadUtil.java
new file mode 100644
index 0000000000..6a21d5a825
--- /dev/null
+++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/payload/PayloadUtil.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.payload; + +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.exception.PayloadFormatException; +import org.apache.eventmesh.common.utils.JsonUtils; + +import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream; +import com.google.protobuf.Any; +import com.google.protobuf.UnsafeByteOperations; + +public class PayloadUtil { + + public static Payload from(IPayload payload) { + byte[] payloadBytes = JsonUtils.toJSONBytes(payload); + Metadata.Builder metadata = Metadata.newBuilder().setType(payload.getClass().getSimpleName()); + return Payload.newBuilder().setMetadata(metadata).setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(payloadBytes))).build(); + } + + public static IPayload parse(Payload payload) { + Class targetClass = PayloadFactory.getInstance().getClassByType(payload.getMetadata().getType()); + if (targetClass == null) { + throw new PayloadFormatException(ErrorCode.BAD_REQUEST, + "unknown payload type:" + payload.getMetadata().getType()); + } + return (IPayload) JsonUtils.parseObject(new ByteBufferBackedInputStream(payload.getBody().getValue().asReadOnlyByteBuffer()), targetClass); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/BaseRemoteRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/BaseRemoteRequest.java new file mode 100644 index 0000000000..b8c4c06207 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/BaseRemoteRequest.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
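// Illustrative sketch (not part of the patch): besides the ServiceLoader-based scan that
// PayloadFactory.init() performs, payload types can be registered programmatically and
// looked up by their simple class name. EchoPayload is a made-up type used only here.
import org.apache.eventmesh.common.remote.payload.IPayload;
import org.apache.eventmesh.common.remote.payload.PayloadFactory;

class PayloadFactorySketch {

    // Hypothetical payload type, defined only for this sketch.
    public static class EchoPayload implements IPayload {
    }

    public static void main(String[] args) {
        PayloadFactory factory = PayloadFactory.getInstance();
        factory.register(EchoPayload.class.getSimpleName(), EchoPayload.class);

        // The lookup key is the simple class name, the same value PayloadUtil writes into
        // the Payload metadata; registering the same type twice throws a RuntimeException.
        System.out.println(factory.getClassByType("EchoPayload"));
    }
}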
+ */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.remote.payload.IPayload; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Getter; + +@Getter +public abstract class BaseRemoteRequest implements IPayload { + + private final Map header = new HashMap<>(); + + public void addHeader(String key, String value) { + if (key == null || value == null) { + return; + } + header.put(key, value); + } + + public void addHeaders(Map map) { + if (map == null || map.isEmpty()) { + return; + } + map.forEach((k, v) -> { + if (k == null || v == null) { + return; + } + this.header.put(k, v); + }); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java new file mode 100644 index 0000000000..f78349703a --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateOrUpdateDataSourceReq.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +/** + * create or update datasource with custom data source config + */ +@Data +@EqualsAndHashCode(callSuper = true) +public class CreateOrUpdateDataSourceReq extends BaseRemoteRequest { + + private Integer id; + private DataSourceType type; + private String desc; + private Config config; + private String configClass; + private String region; + private String operator; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java new file mode 100644 index 0000000000..b09a3e10ed --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/CreateTaskRequest.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.remote.TransportType; +import org.apache.eventmesh.common.remote.job.JobType; + +import java.util.List; +import java.util.Map; + +import lombok.Data; + +/** + * Description: create task without task id, otherwise update task + */ +@Data +public class CreateTaskRequest { + + private String taskId; + + // task name + private String taskName; + + // task description + private String taskDesc; + + // task owner or updater + private String uid; + + private List jobs; + + // task source region + private String sourceRegion; + + // task target region + private String targetRegion; + + // mark request send by other region admin, default is false + private boolean flag = false; + + @Data + public static class JobDetail { + + private String jobId; + + private String jobDesc; + + // full/increase/check + private JobType jobType; + + private Map sourceDataSource; + + private String sourceConnectorDesc; + + private Map sinkDataSource; + + private String sinkConnectorDesc; + + private TransportType transportType; + + // job request from region + private String fromRegion; + + // job actually running region + private String runningRegion; + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/FetchJobRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/FetchJobRequest.java new file mode 100644 index 0000000000..2693a4a3aa --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/FetchJobRequest.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
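// Illustrative sketch (not part of the patch): building a CreateTaskRequest with a single
// job using the nested JobDetail type declared above. All identifiers and regions are
// hypothetical; jobType and transportType are left unset to avoid assuming enum constants.
import org.apache.eventmesh.common.remote.request.CreateTaskRequest;
import org.apache.eventmesh.common.remote.request.CreateTaskRequest.JobDetail;

import java.util.Collections;

class CreateTaskRequestSketch {

    public static void main(String[] args) {
        JobDetail job = new JobDetail();
        job.setJobDesc("mysql full sync job");
        // jobType (full/increase/check) and transportType would be populated from the
        // JobType and TransportType enums referenced in the imports above.

        CreateTaskRequest task = new CreateTaskRequest();
        task.setTaskName("demo-task");
        task.setTaskDesc("example data sync task");
        task.setUid("admin");
        task.setSourceRegion("region-a");
        task.setTargetRegion("region-b");
        task.setJobs(Collections.singletonList(job));
    }
}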
+ */ + +package org.apache.eventmesh.common.remote.request; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class FetchJobRequest extends BaseRemoteRequest { + private String jobID; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/FetchPositionRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/FetchPositionRequest.java new file mode 100644 index 0000000000..90563251ab --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/FetchPositionRequest.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class FetchPositionRequest extends BaseRemoteRequest { + + private String jobID; + + private String address; + + private RecordPosition recordPosition; + + private DataSourceType dataSourceType; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportHeartBeatRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportHeartBeatRequest.java new file mode 100644 index 0000000000..fb61ca3618 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportHeartBeatRequest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.request; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class ReportHeartBeatRequest extends BaseRemoteRequest { + + private String address; + + private String reportedTimeStamp; + + private String jobID; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java new file mode 100644 index 0000000000..aec33e4616 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportJobRequest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.remote.JobState; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@Data +@EqualsAndHashCode(callSuper = true) +@ToString +public class ReportJobRequest extends BaseRemoteRequest { + + private String jobID; + + private JobState state; + + private String address; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportPositionRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportPositionRequest.java new file mode 100644 index 0000000000..42694d5675 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportPositionRequest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
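// Illustrative sketch (not part of the patch): requests extend BaseRemoteRequest, which
// implements IPayload, so they can be wrapped into the gRPC Payload envelope and decoded
// back by simple class name. The explicit register() call stands in for the ServiceLoader
// scan normally triggered by PayloadFactory.init(); the job id and address are made up.
import org.apache.eventmesh.common.protocol.grpc.adminserver.Payload;
import org.apache.eventmesh.common.remote.payload.IPayload;
import org.apache.eventmesh.common.remote.payload.PayloadFactory;
import org.apache.eventmesh.common.remote.payload.PayloadUtil;
import org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest;

class HeartBeatPayloadSketch {

    public static void main(String[] args) {
        PayloadFactory.getInstance().register("ReportHeartBeatRequest", ReportHeartBeatRequest.class);

        ReportHeartBeatRequest request = new ReportHeartBeatRequest();
        request.setJobID("job-1");                  // hypothetical job id
        request.setAddress("127.0.0.1:8080");
        request.setReportedTimeStamp(String.valueOf(System.currentTimeMillis()));

        Payload wire = PayloadUtil.from(request);   // metadata type = "ReportHeartBeatRequest"
        IPayload decoded = PayloadUtil.parse(wire); // resolved through PayloadFactory
        System.out.println(decoded instanceof ReportHeartBeatRequest);
    }
}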
+ */ + +package org.apache.eventmesh.common.remote.request; + +import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@Data +@EqualsAndHashCode(callSuper = true) +@ToString +public class ReportPositionRequest extends BaseRemoteRequest { + + private String jobID; + + private List recordPositionList; + + private TaskState state; + + private String address; + + private DataSourceType dataSourceType; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java new file mode 100644 index 0000000000..bd38881c3d --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/request/ReportVerifyRequest.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.request; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@Data +@EqualsAndHashCode(callSuper = true) +@ToString +public class ReportVerifyRequest extends BaseRemoteRequest { + + private String taskID; + + private String jobID; + + private String recordID; + + private String recordSig; + + private String connectorName; + + private String connectorStage; + + private String position; +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/BaseRemoteResponse.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/BaseRemoteResponse.java new file mode 100644 index 0000000000..3ea8401535 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/BaseRemoteResponse.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.response; + +import org.apache.eventmesh.common.remote.payload.IPayload; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Getter; +import lombok.Setter; + +@Getter +public abstract class BaseRemoteResponse implements IPayload { + @Setter + private boolean success = true; + @Setter + private int errorCode; + @Setter + private String desc; + + private Map header = new HashMap<>(); + + public void addHeader(String key, String value) { + if (key == null || value == null) { + return; + } + header.put(key, value); + } + + public void addHeaders(Map map) { + if (map == null || map.isEmpty()) { + return; + } + map.forEach((k, v) -> { + if (k == null || v == null) { + return; + } + this.header.put(k, v); + }); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java new file mode 100644 index 0000000000..11678dfcf0 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/CreateTaskResponse.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.response; + +import org.apache.eventmesh.common.remote.request.CreateTaskRequest; + +import java.util.List; + +import lombok.Data; + +@Data +public class CreateTaskResponse extends BaseRemoteResponse { + + private String taskId; + + private List jobIdList; + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/FetchJobResponse.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/FetchJobResponse.java new file mode 100644 index 0000000000..95d2d157e0 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/FetchJobResponse.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
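// Illustrative sketch (not part of the patch): BaseRemoteResponse#addHeader/addHeaders
// silently drop null keys and values, so callers do not need to pre-filter metadata maps.
// The header names and task id below are made up.
import org.apache.eventmesh.common.remote.response.CreateTaskResponse;

import java.util.HashMap;
import java.util.Map;

class ResponseHeaderSketch {

    public static void main(String[] args) {
        CreateTaskResponse response = new CreateTaskResponse();
        response.setTaskId("task-001");           // hypothetical id

        Map<String, String> meta = new HashMap<>();
        meta.put("cluster", "default");
        meta.put("region", null);                 // dropped by addHeaders
        response.addHeaders(meta);

        System.out.println(response.getHeader()); // {cluster=default}
    }
}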
+ */ + +package org.apache.eventmesh.common.remote.response; + +import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.TransportType; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.job.JobConnectorConfig; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class FetchJobResponse extends BaseRemoteResponse { + + private String id; + + private TransportType transportType; + + private JobConnectorConfig connectorConfig; + + private List position; + + private TaskState state; + + private JobType type; + + public static FetchJobResponse successResponse() { + FetchJobResponse response = new FetchJobResponse(); + response.setSuccess(true); + response.setErrorCode(ErrorCode.SUCCESS); + return response; + } + + public static FetchJobResponse failResponse(int code, String desc) { + FetchJobResponse response = new FetchJobResponse(); + response.setSuccess(false); + response.setErrorCode(code); + response.setDesc(desc); + return response; + } + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/FetchPositionResponse.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/FetchPositionResponse.java new file mode 100644 index 0000000000..613623d654 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/FetchPositionResponse.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.common.remote.response; + +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class FetchPositionResponse extends BaseRemoteResponse { + + private List recordPosition; + + public static FetchPositionResponse successResponse() { + FetchPositionResponse response = new FetchPositionResponse(); + response.setSuccess(true); + response.setErrorCode(ErrorCode.SUCCESS); + return response; + } + + public static FetchPositionResponse successResponse(List recordPosition) { + FetchPositionResponse response = successResponse(); + response.setRecordPosition(recordPosition); + return response; + } + + public static FetchPositionResponse failResponse(int code, String desc) { + FetchPositionResponse response = new FetchPositionResponse(); + response.setSuccess(false); + response.setErrorCode(code); + response.setDesc(desc); + return response; + } + +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/SimpleResponse.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/SimpleResponse.java new file mode 100644 index 0000000000..a4cdd52f99 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/remote/response/SimpleResponse.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.remote.response; + +import org.apache.eventmesh.common.remote.exception.ErrorCode; + +public class SimpleResponse extends BaseRemoteResponse { + /** + * just mean remote received or process success + */ + public static SimpleResponse success() { + return new SimpleResponse(); + } + + public static SimpleResponse fail(int errorCode, String msg) { + SimpleResponse response = new SimpleResponse(); + response.setErrorCode(errorCode); + response.setDesc(msg); + response.setSuccess(false); + return response; + } + + + /** + * build an error response. + * + * @param exception exception + * @return response + */ + public static SimpleResponse fail(Throwable exception) { + return fail(ErrorCode.INTERNAL_ERR, exception.getMessage()); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java new file mode 100644 index 0000000000..1782d46dd6 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/stubs/HeaderStub.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
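A short usage sketch for the response factories above; the handler context is invented, and only SimpleResponse, FetchPositionResponse, and ErrorCode.INTERNAL_ERR from this patch are assumed.

```java
import java.util.List;

import org.apache.eventmesh.common.remote.exception.ErrorCode;
import org.apache.eventmesh.common.remote.offset.RecordPosition;
import org.apache.eventmesh.common.remote.response.FetchPositionResponse;
import org.apache.eventmesh.common.remote.response.SimpleResponse;

public class ResponseFactoryExample {

    // Acknowledge a request where only "received / failed" matters.
    public static SimpleResponse ack(boolean ok, Throwable cause) {
        return ok ? SimpleResponse.success() : SimpleResponse.fail(cause);
    }

    // Wrap a position lookup result, falling back to a coded failure.
    public static FetchPositionResponse positions(List<RecordPosition> found) {
        if (found == null) {
            return FetchPositionResponse.failResponse(ErrorCode.INTERNAL_ERR, "position store unavailable");
        }
        return FetchPositionResponse.successResponse(found);
    }
}
```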
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.stubs; + +import org.apache.eventmesh.common.protocol.http.common.ProtocolKey; +import org.apache.eventmesh.common.protocol.http.header.Header; +import org.apache.eventmesh.common.utils.HttpConvertsUtils; + +import java.util.Map; + +public class HeaderStub extends Header { + + public String code; + public String eventmeshenv; + + @Override + public Map toMap() { + return new HttpConvertsUtils().httpMapConverts(this, new ProtocolKey(), new ProtocolKey.EventMeshInstanceKey()); + } +} diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/ConfigurationContextUtil.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/ConfigurationContextUtil.java index 8617888994..fede64d650 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/ConfigurationContextUtil.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/ConfigurationContextUtil.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.common.utils; +import static org.apache.eventmesh.common.Constants.ADMIN; import static org.apache.eventmesh.common.Constants.GRPC; import static org.apache.eventmesh.common.Constants.HTTP; import static org.apache.eventmesh.common.Constants.TCP; @@ -36,7 +37,7 @@ public class ConfigurationContextUtil { private static final ConcurrentHashMap CONFIGURATION_MAP = new ConcurrentHashMap<>(); - public static final List KEYS = Lists.newArrayList(HTTP, TCP, GRPC); + public static final List KEYS = Lists.newArrayList(HTTP, TCP, GRPC, ADMIN); /** * Save http, tcp, grpc configuration at startup for global use. 
diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/IPUtils.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/IPUtils.java index 998735181e..dcef8f8243 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/IPUtils.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/IPUtils.java @@ -37,6 +37,7 @@ import io.netty.channel.Channel; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import inet.ipaddr.HostName; @@ -46,7 +47,10 @@ @Slf4j public class IPUtils { - public static String getLocalAddress() { + @Getter + public static String localAddress = init(); + + private static String init() { // if the progress works under docker environment // return the host ip about this docker located from environment value String dockerHostIp = System.getenv("docker_host_ip"); diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java index cf07bdfbe7..f2328541c4 100644 --- a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/JsonUtils.java @@ -24,6 +24,7 @@ import org.apache.commons.lang3.StringUtils; import java.io.IOException; +import java.io.InputStream; import java.lang.reflect.Type; import java.util.Map; import java.util.Objects; @@ -53,6 +54,14 @@ public class JsonUtils { OBJECT_MAPPER.registerModule(new JavaTimeModule()); } + public static <T> T convertValue(Object fromValue, Class<T> toValueType) { + return OBJECT_MAPPER.convertValue(fromValue, toValueType); + } + + public static <T> T convertValue(Object fromValue, TypeReference<T> toValueTypeRef) { + return OBJECT_MAPPER.convertValue(fromValue, toValueTypeRef); + } + public static <T> T mapToObject(Map<String, Object> map, Class<T> beanClass) { if (map == null) { return null; @@ -60,7 +69,15 @@ public static <T> T mapToObject(Map<String, Object> map, Class<T> beanClass) { Object obj = OBJECT_MAPPER.convertValue(map, beanClass); return beanClass.cast(obj); } - + + public static Map<String, Object> objectToMap(Object obj) { + if (obj == null) { + return null; + } + return OBJECT_MAPPER.convertValue(obj, new TypeReference<Map<String, Object>>() { + }); + } + /** * Serialize object to json string.
* @@ -108,6 +125,14 @@ public static <T> T parseObject(String text, Class<T> clazz) { } } + public static <T> T parseObject(InputStream inputStream, Class<T> clazz) { + try { + return OBJECT_MAPPER.readValue(inputStream, clazz); + } catch (IOException e) { + throw new JsonException("deserialize input stream to object error", e); + } + } + public static <T> T parseObject(String text, Type type) { if (StringUtils.isEmpty(text)) { return null; @@ -156,6 +181,21 @@ public static <T> T parseTypeReferenceObject(String text, TypeReference<T> typeReference) { } } + public static <T> T parseTypeReferenceObject(Object object, TypeReference<T> typeReference) { + if (object == null) { + return null; + } + return convertValue(object, typeReference); + } + + public static <T> T parseTypeReferenceObject(byte[] text, TypeReference<T> typeReference) { + try { + return OBJECT_MAPPER.readValue(text, typeReference); + } catch (IOException e) { + throw new JsonException("deserialize json string to typeReference error", e); + } + } + public static JsonNode getJsonNode(String text) { if (StringUtils.isEmpty(text)) { return null; diff --git a/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/PagedList.java b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/PagedList.java new file mode 100644 index 0000000000..322c585f04 --- /dev/null +++ b/eventmesh-common/src/main/java/org/apache/eventmesh/common/utils/PagedList.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
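A brief sketch of the JsonUtils helpers added in the hunks above (convertValue, objectToMap, and the InputStream/byte[] overloads); the Example bean and its values are hypothetical.

```java
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.apache.eventmesh.common.utils.JsonUtils;

import com.fasterxml.jackson.core.type.TypeReference;

public class JsonUtilsUsageExample {

    // Hypothetical payload bean; any POJO with matching fields would do.
    public static class Example {
        public String name;
        public int count;
    }

    public static void main(String[] args) {
        Example example = new Example();
        example.name = "demo";
        example.count = 3;

        // Object -> Map and back again via the new objectToMap/convertValue helpers.
        Map<String, Object> asMap = JsonUtils.objectToMap(example);
        Example roundTripped = JsonUtils.convertValue(asMap, Example.class);

        // Deserialize directly from an InputStream or a byte[] without building a String first.
        byte[] json = "{\"name\":\"demo\",\"count\":3}".getBytes(StandardCharsets.UTF_8);
        InputStream in = new ByteArrayInputStream(json);
        Example fromStream = JsonUtils.parseObject(in, Example.class);
        Map<String, Object> fromBytes = JsonUtils.parseTypeReferenceObject(json, new TypeReference<Map<String, Object>>() {
        });

        System.out.println(roundTripped.name + " " + fromStream.count + " " + fromBytes.size());
    }
}
```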
+ */ + +package org.apache.eventmesh.common.utils; + +import java.util.List; + +public class PagedList { + private int totalSize; + private int totalPage; + private int size; + private int page; + + private List data; + + public int getTotalSize() { + return totalSize; + } + + public void setTotalSize(int totalSize) { + this.totalSize = totalSize; + } + + public int getTotalPage() { + return totalPage; + } + + public void setTotalPage(int totalPage) { + this.totalPage = totalPage; + } + + public int getSize() { + return size; + } + + public void setSize(int size) { + this.size = size; + } + + public int getPage() { + return page; + } + + public void setPage(int page) { + this.page = page; + } + + public List getData() { + return data; + } + + public void setData(List data) { + this.data = data; + } +} diff --git a/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload b/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload new file mode 100644 index 0000000000..433cf57ed1 --- /dev/null +++ b/eventmesh-common/src/main/resources/META-INF/services/org.apache.eventmesh.common.remote.payload.IPayload @@ -0,0 +1,23 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +org.apache.eventmesh.common.remote.request.FetchJobRequest +org.apache.eventmesh.common.remote.response.FetchJobResponse +org.apache.eventmesh.common.remote.request.ReportPositionRequest +org.apache.eventmesh.common.remote.request.ReportJobRequest +org.apache.eventmesh.common.remote.request.ReportVerifyRequest +org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest +org.apache.eventmesh.common.remote.request.FetchPositionRequest +org.apache.eventmesh.common.remote.response.FetchPositionResponse \ No newline at end of file diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/loadbalance/RandomLoadBalanceSelectorTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/loadbalance/RandomLoadBalanceSelectorTest.java index 434e19f3b2..8f5c457264 100644 --- a/eventmesh-common/src/test/java/org/apache/eventmesh/common/loadbalance/RandomLoadBalanceSelectorTest.java +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/loadbalance/RandomLoadBalanceSelectorTest.java @@ -34,7 +34,7 @@ public class RandomLoadBalanceSelectorTest { private RandomLoadBalanceSelector randomLoadBalanceSelector; @BeforeEach - public void befor() { + public void before() { List address = new ArrayList<>(); address.add("A"); address.add("B"); diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java new file mode 100644 index 0000000000..253b1de926 --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/HttpConvertsUtilsTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
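The services entry above registers the request/response payload classes for SPI-style discovery; below is a minimal sketch of one way such a registry could be loaded with the standard java.util.ServiceLoader, assuming the listed implementations have no-arg constructors (the actual consumer inside EventMesh is not part of this diff).

```java
import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;

import org.apache.eventmesh.common.remote.payload.IPayload;

public class PayloadRegistryExample {

    // Index every IPayload implementation listed in META-INF/services by its simple class name.
    public static Map<String, IPayload> loadPayloads() {
        Map<String, IPayload> payloads = new HashMap<>();
        for (IPayload payload : ServiceLoader.load(IPayload.class)) {
            payloads.put(payload.getClass().getSimpleName(), payload);
        }
        return payloads;
    }
}
```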
+ */ + +package org.apache.eventmesh.common.utils; + +import org.apache.eventmesh.common.protocol.http.common.ProtocolKey; +import org.apache.eventmesh.common.protocol.http.common.ProtocolKey.EventMeshInstanceKey; +import org.apache.eventmesh.common.protocol.http.header.Header; +import org.apache.eventmesh.common.stubs.HeaderStub; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class HttpConvertsUtilsTest { + + private final HeaderStub headerStub = new HeaderStub(); + private final ProtocolKey mockedProtocolKey = new ProtocolKey(); + private final EventMeshInstanceKey mockedEventMeshProtocolKey = new EventMeshInstanceKey(); + + @Test + void httpMapConverts() { + Map httpMapConverts = new HttpConvertsUtils().httpMapConverts(headerStub, mockedProtocolKey); + Assertions.assertEquals(httpMapConverts.get(headerStub.code), headerStub.code); + } + + @Test + void testHttpMapConverts() { + Map httpMapConverts = new HttpConvertsUtils().httpMapConverts(headerStub, mockedProtocolKey, mockedEventMeshProtocolKey); + Assertions.assertEquals(httpMapConverts.get(headerStub.code), headerStub.code); + Assertions.assertEquals(httpMapConverts.get(headerStub.eventmeshenv), headerStub.eventmeshenv); + } + + @Test + void httpHeaderConverts() { + HashMap headerParams = new HashMap<>(); + String code = "test"; + headerParams.put("code", code); + Header header = new HttpConvertsUtils().httpHeaderConverts(headerStub, headerParams); + Assertions.assertEquals(code, header.toMap().get("code")); + } + + @Test + void testHttpHeaderConverts() { + HashMap headerParams = new HashMap<>(); + String env = "test"; + headerParams.put("eventmeshenv", env); + Header header = new HttpConvertsUtils().httpHeaderConverts(headerStub, headerParams, mockedEventMeshProtocolKey); + Assertions.assertEquals(env, header.toMap().get("eventmeshenv")); + } +} diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/IPUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/IPUtilsTest.java index b3fa293e89..757486dd89 100644 --- a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/IPUtilsTest.java +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/IPUtilsTest.java @@ -19,16 +19,9 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.junitpioneer.jupiter.SetEnvironmentVariable; public class IPUtilsTest { - @Test - @SetEnvironmentVariable(key = "docker_host_ip", value = "dockHostIP") - public void testDockerIP() { - Assertions.assertEquals("dockHostIP", IPUtils.getLocalAddress()); - } - @Test public void testLocalhostIP() { Assertions.assertNotNull(IPUtils.getLocalAddress()); diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java new file mode 100644 index 0000000000..e66b8d711a --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/JsonPathUtilsTest.java @@ -0,0 +1,171 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.common.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; + +public class JsonPathUtilsTest { + + @Test + public void tesTisEmptyJsonObject() { + String emptyJsonObject = "{}"; + assertTrue(JsonPathUtils.isEmptyJsonObject(emptyJsonObject)); + + String jsonObject = "{\"key\": \"value\"}"; + assertFalse(JsonPathUtils.isEmptyJsonObject(jsonObject)); + + String emptyJsonArray = "[]"; + assertFalse(JsonPathUtils.isEmptyJsonObject(emptyJsonArray)); + + String jsonArray = "[{\"key\": \"value\"}]"; + assertFalse(JsonPathUtils.isEmptyJsonObject(jsonArray)); + + String empty = ""; + assertFalse(JsonPathUtils.isEmptyJsonObject(empty)); + } + + @Test + public void testParseStrict() { + String json = "{\"key\": \"value\"}"; + JsonNode result = JsonPathUtils.parseStrict(json); + assertNotNull(result); + assertEquals("value", result.get("key").asText()); + + String emptyJsonObject = "{}"; + JsonNode result2 = JsonPathUtils.parseStrict(emptyJsonObject); + assertNotNull(result2); + assertTrue(result2.isEmpty()); + + } + + @Test + public void testBuildJsonString() { + Map person = new HashMap<>(); + person.put("name", "John"); + person.put("age", "30"); + String actual = JsonPathUtils.buildJsonString("person", person); + String excepted = "{\"person\":{\"name\":\"John\",\"age\":\"30\"}}"; + assertNotNull(actual); + assertEquals(excepted, actual); + } + + @Test + public void testIsValidAndDefinite() { + String jsonPath = "$.person[0].name"; + String jsonPath2 = "$.person[*].address.city"; + String jsonPath3 = "person.job[0].title"; + + assertTrue(JsonPathUtils.isValidAndDefinite(jsonPath)); + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath2)); + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath3)); + + String jsonPath4 = null; + String jsonPath5 = ""; + + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath4)); + assertFalse(JsonPathUtils.isValidAndDefinite(jsonPath5)); + } + + + @Test + public void testGetJsonPathValue() { + String jsonContent = "{ \"person\": { \"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" } } }"; + + String jsonPath1 = "$.person.name"; + String jsonPath2 = "$.person.address.city"; + String jsonPath3 = "$.person.age"; + + assertEquals("John Doe", JsonPathUtils.getJsonPathValue(jsonContent, jsonPath1)); + assertEquals("New York", JsonPathUtils.getJsonPathValue(jsonContent, jsonPath2)); + assertEquals("30", JsonPathUtils.getJsonPathValue(jsonContent, jsonPath3)); + + } + + @Test + public void testConvertToJsonNode() throws JsonProcessingException { + String jsonString1 = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + + JsonNode node1 = 
JsonPathUtils.convertToJsonNode(jsonString1); + assertEquals("John Doe", node1.get("name").asText()); + assertEquals("New York", node1.get("address").get("city").asText()); + assertEquals("30", node1.get("age").asText()); + } + + @Test + public void testMatchJsonPathValueWithString() { + String jsonString = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + + String jsonPath1 = "$.name"; + String result1 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath1); + assertEquals("John Doe", result1); + + String jsonPath2 = "$.age"; + String result2 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath2); + assertEquals("30", result2); // Age should be returned as a string + + String jsonPath3 = "$.address.city"; + String result3 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath3); + assertEquals("New York", result3); + + String jsonPath4 = "$.job"; + String result4 = JsonPathUtils.matchJsonPathValueWithString(jsonString, jsonPath4); + assertEquals("null", result4); + } + + @Test + public void testJsonPathParse() { + String jsonString = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + + String jsonPath1 = "$.name"; + Object result1 = JsonPathUtils.jsonPathParse(jsonString, jsonPath1); + assertNotNull(result1); + assertEquals("John Doe", result1); + + String jsonPath2 = "$.address.city"; + Object result2 = JsonPathUtils.jsonPathParse(jsonString, jsonPath2); + assertNotNull(result2); + assertEquals("New York", result2); + } + + @Test + public void testMatchJsonPathValue() throws JsonProcessingException { + String jsonString = "{\"name\": \"John Doe\", \"age\": 30, \"address\": { \"city\": \"New York\" }}"; + String jsonPath1 = "$.name"; + String result1 = JsonPathUtils.matchJsonPathValue(jsonString, jsonPath1); + assertEquals("\"John Doe\"", result1); + + String jsonPath2 = "$.address.city"; + String result2 = JsonPathUtils.matchJsonPathValue(jsonString, jsonPath2); + assertEquals("\"New York\"", result2); + + String jsonPath3 = "$.job"; + String result3 = JsonPathUtils.matchJsonPathValue(jsonString, jsonPath3); + assertEquals("null", result3); + } +} diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java new file mode 100644 index 0000000000..03c52dadad --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/LogUtilTest.java @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.eventmesh.common.utils; + +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import java.util.function.Supplier; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.slf4j.Logger; +import org.slf4j.spi.LoggingEventBuilder; + +@ExtendWith(MockitoExtension.class) +class LogUtilTest { + + private Logger mockLogger; + private LoggingEventBuilder mockEventBuilder; + private Supplier supplier; + private String logMessage; + + @BeforeEach + void setUp() { + + mockLogger = mock(Logger.class); + mockEventBuilder = mock(LoggingEventBuilder.class); + + supplier = () -> "{\"orderId\": 12345, \"amount\": 100}"; + logMessage = "Processing order with data: {}"; + } + + @Test + void testDebugLogsWithSupplier() { + + doReturn(mockEventBuilder).when(mockLogger).atDebug(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + + LogUtil.debug(mockLogger, logMessage, supplier); + + verify(mockLogger).atDebug(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testDebugLogsWithSupplierAndException() { + Throwable throwable = new RuntimeException("Order processing failed"); + + + doReturn(mockEventBuilder).when(mockLogger).atDebug(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).setCause(throwable); + + LogUtil.debug(mockLogger, logMessage, supplier, throwable); + + verify(mockLogger).atDebug(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).setCause(throwable); + verify(mockEventBuilder).log(logMessage); + } + + @Test + void testDebugLogsWithSuppliers() { + + Supplier supplier2 = () -> "{\"orderId\": 67890, \"amount\": 200}"; + + doReturn(mockEventBuilder).when(mockLogger).atDebug(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier2)); + + LogUtil.debug(mockLogger, logMessage, supplier, supplier2); + + verify(mockLogger).atDebug(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).addArgument(same(supplier2)); + verify(mockEventBuilder).log(logMessage); + } + + @Test + void testInfoLogsWithSupplier() { + + doReturn(mockEventBuilder).when(mockLogger).atInfo(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + + LogUtil.info(mockLogger, logMessage, supplier); + + verify(mockLogger).atInfo(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testInfoLogsWithSupplierAndException() { + + Throwable throwable = new RuntimeException("Order processing failed"); + + doReturn(mockEventBuilder).when(mockLogger).atInfo(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).setCause(throwable); + + LogUtil.info(mockLogger, logMessage, supplier, throwable); + + verify(mockLogger).atInfo(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).setCause(throwable); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void 
testInfoLogsWithSuppliers() { + + Supplier supplier2 = () -> "{\"orderId\": 67890, \"amount\": 200}"; + + doReturn(mockEventBuilder).when(mockLogger).atInfo(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier2)); + + LogUtil.info(mockLogger, logMessage, supplier, supplier2); + + verify(mockLogger).atInfo(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).addArgument(same(supplier2)); + verify(mockEventBuilder).log(logMessage); + } + + @Test + void testWarnLogsWithSupplier() { + + doReturn(mockEventBuilder).when(mockLogger).atWarn(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + + LogUtil.warn(mockLogger, logMessage, supplier); + + verify(mockLogger).atWarn(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testWarnLogsWithSupplierAndException() { + + Throwable throwable = new RuntimeException("Order processing failed"); + + doReturn(mockEventBuilder).when(mockLogger).atWarn(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).setCause(throwable); + + LogUtil.warn(mockLogger, logMessage, supplier, throwable); + + verify(mockLogger).atWarn(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).setCause(throwable); + verify(mockEventBuilder).log(logMessage); + + } + + @Test + void testWarnLogsWithSuppliers() { + + Supplier supplier2 = () -> "{\"orderId\": 67890, \"amount\": 200}"; + + doReturn(mockEventBuilder).when(mockLogger).atWarn(); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier)); + doReturn(mockEventBuilder).when(mockEventBuilder).addArgument(same(supplier2)); + + LogUtil.warn(mockLogger, logMessage, supplier, supplier2); + + verify(mockLogger).atWarn(); + verify(mockEventBuilder).addArgument(same(supplier)); + verify(mockEventBuilder).addArgument(same(supplier2)); + verify(mockEventBuilder).log(logMessage); + } + +} \ No newline at end of file diff --git a/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java new file mode 100644 index 0000000000..0cba2a6ad9 --- /dev/null +++ b/eventmesh-common/src/test/java/org/apache/eventmesh/common/utils/ThreadUtilsTest.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
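The tests above exercise LogUtil's Supplier-based overloads; here is a small sketch of the intended call-site benefit, where the argument is only rendered if the log event is actually emitted. The logger and the describe() helper are illustrative, not part of the patch.

```java
import org.apache.eventmesh.common.utils.LogUtil;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogUtilUsageExample {

    private static final Logger log = LoggerFactory.getLogger(LogUtilUsageExample.class);

    public static void handleOrder(Object order) {
        // The Supplier defers the expensive rendering until the DEBUG event is actually built.
        LogUtil.debug(log, "Processing order with data: {}", () -> describe(order));
    }

    // Hypothetical expensive rendering step, standing in for serialization or deep formatting.
    private static String describe(Object order) {
        return String.valueOf(order);
    }
}
```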
+ */ + + +package org.apache.eventmesh.common.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; + +class ThreadUtilsTest { + + @Test + void testRandomPauseBetweenMinAndMax() { + + long min = 1000; + long max = 5000; + + long start = System.currentTimeMillis(); + ThreadUtils.randomPause(min, max, TimeUnit.MILLISECONDS); + long end = System.currentTimeMillis(); + + long pause = end - start; + + assertTrue(pause >= min && pause <= max, "Pause time should be between min and max"); + } + + @Test + void testRandomPauseWithInterruption() { + + Thread.currentThread().interrupt(); + ThreadUtils.randomPause(1000, 2000, TimeUnit.MILLISECONDS); + assertTrue(Thread.currentThread().isInterrupted()); + } + + @Test + void testDeprecatedSleep() { + + ThreadUtils.sleep(1000); + assertTrue(true, "Method should execute without any exception"); + } + + @Test + void testSleepWithTimeOutAndTimeUnit() throws InterruptedException { + + ThreadUtils.sleepWithThrowException(5000, TimeUnit.MILLISECONDS); + assertTrue(true, "Method should execute without any exception"); + } + + @Test + void testSleepWithNullTimeUnit() throws InterruptedException { + + ThreadUtils.sleepWithThrowException(5000, null); + assertTrue(true, "Method should not throw any exception with null TimeUnit"); + } + + @Test + void testSleepWithThrowExceptionInterruption() { + Thread.currentThread().interrupt(); + + assertThrows(InterruptedException.class, () -> { + ThreadUtils.sleepWithThrowException(5000, TimeUnit.MILLISECONDS); + }); + } + + @Test + void testGetPIDWithRealProcessId() { + + long pid = ThreadUtils.getPID(); + assertTrue(pid > 0); + + long cashedPId = ThreadUtils.getPID(); + assertEquals(pid, cashedPId); + } + + @Test + void testGetPIDWithMultiThread() throws InterruptedException { + + final long[] pid1 = new long[1]; + final long[] pid2 = new long[1]; + + Thread thread1 = new Thread(() -> { + pid1[0] = ThreadUtils.getPID(); + assertTrue(pid1[0] > 0); + }); + + Thread thread2 = new Thread(() -> { + pid2[0] = ThreadUtils.getPID(); + assertTrue(pid2[0] > 0); + }); + + thread1.start(); + thread2.start(); + + thread1.join(); + thread2.join(); + + assertEquals(pid1[0], pid2[0]); + } +} \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-canal/build.gradle b/eventmesh-connectors/eventmesh-connector-canal/build.gradle new file mode 100644 index 0000000000..6beeac41eb --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/build.gradle @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
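A compact sketch of the ThreadUtils helpers covered by the test above; the retry loop is hypothetical, and only randomPause, sleepWithThrowException, and getPID are assumed.

```java
import java.util.concurrent.TimeUnit;

import org.apache.eventmesh.common.utils.ThreadUtils;

public class ThreadUtilsUsageExample {

    // Hypothetical retry loop: back off a random 100-500 ms between attempts.
    public static boolean retry(int attempts, Runnable action) {
        for (int i = 0; i < attempts; i++) {
            try {
                action.run();
                return true;
            } catch (RuntimeException e) {
                ThreadUtils.randomPause(100, 500, TimeUnit.MILLISECONDS);
            }
        }
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        System.out.println("running in PID " + ThreadUtils.getPID());
        ThreadUtils.sleepWithThrowException(10, TimeUnit.MILLISECONDS);
    }
}
```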
+ */ + +List canal = [ + "com.alibaba.otter:canal.instance.manager:$canal_version", + "com.alibaba.otter:canal.parse:$canal_version", + "com.alibaba.otter:canal.server:$canal_version" +] + +dependencies { + api project(":eventmesh-openconnect:eventmesh-openconnect-java") + implementation "org.locationtech.jts:jts-core" + implementation project(":eventmesh-common") + implementation canal + implementation "com.alibaba:druid" + compileOnly 'com.mysql:mysql-connector-j' + compileOnly 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' + testImplementation "org.mockito:mockito-core" + testImplementation "org.mockito:mockito-junit-jupiter" +} \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-canal/gradle.properties b/eventmesh-connectors/eventmesh-connector-canal/gradle.properties new file mode 100644 index 0000000000..a439bdacf9 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/gradle.properties @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +canal_version=1.1.7 +pluginType=connector +pluginName=MySQL \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/ByteArrayConverter.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/ByteArrayConverter.java new file mode 100644 index 0000000000..350b678856 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/ByteArrayConverter.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.Converter; +import org.apache.commons.beanutils.converters.ArrayConverter; +import org.apache.commons.beanutils.converters.ByteConverter; + +import java.nio.charset.StandardCharsets; + + +public class ByteArrayConverter implements Converter { + + public static final Converter SQL_BYTES = new ByteArrayConverter(null); + private static final Converter converter = new ArrayConverter(byte[].class, new ByteConverter()); + + protected final Object defaultValue; + protected final boolean useDefault; + + public ByteArrayConverter() { + this.defaultValue = null; + this.useDefault = false; + } + + public ByteArrayConverter(Object defaultValue) { + this.defaultValue = defaultValue; + this.useDefault = true; + } + + public Object convert(Class type, Object value) { + if (value == null) { + if (useDefault) { + return (defaultValue); + } else { + throw new ConversionException("No value specified"); + } + } + + if (value instanceof byte[]) { + return (value); + } + + if (value instanceof String) { + try { + return ((String) value).getBytes(StandardCharsets.ISO_8859_1); + } catch (Exception e) { + throw new ConversionException(e); + } + } + + return converter.convert(type, value); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java new file mode 100644 index 0000000000..6f112081e8 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/CanalConnectRecord.java @@ -0,0 +1,284 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal; + +import org.apache.eventmesh.common.remote.job.SyncConsistency; +import org.apache.eventmesh.common.remote.job.SyncMode; +import org.apache.eventmesh.connector.canal.model.EventColumn; +import org.apache.eventmesh.connector.canal.model.EventType; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; + +import lombok.Data; + +@Data +public class CanalConnectRecord implements Serializable { + + private static final long serialVersionUID = 1L; + + private String schemaName; + + private String tableName; + + // mysql instance gtid range + private String gtid; + + private String currentGtid; + + /** + * The business type of the changed data (I/U/D/C/A/E), consistent with the EventType defined in EntryProtocol in canal. + */ + private EventType eventType; + + /** + * The business time of the changed data. 
+ */ + private long executeTime; + + /** + * The primary key value before the change, if it is insert/delete, the primary key value before and after the change is the same. + */ + private List oldKeys = new ArrayList(); + + /** + * The primary key value after the change, if it is insert/delete, the primary key value before and after the change is the same. + */ + private List keys = new ArrayList(); + + /** + * Other fields that are not primary keys + */ + private List columns = new ArrayList(); + + // ====================== Additional properties of the data during the running process ============================= + /** + * The expected size, based on the estimation of the binlog event + */ + private long size = 1024; + + /** + * The id of the synchronization mapping relationship + */ + private long pairId = -1; + + /** + * When eventType = CREATE/ALTER/ERASE, it is the corresponding SQL statement, other situations are dynamically generated INSERT/UPDATE/DELETE sql + */ + private String sql; + + /** + * The schemaName of ddl/query, there will be cross-database ddl, need to keep the current schemaName of executing ddl + */ + private String ddlSchemaName; + + /** + * Custom synchronization mode, allows to override the default pipeline parameter, such as for remedial data synchronization + */ + private SyncMode syncMode; + + /** + * Custom synchronization consistency, allows to override the default pipeline parameter, + * such as forcing the database to be queried for field groups + */ + private SyncConsistency syncConsistency; + + /** + * Whether it is remedy data, such as data automatically generated by loopback remedy, or manual correction data produced by freedom + */ + private boolean remedy = false; + + /** + * Generate the corresponding hint content + */ + private String hint; + + /** + * Whether to ignore the schema when generating SQL, such as for tddl/drds, need to ignore the schema + */ + private boolean withoutSchema = false; + + private String journalName; + + private long binLogOffset; + + public CanalConnectRecord() { + super(); + } + + // ======================== helper method ================= + + /** + * Return all fields to be changed + */ + public List getUpdatedColumns() { + List columns = new ArrayList(); + for (EventColumn column : this.columns) { + if (column.isUpdate()) { + columns.add(column); + } + } + + return columns; + } + + /** + * Return all changed primary key fields + */ + public List getUpdatedKeys() { + List columns = new ArrayList(); + for (EventColumn column : this.keys) { + if (column.isUpdate()) { + columns.add(column); + } + } + + return columns; + } + + private List cloneColumn(List columns) { + if (columns == null) { + return null; + } + + List cloneColumns = new ArrayList(); + for (EventColumn column : columns) { + cloneColumns.add(column.clone()); + } + + return cloneColumns; + } + + public CanalConnectRecord clone() { + CanalConnectRecord record = new CanalConnectRecord(); + record.setTableName(tableName); + record.setSchemaName(schemaName); + record.setDdlSchemaName(ddlSchemaName); + record.setEventType(eventType); + record.setExecuteTime(executeTime); + record.setKeys(cloneColumn(keys)); + record.setColumns(cloneColumn(columns)); + record.setOldKeys(cloneColumn(oldKeys)); + record.setSize(size); + record.setPairId(pairId); + record.setSql(sql); + record.setSyncMode(syncMode); + record.setSyncConsistency(syncConsistency); + record.setRemedy(remedy); + record.setHint(hint); + record.setWithoutSchema(withoutSchema); + return record; + } + + 
@Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((columns == null) ? 0 : columns.hashCode()); + result = prime * result + ((eventType == null) ? 0 : eventType.hashCode()); + result = prime * result + (int) (executeTime ^ (executeTime >>> 32)); + result = prime * result + ((keys == null) ? 0 : keys.hashCode()); + result = prime * result + ((oldKeys == null) ? 0 : oldKeys.hashCode()); + result = prime * result + (int) (pairId ^ (pairId >>> 32)); + result = prime * result + ((schemaName == null) ? 0 : schemaName.hashCode()); + result = prime * result + ((tableName == null) ? 0 : tableName.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + CanalConnectRecord other = (CanalConnectRecord) obj; + if (columns == null) { + if (other.columns != null) { + return false; + } + } else if (!columns.equals(other.columns)) { + return false; + } + if (eventType != other.eventType) { + return false; + } + if (executeTime != other.executeTime) { + return false; + } + if (keys == null) { + if (other.keys != null) { + return false; + } + } else if (!keys.equals(other.keys)) { + return false; + } + if (oldKeys == null) { + if (other.oldKeys != null) { + return false; + } + } else if (!oldKeys.equals(other.oldKeys)) { + return false; + } + if (pairId != other.pairId) { + return false; + } + if (schemaName == null) { + if (other.schemaName != null) { + return false; + } + } else if (!schemaName.equals(other.schemaName)) { + return false; + } + if (tableName == null) { + if (other.tableName != null) { + return false; + } + } else if (!tableName.equals(other.tableName)) { + return false; + } + return true; + } + + @Override + public String toString() { + return "CanalConnectRecord{" + + "tableName='" + tableName + '\'' + + ", schemaName='" + schemaName + '\'' + + ", eventType=" + eventType + + ", executeTime=" + executeTime + + ", oldKeys=" + oldKeys + + ", keys=" + keys + + ", columns=" + columns + + ", size=" + size + + ", pairId=" + pairId + + ", sql='" + sql + '\'' + + ", ddlSchemaName='" + ddlSchemaName + '\'' + + ", syncMode=" + syncMode + + ", syncConsistency=" + syncConsistency + + ", remedy=" + remedy + + ", hint='" + hint + '\'' + + ", withoutSchema=" + withoutSchema + + '}'; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/DatabaseConnection.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/DatabaseConnection.java new file mode 100644 index 0000000000..0310e5434c --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/DatabaseConnection.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
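A short sketch of assembling and inspecting a CanalConnectRecord on the consuming side; the schema, table, and the EventType.INSERT constant are illustrative assumptions, and only fields and helpers defined in the class above are used.

```java
import org.apache.eventmesh.connector.canal.CanalConnectRecord;
import org.apache.eventmesh.connector.canal.model.EventType;

public class CanalConnectRecordExample {

    public static CanalConnectRecord sample() {
        CanalConnectRecord record = new CanalConnectRecord();
        record.setSchemaName("demo_db");   // hypothetical schema
        record.setTableName("t_order");    // hypothetical table
        record.setEventType(EventType.INSERT);
        record.setExecuteTime(System.currentTimeMillis());
        return record;
    }

    public static void inspect(CanalConnectRecord record) {
        // Only columns flagged as updated need to be applied on the target side.
        for (Object column : record.getUpdatedColumns()) {
            System.out.println(column);
        }
        // clone() copies the metadata and deep-copies the key/column lists.
        CanalConnectRecord copy = record.clone();
        System.out.println(copy.equals(record));
    }
}
```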
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal; + + +import org.apache.eventmesh.common.config.connector.rdb.canal.SinkConnectorConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.SourceConnectorConfig; + +import java.sql.Connection; +import java.sql.SQLException; + +import com.alibaba.druid.pool.DruidDataSource; + +public class DatabaseConnection { + + public static DruidDataSource sourceDataSource; + + public static DruidDataSource sinkDataSource; + + public static SourceConnectorConfig sourceConfig; + + public static SinkConnectorConfig sinkConfig; + + public static DruidDataSource createDruidDataSource(String url, String userName, String passWord) { + DruidDataSource dataSource = new DruidDataSource(); + dataSource.setUrl(url); + dataSource.setUsername(userName); + dataSource.setPassword(passWord); + dataSource.setInitialSize(5); + dataSource.setMinIdle(5); + dataSource.setMaxActive(20); + dataSource.setMaxWait(60000); + dataSource.setTimeBetweenEvictionRunsMillis(60000); + dataSource.setMinEvictableIdleTimeMillis(300000); + dataSource.setValidationQuery("SELECT 1"); + dataSource.setTestWhileIdle(true); + dataSource.setTestOnBorrow(false); + dataSource.setTestOnReturn(false); + dataSource.setPoolPreparedStatements(true); + dataSource.setMaxPoolPreparedStatementPerConnectionSize(20); + return dataSource; + } + + public static void initSourceConnection() { + sourceDataSource = createDruidDataSource(sourceConfig.getUrl(), + sourceConfig.getUserName(), + sourceConfig.getPassWord()); + } + + public static void initSinkConnection() { + sinkDataSource = createDruidDataSource(sinkConfig.getUrl(), + sinkConfig.getUserName(), + sinkConfig.getPassWord()); + } + + + public static Connection getSourceConnection() throws SQLException { + return sourceDataSource.getConnection(); + } + + public static Connection getSinkConnection() throws SQLException { + return sinkDataSource.getConnection(); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/SqlTimestampConverter.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/SqlTimestampConverter.java new file mode 100644 index 0000000000..8df0b1c097 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/SqlTimestampConverter.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
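A minimal bootstrap sketch for the DatabaseConnection helper above; the JDBC URL and credentials are placeholders, and setters on SourceConnectorConfig matching the getUrl()/getUserName()/getPassWord() calls in the class are assumed.

```java
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.eventmesh.common.config.connector.rdb.canal.SourceConnectorConfig;
import org.apache.eventmesh.connector.canal.DatabaseConnection;

public class DatabaseConnectionExample {

    public static void main(String[] args) throws Exception {
        // Placeholder connection settings; real values come from the connector configuration.
        SourceConnectorConfig config = new SourceConnectorConfig();
        config.setUrl("jdbc:mysql://127.0.0.1:3306/demo_db");
        config.setUserName("demo");
        config.setPassWord("demo");

        DatabaseConnection.sourceConfig = config;
        DatabaseConnection.initSourceConnection();

        // The Druid pool validates connections with "SELECT 1" as configured above.
        try (Connection connection = DatabaseConnection.getSourceConnection();
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("SELECT 1")) {
            resultSet.next();
        }
    }
}
```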
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.Converter; +import org.apache.commons.lang.time.DateFormatUtils; + +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.ParsePosition; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +public class SqlTimestampConverter implements Converter { + + /** + * Field description + */ + public static final String[] DATE_FORMATS = new String[] {"yyyy-MM-dd", "HH:mm:ss", "yyyy-MM-dd HH:mm:ss", + "yyyy-MM-dd hh:mm:ss.fffffffff", "EEE MMM dd HH:mm:ss zzz yyyy", + DateFormatUtils.ISO_DATETIME_FORMAT.getPattern(), + DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), + DateFormatUtils.SMTP_DATETIME_FORMAT.getPattern(), }; + + public static final Converter SQL_TIMESTAMP = new SqlTimestampConverter(null); + + /** + * The default value specified to our Constructor, if any. + */ + private final Object defaultValue; + + /** + * Should we return the default value on conversion errors? + */ + private final boolean useDefault; + + /** + * Create a {@link Converter} that will throw a {@link ConversionException} if a conversion error occurs. + */ + public SqlTimestampConverter() { + this.defaultValue = null; + this.useDefault = false; + } + + /** + * Create a {@link Converter} that will return the specified default value if a conversion error occurs. + * + * @param defaultValue The default value to be returned + */ + public SqlTimestampConverter(Object defaultValue) { + this.defaultValue = defaultValue; + this.useDefault = true; + } + + /** + * Convert the specified input object into an output object of the specified type. 
+ * + * @param type Data type to which this value should be converted + * @param value The input value to be converted + * @throws ConversionException if conversion cannot be performed successfully + */ + public Object convert(Class type, Object value) { + if (value == null) { + if (useDefault) { + return (defaultValue); + } else { + throw new ConversionException("No value specified"); + } + } + + if (value instanceof java.sql.Date && java.sql.Date.class.equals(type)) { + return value; + } else if (value instanceof java.sql.Time && java.sql.Time.class.equals(type)) { + return value; + } else if (value instanceof Timestamp && Timestamp.class.equals(type)) { + return value; + } else { + try { + if (java.sql.Date.class.equals(type)) { + return new java.sql.Date(convertTimestamp2TimeMillis(value.toString())); + } else if (java.sql.Time.class.equals(type)) { + return new java.sql.Time(convertTimestamp2TimeMillis(value.toString())); + } else if (Timestamp.class.equals(type)) { + return new Timestamp(convertTimestamp2TimeMillis(value.toString())); + } else { + return new Timestamp(convertTimestamp2TimeMillis(value.toString())); + } + } catch (Exception e) { + throw new ConversionException("Value format invalid: " + e.getMessage(), e); + } + } + + } + + private Long convertTimestamp2TimeMillis(String input) { + if (input == null) { + return null; + } + + try { + return Timestamp.valueOf(input).getTime(); + } catch (Exception nfe) { + try { + try { + return parseDate(input, Locale.ENGLISH).getTime(); + } catch (Exception err) { + return parseDate(input, Locale.getDefault()).getTime(); + } + } catch (Exception err) { + return Long.parseLong(input); + } + } + } + + private Date parseDate(String str, Locale locale) throws ParseException { + if ((str == null) || (SqlTimestampConverter.DATE_FORMATS == null)) { + throw new IllegalArgumentException("Date and Patterns must not be null"); + } + + SimpleDateFormat parser = null; + ParsePosition pos = new ParsePosition(0); + + for (int i = 0; i < SqlTimestampConverter.DATE_FORMATS.length; i++) { + if (i == 0) { + parser = new SimpleDateFormat(SqlTimestampConverter.DATE_FORMATS[0], locale); + } else { + parser.applyPattern(SqlTimestampConverter.DATE_FORMATS[i]); + } + pos.setIndex(0); + Date date = parser.parse(str, pos); + if ((date != null) && (pos.getIndex() == str.length())) { + return date; + } + } + + throw new ParseException("Unable to parse the date: " + str, -1); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/SqlUtils.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/SqlUtils.java new file mode 100644 index 0000000000..1008ad1cf3 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/SqlUtils.java @@ -0,0 +1,921 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
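A quick sketch of the two beanutils converters in action, mirroring the registrations performed in SqlUtils' static block below; the literal values are arbitrary.

```java
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;

import org.apache.eventmesh.connector.canal.ByteArrayConverter;
import org.apache.eventmesh.connector.canal.SqlTimestampConverter;

public class CanalConverterExample {

    public static void main(String[] args) {
        // String -> java.sql.Timestamp, trying Timestamp.valueOf first and the DATE_FORMATS patterns as fallback.
        Timestamp ts = (Timestamp) SqlTimestampConverter.SQL_TIMESTAMP.convert(Timestamp.class, "2024-05-01 12:30:45");

        // String -> byte[], using ISO-8859-1 so every byte value round-trips unchanged.
        byte[] bytes = (byte[]) ByteArrayConverter.SQL_BYTES.convert(byte[].class, "binary-ish value");

        System.out.println(ts.getTime() + " / " + new String(bytes, StandardCharsets.ISO_8859_1));
    }
}
```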
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal; + +import static org.apache.eventmesh.connector.canal.ByteArrayConverter.SQL_BYTES; +import static org.apache.eventmesh.connector.canal.SqlTimestampConverter.SQL_TIMESTAMP; + +import org.apache.commons.beanutils.ConvertUtilsBean; +import org.apache.commons.lang.StringUtils; + +import java.io.UnsupportedEncodingException; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.JDBCType; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.DateTimeException; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.ZoneId; +import java.time.temporal.Temporal; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.locationtech.jts.geom.GeometryFactory; +import org.locationtech.jts.io.WKBReader; +import org.locationtech.jts.io.WKTReader; + +import com.mysql.cj.Constants; +import com.mysql.cj.MysqlType; +import com.taobao.tddl.dbsync.binlog.LogBuffer; + +public class SqlUtils { + + public static final String REQUIRED_FIELD_NULL_SUBSTITUTE = " "; + private static final Map> sqlTypeToJavaTypeMap = new HashMap>(); + private static final ConvertUtilsBean convertUtilsBean = new ConvertUtilsBean(); + private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory(); + private static final WKBReader WKB_READER = new WKBReader(GEOMETRY_FACTORY); + private static final BigDecimal NANO_SEC = new BigDecimal(LogBuffer.DIG_BASE); + private static final LocalDateTime BASE = LocalDateTime.of(1970, 1, 1, 0, 0, 0, 0); + private static final long ONE_HOUR = 3600; + private static final long ONE_MINUTE = 60; + + static { + // regist Converter + convertUtilsBean.register(SQL_TIMESTAMP, Date.class); + convertUtilsBean.register(SQL_TIMESTAMP, Time.class); + convertUtilsBean.register(SQL_TIMESTAMP, Timestamp.class); + convertUtilsBean.register(SQL_BYTES, byte[].class); + + // bool + sqlTypeToJavaTypeMap.put(Types.BOOLEAN, Boolean.class); + + // int + sqlTypeToJavaTypeMap.put(Types.TINYINT, Integer.class); + sqlTypeToJavaTypeMap.put(Types.SMALLINT, Integer.class); + sqlTypeToJavaTypeMap.put(Types.INTEGER, Integer.class); + + // long + sqlTypeToJavaTypeMap.put(Types.BIGINT, Long.class); + // mysql bit + sqlTypeToJavaTypeMap.put(Types.BIT, BigInteger.class); + + // decimal + sqlTypeToJavaTypeMap.put(Types.REAL, Float.class); + sqlTypeToJavaTypeMap.put(Types.FLOAT, Float.class); + sqlTypeToJavaTypeMap.put(Types.DOUBLE, Double.class); + sqlTypeToJavaTypeMap.put(Types.NUMERIC, BigDecimal.class); + sqlTypeToJavaTypeMap.put(Types.DECIMAL, BigDecimal.class); + + // date + sqlTypeToJavaTypeMap.put(Types.DATE, Date.class); + sqlTypeToJavaTypeMap.put(Types.TIME, Time.class); + sqlTypeToJavaTypeMap.put(Types.TIMESTAMP, 
Timestamp.class); + + // blob + sqlTypeToJavaTypeMap.put(Types.BLOB, byte[].class); + + // byte[] + sqlTypeToJavaTypeMap.put(Types.REF, byte[].class); + sqlTypeToJavaTypeMap.put(Types.OTHER, byte[].class); + sqlTypeToJavaTypeMap.put(Types.ARRAY, byte[].class); + sqlTypeToJavaTypeMap.put(Types.STRUCT, byte[].class); + sqlTypeToJavaTypeMap.put(Types.SQLXML, byte[].class); + sqlTypeToJavaTypeMap.put(Types.BINARY, byte[].class); + sqlTypeToJavaTypeMap.put(Types.DATALINK, byte[].class); + sqlTypeToJavaTypeMap.put(Types.DISTINCT, byte[].class); + sqlTypeToJavaTypeMap.put(Types.VARBINARY, byte[].class); + sqlTypeToJavaTypeMap.put(Types.JAVA_OBJECT, byte[].class); + sqlTypeToJavaTypeMap.put(Types.LONGVARBINARY, byte[].class); + + // String + sqlTypeToJavaTypeMap.put(Types.CHAR, String.class); + sqlTypeToJavaTypeMap.put(Types.VARCHAR, String.class); + sqlTypeToJavaTypeMap.put(Types.LONGVARCHAR, String.class); + sqlTypeToJavaTypeMap.put(Types.LONGNVARCHAR, String.class); + sqlTypeToJavaTypeMap.put(Types.NCHAR, String.class); + sqlTypeToJavaTypeMap.put(Types.NVARCHAR, String.class); + sqlTypeToJavaTypeMap.put(Types.NCLOB, String.class); + sqlTypeToJavaTypeMap.put(Types.CLOB, String.class); + } + + public static String genPrepareSqlOfInClause(int size) { + StringBuilder sql = new StringBuilder(); + sql.append("("); + for (int i = 0; i < size; i++) { + sql.append("?"); + if (i < size - 1) { + sql.append(","); + } + } + sql.append(")"); + return sql.toString(); + } + + public static void setInClauseParameters(PreparedStatement preparedStatement, List params) throws SQLException { + setInClauseParameters(preparedStatement, 0, params); + } + + public static void setInClauseParameters(PreparedStatement preparedStatement, int paramIndexStart, List params) throws SQLException { + for (int i = 0; i < params.size(); i++) { + preparedStatement.setString(paramIndexStart + i, params.get(i)); + } + } + + public static String sqlValueToString(ResultSet rs, int index, int sqlType) throws SQLException { + Class requiredType = sqlTypeToJavaTypeMap.get(sqlType); + if (requiredType == null) { + throw new IllegalArgumentException("unknow java.sql.Types - " + sqlType); + } + + return getResultSetValue(rs, index, requiredType); + } + + public static Object stringToSqlValue(String value, int sqlType, boolean isRequired, boolean isEmptyStringNulled) { + if (SqlUtils.isTextType(sqlType)) { + if ((value == null) || (StringUtils.isEmpty(value) && isEmptyStringNulled)) { + return isRequired ? REQUIRED_FIELD_NULL_SUBSTITUTE : null; + } else { + return value; + } + } else { + if (StringUtils.isEmpty(value)) { + return isEmptyStringNulled ? null : value; + } else { + Class requiredType = sqlTypeToJavaTypeMap.get(sqlType); + if (requiredType == null) { + throw new IllegalArgumentException("unknow java.sql.Types - " + sqlType); + } else if (requiredType.equals(String.class)) { + return value; + } else if (isNumeric(sqlType)) { + return convertUtilsBean.convert(value.trim(), requiredType); + } else { + return convertUtilsBean.convert(value, requiredType); + } + } + } + } + + public static String encoding(String source, int sqlType, String sourceEncoding, String targetEncoding) { + switch (sqlType) { + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + case Types.NCHAR: + case Types.NVARCHAR: + case Types.LONGNVARCHAR: + case Types.CLOB: + case Types.NCLOB: + if (!StringUtils.isEmpty(source)) { + String fromEncoding = StringUtils.isBlank(sourceEncoding) ? 
"UTF-8" : sourceEncoding; + String toEncoding = StringUtils.isBlank(targetEncoding) ? "UTF-8" : targetEncoding; + + // if (false == StringUtils.equalsIgnoreCase(fromEncoding, + // toEncoding)) { + try { + return new String(source.getBytes(fromEncoding), toEncoding); + } catch (UnsupportedEncodingException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + // } + } + break; + default: + throw new IllegalStateException("Unexpected value: " + sqlType); + } + + return source; + } + + /** + * Retrieve a JDBC column value from a ResultSet, using the specified value type. + *

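+ * <p>
+ * The required type is normally resolved from {@code sqlTypeToJavaTypeMap}. For example (a sketch,
+ * assuming the first column of the result set is DECIMAL), {@code sqlValueToString(rs, 1, Types.DECIMAL)}
+ * resolves the required type to {@code BigDecimal}, so this method reads the column via
+ * {@code rs.getBigDecimal(1)} and then converts the result to a String.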
+ * Uses the specifically typed ResultSet accessor methods, falling back to {@link #getResultSetValue(ResultSet, int)} for unknown types. + *

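+ * <p>
+ * Temporal columns ({@code Date}, {@code Time}, {@code Timestamp}) are read with
+ * {@code rs.getString(index)} so the database's textual form is kept, and binary columns are
+ * re-encoded as ISO-8859-1 strings before the final conversion.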
+ * Note that the returned value may not be assignable to the specified required type, in case of an unknown type. Calling code needs to deal with + * this case appropriately, e.g. throwing a corresponding exception. + * + * @param rs is the ResultSet holding the data + * @param index is the column index + * @param requiredType the required value type (may be null) + * @return the value object + * @throws SQLException if thrown by the JDBC API + */ + private static String getResultSetValue(ResultSet rs, int index, Class requiredType) throws SQLException { + if (requiredType == null) { + return getResultSetValue(rs, index); + } + + Object value = null; + boolean wasNullCheck = false; + + // Explicitly extract typed value, as far as possible. + if (String.class.equals(requiredType)) { + value = rs.getString(index); + } else if (boolean.class.equals(requiredType) || Boolean.class.equals(requiredType)) { + value = rs.getBoolean(index); + wasNullCheck = true; + } else if (byte.class.equals(requiredType) || Byte.class.equals(requiredType)) { + value = rs.getByte(index); + wasNullCheck = true; + } else if (short.class.equals(requiredType) || Short.class.equals(requiredType)) { + value = rs.getShort(index); + wasNullCheck = true; + } else if (int.class.equals(requiredType) || Integer.class.equals(requiredType)) { + value = rs.getLong(index); + wasNullCheck = true; + } else if (long.class.equals(requiredType) || Long.class.equals(requiredType)) { + value = rs.getBigDecimal(index); + wasNullCheck = true; + } else if (float.class.equals(requiredType) || Float.class.equals(requiredType)) { + value = rs.getFloat(index); + wasNullCheck = true; + } else if (double.class.equals(requiredType) || Double.class.equals(requiredType) || Number.class.equals(requiredType)) { + value = rs.getDouble(index); + wasNullCheck = true; + } else if (Time.class.equals(requiredType)) { + value = rs.getString(index); + } else if (Timestamp.class.equals(requiredType) || Date.class.equals(requiredType)) { + value = rs.getString(index); + } else if (BigDecimal.class.equals(requiredType)) { + value = rs.getBigDecimal(index); + } else if (BigInteger.class.equals(requiredType)) { + value = rs.getBigDecimal(index); + } else if (Blob.class.equals(requiredType)) { + value = rs.getBlob(index); + } else if (Clob.class.equals(requiredType)) { + value = rs.getClob(index); + } else if (byte[].class.equals(requiredType)) { + byte[] bytes = rs.getBytes(index); + if (bytes != null) { + value = new String(bytes, StandardCharsets.ISO_8859_1); + } + } else { + // Some unknown type desired -> rely on getObject. + value = getResultSetValue(rs, index); + } + + // Perform was-null check if demanded (for results that the + // JDBC driver returns as primitives). + if (wasNullCheck && (value != null) && rs.wasNull()) { + value = null; + } + + return (value == null) ? null : convertUtilsBean.convert(value); + } + + /** + * Retrieve a JDBC column value from a ResultSet, using the most appropriate value type. The returned value should be a detached value object, not + * having any ties to the active ResultSet: in particular, it should not be a Blob or Clob object but rather a byte array respectively String + * representation. + *

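+ * <p>
+ * In this variant the extracted value is additionally rendered as a String through the shared
+ * {@code ConvertUtilsBean}, so callers receive detached text rather than a live JDBC object.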
+ * Uses the getObject(index) method, but includes additional "hacks" to get around Oracle 10g returning a non-standard object for its + * TIMESTAMP datatype and a java.sql.Date for DATE columns leaving out the time portion: These columns will explicitly be extracted + * as standard java.sql.Timestamp object. + * + * @param rs is the ResultSet holding the data + * @param index is the column index + * @return the value object + * @throws SQLException if thrown by the JDBC API + * @see Blob + * @see Clob + * @see Timestamp + */ + private static String getResultSetValue(ResultSet rs, int index) throws SQLException { + Object obj = rs.getObject(index); + return (obj == null) ? null : convertUtilsBean.convert(obj); + } + + // private static Object convertTimestamp(Timestamp timestamp) { + // return (timestamp == null) ? null : timestamp.getTime(); + // } + + /** + * Check whether the given SQL type is numeric. + */ + public static boolean isNumeric(int sqlType) { + return (Types.BIT == sqlType) || (Types.BIGINT == sqlType) || (Types.DECIMAL == sqlType) || (Types.DOUBLE == sqlType) + || (Types.FLOAT == sqlType) || (Types.INTEGER == sqlType) || (Types.NUMERIC == sqlType) || (Types.REAL == sqlType) + || (Types.SMALLINT == sqlType) || (Types.TINYINT == sqlType); + } + + public static boolean isTextType(int sqlType) { + return sqlType == Types.CHAR || sqlType == Types.VARCHAR || sqlType == Types.CLOB || sqlType == Types.LONGVARCHAR || sqlType == Types.NCHAR + || sqlType == Types.NVARCHAR || sqlType == Types.NCLOB || sqlType == Types.LONGNVARCHAR; + } + + public static JDBCType toJDBCType(String connectorDataType) { + MysqlType mysqlType = MysqlType.getByName(connectorDataType); + return JDBCType.valueOf(mysqlType.getJdbcType()); + } + + public static BigDecimal toBigDecimal(Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + String strValue = (String) value; + if (!org.apache.commons.lang3.StringUtils.isNotBlank(strValue)) { + return null; + } + try { + return new BigDecimal(strValue); + } catch (Exception e) { + if ("true".equals(strValue)) { + return BigDecimal.ONE; + } + if ("false".equals(strValue)) { + return BigDecimal.ZERO; + } + return new BigDecimal(strValue); + } + } else if (value instanceof Number) { + if (value instanceof BigDecimal) { + return (BigDecimal) value; + } + if (value instanceof Integer) { + return BigDecimal.valueOf(((Integer) value).longValue()); + } + if (value instanceof Long) { + return BigDecimal.valueOf(((Long) value)); + } + if (value instanceof Double) { + return BigDecimal.valueOf(((Double) value)); + } + if (value instanceof Float) { + return BigDecimal.valueOf(((Float) value).doubleValue()); + } + if (value instanceof BigInteger) { + return new BigDecimal((BigInteger) value); + } + if (value instanceof Byte) { + return BigDecimal.valueOf(((Byte) value).longValue()); + } + if (value instanceof Short) { + return BigDecimal.valueOf(((Short) value).longValue()); + } + return null; + } else if (value instanceof Boolean) { + return Boolean.TRUE.equals(value) ? 
BigDecimal.ONE : BigDecimal.ZERO; + } else { + throw new UnsupportedOperationException("class " + value.getClass() + ", value '" + value + "' , parse to big decimal failed."); + } + } + + public static Double toDouble(Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + String strValue = (String) value; + if (org.apache.commons.lang3.StringUtils.isBlank(strValue)) { + return null; + } + try { + return Double.parseDouble(strValue); + } catch (Exception e) { + if ("true".equals(strValue)) { + return 1.0d; + } + if ("false".equals(strValue)) { + return 0.0d; + } + return new BigDecimal(strValue).doubleValue(); + } + } else if (value instanceof Number) { + return ((Number) value).doubleValue(); + } else { + if (value instanceof Boolean) { + return Boolean.TRUE.equals(value) ? 1.0d : 0.0d; + } + throw new UnsupportedOperationException("class " + value.getClass() + ", value '" + value + "' , parse to double failed."); + } + } + + public static Long toLong(Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + String strValue = (String) value; + if (org.apache.commons.lang3.StringUtils.isBlank(strValue)) { + return null; + } + try { + return Long.parseLong(strValue); + } catch (Exception e) { + try { + return Long.decode(strValue); + } catch (Exception e2) { + if ("true".equals(strValue)) { + return 1L; + } + if ("false".equals(strValue)) { + return 0L; + } + return new BigDecimal(strValue).longValue(); + } + } + } else if (value instanceof Number) { + return ((Number) value).longValue(); + } else { + if (value instanceof Boolean) { + return Boolean.TRUE.equals(value) ? 1L : 0L; + } + throw new UnsupportedOperationException(value.getClass() + ", value '" + value + "' , parse to long failed."); + } + } + + public static boolean isZeroTime(Object value) { + if (value == null || org.apache.commons.lang3.StringUtils.isBlank(value.toString())) { + return false; + } + return value.toString().startsWith("0000-00-00"); + } + + public static String removeZone(String datetime) { + if (datetime == null || datetime.length() == 0) { + return datetime; + } + int len = datetime.length(); + if (datetime.charAt(len - 1) == 'Z' || datetime.charAt(len - 1) == 'z') { + return datetime.substring(0, len - 1).trim(); + } + if (len >= 7) { + char checkCharAt1 = datetime.charAt(len - 2); + if ((checkCharAt1 == '+' || checkCharAt1 == '-') && len >= 10) { + return datetime.substring(0, len - 2).trim(); + } + char checkCharAt2 = datetime.charAt(len - 3); + if ((checkCharAt2 == '+' || checkCharAt2 == '-') && len >= 11) { + return datetime.substring(0, len - 3).trim(); + } + char checkCharAt3 = datetime.charAt(len - 6); + if ((checkCharAt3 == '+' || checkCharAt3 == '-') && checkCharAt2 == ':') { + return datetime.substring(0, len - 6).trim(); + } + char checkCharAt4 = datetime.charAt(len - 5); + if ((checkCharAt4 == '+' || checkCharAt4 == '-') && checkCharAt2 == ':') { + return datetime.substring(0, len - 5).trim(); + } + char checkCharAt5 = len >= 9 ? 
datetime.charAt(len - 9) : ' '; + if ((checkCharAt5 == '+' || checkCharAt5 == '-') && checkCharAt2 == ':' && checkCharAt3 == ':') { + return datetime.substring(0, len - 9).trim(); + } + char checkCharAt6 = datetime.charAt(len - 7); + if (checkCharAt6 == '+' || checkCharAt6 == '-') { + return datetime.substring(0, len - 7).trim(); + } + if (checkCharAt4 == '+' || checkCharAt4 == '-') { + return datetime.substring(0, len - 5).trim(); + } + } + return datetime; + } + + + + public static String bytes2hex(byte[] b) { + if (b == null) { + return null; + } + if (b.length == 0) { + return ""; + } + StringBuilder hs = new StringBuilder(); + for (byte element : b) { + String stmp = Integer.toHexString(element & 255).toUpperCase(); + if (stmp.length() == 1) { + hs.append(Constants.CJ_MINOR_VERSION); + hs.append(stmp); + } else { + hs.append(stmp); + } + } + return hs.toString(); + } + + public static String convertToString(Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + return (String) value; + } + if (value instanceof BigInteger) { + return value.toString(); + } + if (value instanceof BigDecimal) { + return ((BigDecimal) value).toPlainString(); + } + if (value instanceof Number) { + return new BigDecimal(value.toString()).toPlainString(); + } + if (value instanceof Boolean) { + return Boolean.TRUE.equals(value) ? "1" : "0"; + } + if (value instanceof byte[]) { + return "0x" + bytes2hex((byte[]) value); + } + if (value instanceof Timestamp) { + long nanos = ((Timestamp) value).getNanos(); + value = Instant.ofEpochMilli(((Timestamp) value).getTime() - (nanos / 1000000)).plusNanos(nanos).atZone(ZoneId.systemDefault()) + .toLocalDateTime(); + } else if (value instanceof Date) { + value = ((Date) value).toLocalDate().atTime(0, 0); + } else if (value instanceof Time) { + value = LocalDateTime.of(LocalDate.of(1970, 1, 1), + Instant.ofEpochMilli(((Time) value).getTime()).atZone(ZoneId.systemDefault()).toLocalTime()); + } else if (value instanceof java.util.Date) { + value = ((java.util.Date) value).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime(); + } + if (value instanceof LocalDateTime) { + return coverLocalDateTime2String((LocalDateTime) value); + } else if (value instanceof OffsetDateTime) { + OffsetDateTime zone = (OffsetDateTime) value; + String datetimeStr = coverLocalDateTime2String(zone.toLocalDateTime()); + String zonedStr = zone.getOffset().toString(); + if ("Z".equals(zonedStr)) { + return datetimeStr + "+00:00"; + } + return datetimeStr + zonedStr; + } else if (!(value instanceof LocalTime)) { + return value.toString(); + } else { + LocalTime local3 = (LocalTime) value; + return String.format("%02d:%02d:%02d", local3.getHour(), local3.getMinute(), local3.getSecond()); + } + } + + + private static String coverLocalDateTime2String(LocalDateTime localDateTime) { + LocalDate localDate = localDateTime.toLocalDate(); + LocalTime localTime = localDateTime.toLocalTime(); + int year = localDate.getYear(); + int month = localDate.getMonthValue(); + int day = localDate.getDayOfMonth(); + int hour = localTime.getHour(); + int minute = localTime.getMinute(); + int second = localTime.getSecond(); + int nano = localTime.getNano(); + return nano == 0 ? 
String.format("%04d-%02d-%02d %02d:%02d:%02d", year, month, day, hour, minute, second) : + String.format("%04d-%02d-%02d %02d:%02d:%02d.%s", year, month, day, hour, minute, second, + new BigDecimal(nano).divide(NANO_SEC).toPlainString().substring(2)); + } + + public static String toMySqlTime(Object value) { + if (value == null || StringUtils.isBlank(value.toString())) { + return null; + } + if (value instanceof String) { + return value.toString(); + } + LocalDateTime localTime = toLocalDateTime(value); + if (BASE.isBefore(localTime) || BASE.isEqual(localTime)) { + long diffHours = Duration.between(BASE, localTime).toHours(); + if (localTime.getNano() == 0) { + return String.format("%02d:%02d:%02d", diffHours, localTime.getMinute(), localTime.getSecond()); + } + return String.format("%02d:%02d:%02d.%s", diffHours, localTime.getMinute(), localTime.getSecond(), + Integer.parseInt(trimEnd(String.valueOf(localTime.getNano()), '0'))); + } + Duration duration = Duration.between(localTime, BASE); + long totalSecond = duration.getSeconds(); + long hours = totalSecond / ONE_HOUR; + long remaining = totalSecond - (hours * ONE_HOUR); + long minutes = remaining / ONE_MINUTE; + remaining = remaining - (minutes * ONE_MINUTE); + if (duration.getNano() == 0) { + return String.format("-%02d:%02d:%02d", hours, minutes, remaining); + } + return String.format("-%02d:%02d:%02d.%s", hours, minutes, remaining, Integer.parseInt(trimEnd(String.valueOf(duration.getNano()), '0'))); + } + + public static String trimEnd(String str, char trimChar) { + if (str == null || str.isEmpty()) { + return str; + } + char[] val = str.toCharArray(); + int len = val.length; + while (0 < len && val[len - 1] == trimChar) { + len--; + } + return len < val.length ? str.substring(0, len) : str; + } + + public static byte[] numberToBinaryArray(Number number) { + BigInteger bigInt = BigInteger.valueOf(number.longValue()); + int size = (bigInt.bitLength() + 7) / 8; + byte[] result = new byte[size]; + byte[] bigIntBytes = bigInt.toByteArray(); + int start = bigInt.bitLength() % 8 == 0 ? 1 : 0; + int length = Math.min(bigIntBytes.length - start, size); + System.arraycopy(bigIntBytes, start, result, size - length, length); + return result; + } + + public static Integer toInt(Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + String strValue = ((String) value).toLowerCase(); + if (StringUtils.isBlank(strValue)) { + return null; + } + try { + return Integer.parseInt(strValue); + } catch (Exception e) { + try { + return Integer.decode(strValue); + } catch (Exception e2) { + if ("true".equals(strValue)) { + return 1; + } + if ("false".equals(strValue)) { + return 0; + } + return new BigDecimal(strValue).intValue(); + } + } + } else if (value instanceof Number) { + return ((Number) value).intValue(); + } else { + if (value instanceof Boolean) { + return Boolean.TRUE.equals(value) ? 
1 : 0; + } + throw new UnsupportedOperationException("class " + value.getClass() + ", value '" + value + "' , parse to int failed."); + } + } + + private static LocalDateTime toLocalDateTime(String value) { + if (value.trim().length() >= 4) { + String dateStr2 = removeZone(value); + int len = dateStr2.length(); + if (len == 4) { + return LocalDateTime.of(Integer.parseInt(dateStr2), 1, 1, 0, 0, 0, 0); + } + if (dateStr2.charAt(4) == '-') { + switch (len) { + case 7: + String[] dataParts = dateStr2.split("-"); + return LocalDateTime.of(Integer.parseInt(dataParts[0]), Integer.parseInt(dataParts[1]), 1, 0, 0, 0, 0); + case 8: + case 9: + case 11: + case 12: + case 14: + case 15: + case 17: + case 18: + default: + String[] dataTime = dateStr2.split(" "); + String[] dataParts2 = dataTime[0].split("-"); + String[] timeParts = dataTime[1].split(":"); + String[] secondParts = timeParts[2].split("\\."); + secondParts[1] = StringUtils.rightPad(secondParts[1], 9, Constants.CJ_MINOR_VERSION); + return LocalDateTime.of(Integer.parseInt(dataParts2[0]), Integer.parseInt(dataParts2[1]), Integer.parseInt(dataParts2[2]), + Integer.parseInt(timeParts[0]), Integer.parseInt(timeParts[1]), Integer.parseInt(secondParts[0]), + Integer.parseInt(secondParts[1])); + case 10: + String[] dataParts3 = dateStr2.split("-"); + return LocalDateTime.of(Integer.parseInt(dataParts3[0]), Integer.parseInt(dataParts3[1]), Integer.parseInt(dataParts3[2]), 0, + 0, 0, 0); + case 13: + String[] dataTime2 = dateStr2.split(" "); + String[] dataParts4 = dataTime2[0].split("-"); + return LocalDateTime.of(Integer.parseInt(dataParts4[0]), Integer.parseInt(dataParts4[1]), Integer.parseInt(dataParts4[2]), + Integer.parseInt(dataTime2[1]), 0, 0, 0); + case 16: + String[] dataTime3 = dateStr2.split(" "); + String[] dataParts5 = dataTime3[0].split("-"); + String[] timeParts2 = dataTime3[1].split(":"); + return LocalDateTime.of(Integer.parseInt(dataParts5[0]), Integer.parseInt(dataParts5[1]), Integer.parseInt(dataParts5[2]), + Integer.parseInt(timeParts2[0]), Integer.parseInt(timeParts2[1]), 0, 0); + case 19: + String[] dataTime4 = dateStr2.split(" "); + String[] dataParts6 = dataTime4[0].split("-"); + String[] timeParts3 = dataTime4[1].split(":"); + return LocalDateTime.of(Integer.parseInt(dataParts6[0]), Integer.parseInt(dataParts6[1]), Integer.parseInt(dataParts6[2]), + Integer.parseInt(timeParts3[0]), Integer.parseInt(timeParts3[1]), Integer.parseInt(timeParts3[2]), 0); + } + } else if (dateStr2.charAt(2) == ':') { + switch (len) { + case 5: + String[] timeParts4 = dateStr2.split(":"); + return LocalDateTime.of(0, 1, 1, Integer.parseInt(timeParts4[0]), Integer.parseInt(timeParts4[1]), 0, 0); + case 8: + String[] timeParts5 = dateStr2.split(":"); + return LocalDateTime.of(0, 1, 1, Integer.parseInt(timeParts5[0]), Integer.parseInt(timeParts5[1]), + Integer.parseInt(timeParts5[2]), 0); + default: + String[] timeParts6 = dateStr2.split(":"); + String[] secondParts2 = timeParts6[2].split("\\."); + secondParts2[1] = StringUtils.rightPad(secondParts2[1], 9, Constants.CJ_MINOR_VERSION); + return LocalDateTime.of(0, 1, 1, Integer.parseInt(timeParts6[0]), Integer.parseInt(timeParts6[1]), + Integer.parseInt(secondParts2[0]), Integer.parseInt(secondParts2[1])); + } + } else { + throw new UnsupportedOperationException(value.getClass() + ", value '" + value + "' , parse to local date time failed."); + } + } else if (StringUtils.isNumeric(value)) { + return LocalDateTime.of(Integer.parseInt(value), 1, 1, 0, 0, 0, 0); + } else { + throw new 
DateTimeException(value + " format error."); + } + } + + public static LocalDateTime toLocalDateTime(Object value) { + if (value == null || StringUtils.isBlank(value.toString())) { + return null; + } + if (value instanceof Temporal) { + if (value instanceof LocalDateTime) { + return (LocalDateTime) value; + } + if (value instanceof OffsetDateTime) { + return ((OffsetDateTime) value).toLocalDateTime(); + } + if (value instanceof LocalTime) { + return LocalDateTime.of(LocalDate.of(1970, 1, 1), (LocalTime) value); + } else if (value instanceof LocalDate) { + return LocalDateTime.of((LocalDate) value, LocalTime.of(0, 0)); + } else { + throw new UnsupportedOperationException(value.getClass() + ", value '" + value + "' , parse local date time failed."); + } + } else if (!(value instanceof java.util.Date)) { + return toLocalDateTime(value.toString()); + } else { + if (value instanceof Timestamp) { + long nanos = ((Timestamp) value).getNanos(); + return Instant.ofEpochMilli(((Timestamp) value).getTime() - (nanos / 1000000)).plusNanos(nanos).atZone(ZoneId.systemDefault()) + .toLocalDateTime(); + } else if (value instanceof java.sql.Date) { + return ((java.sql.Date) value).toLocalDate().atTime(0, 0); + } else { + if (!(value instanceof Time)) { + return ((java.util.Date) value).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime(); + } + return LocalDateTime.of(LocalDate.of(1970, 1, 1), + Instant.ofEpochMilli(((Time) value).getTime()).atZone(ZoneId.systemDefault()).toLocalTime()); + } + } + } + + public static boolean isHexNumber(String str) { + boolean flag = true; + if (str.startsWith("0x") || str.startsWith("0X")) { + str = str.substring(2); + } + int i = 0; + while (true) { + if (i < str.length()) { + char cc = str.charAt(i); + if (cc != '0' && cc != '1' && cc != '2' && cc != '3' && cc != '4' && cc != '5' && cc != '6' && cc != '7' && cc != '8' && cc != '9' + && cc != 'A' && cc != 'B' && cc != 'C' && cc != 'D' && cc != 'E' && cc != 'F' && cc != 'a' && cc != 'b' && cc != 'c' && cc != 'd' + && cc != 'e' && cc != 'f') { + flag = false; + break; + } + i++; + } else { + break; + } + } + return flag; + } + + public static byte[] toBytes(Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + String strVal = (String) value; + if ((strVal.startsWith("0x") || strVal.startsWith("0X")) && isHexNumber(strVal)) { + return hex2bytes(strVal.substring(2)); + } + return ((String) value).getBytes(StandardCharsets.ISO_8859_1); + } else if (value instanceof byte[]) { + return (byte[]) value; + } else { + throw new UnsupportedOperationException("class " + value.getClass() + ", value '" + value + "' , parse to bytes failed."); + } + } + + public static String toGeometry(Object value) throws Exception { + if (value == null) { + return null; + } + if (value instanceof String) { + String strVal = (String) value; + if (!strVal.startsWith("0x") && !strVal.startsWith("0X")) { + return (String) value; + } + return new WKTReader().read((String) value).toText(); + } else if (value instanceof byte[]) { + // mysql add 4 byte in header of geometry + byte[] bytes = (byte[]) value; + if (bytes.length > 4) { + byte[] dst = new byte[bytes.length - 4]; + System.arraycopy(bytes, 4, dst, 0, bytes.length - 4); + return new WKBReader().read(dst).toText(); + } + return new WKBReader().read(bytes).toText(); + } else { + throw new UnsupportedOperationException("class " + value.getClass() + ", value '" + value + "' , " + "parse to geometry failed."); + } + } + + public static byte[] hex2bytes(String 
hexStr) { + if (hexStr == null) { + return null; + } + if (org.apache.commons.lang3.StringUtils.isBlank(hexStr)) { + return new byte[0]; + } + + if (hexStr.length() % 2 == 1) { + hexStr = "0" + hexStr; + } + + int count = hexStr.length() / 2; + byte[] ret = new byte[count]; + for (int i = 0; i < count; i++) { + int index = i * 2; + char c1 = hexStr.charAt(index); + char c2 = hexStr.charAt(index + 1); + ret[i] = (byte) (toByte(c1) << 4); + ret[i] = (byte) (ret[i] | toByte(c2)); + } + return ret; + } + + private static byte toByte(char src) { + switch (Character.toUpperCase(src)) { + case '0': + return 0; + case '1': + return 1; + case '2': + return 2; + case '3': + return 3; + case '4': + return 4; + case '5': + return 5; + case '6': + return 6; + case '7': + return 7; + case '8': + return 8; + case '9': + return 9; + case 'A': + return 10; + case 'B': + return 11; + case 'C': + return 12; + case 'D': + return 13; + case 'E': + return 14; + case 'F': + return 15; + default: + throw new IllegalStateException("0-F"); + } + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/config/CanalServerConfig.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/config/CanalServerConfig.java new file mode 100644 index 0000000000..b28982f7b6 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/config/CanalServerConfig.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.config; + +import org.apache.eventmesh.common.config.connector.Config; + +import lombok.Data; +import lombok.EqualsAndHashCode; + +@Data +@EqualsAndHashCode(callSuper = true) +public class CanalServerConfig extends Config { + + private boolean sourceEnable; + + private boolean sinkEnable; + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/AbstractDbDialect.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/AbstractDbDialect.java new file mode 100644 index 0000000000..4cf0f82ec9 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/AbstractDbDialect.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.dialect; + +import org.apache.eventmesh.connector.canal.template.SqlTemplate; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.jdbc.support.lob.LobHandler; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.support.TransactionTemplate; + +import lombok.extern.slf4j.Slf4j; + + +@Slf4j +public abstract class AbstractDbDialect implements DbDialect { + + protected int databaseMajorVersion; + protected int databaseMinorVersion; + protected String databaseName; + protected SqlTemplate sqlTemplate; + protected JdbcTemplate jdbcTemplate; + protected TransactionTemplate transactionTemplate; + protected LobHandler lobHandler; + + public AbstractDbDialect(final JdbcTemplate jdbcTemplate, LobHandler lobHandler) { + this.jdbcTemplate = jdbcTemplate; + this.lobHandler = lobHandler; + + this.transactionTemplate = new TransactionTemplate(); + transactionTemplate.setTransactionManager(new DataSourceTransactionManager(jdbcTemplate.getDataSource())); + transactionTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + + } + + public AbstractDbDialect(JdbcTemplate jdbcTemplate, LobHandler lobHandler, String name, int majorVersion, + int minorVersion) { + this.jdbcTemplate = jdbcTemplate; + this.lobHandler = lobHandler; + + this.transactionTemplate = new TransactionTemplate(); + transactionTemplate.setTransactionManager(new DataSourceTransactionManager(jdbcTemplate.getDataSource())); + transactionTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + + this.databaseName = name; + this.databaseMajorVersion = majorVersion; + this.databaseMinorVersion = minorVersion; + + } + + public String getName() { + return databaseName; + } + + public int getMajorVersion() { + return databaseMajorVersion; + } + + @Override + public int getMinorVersion() { + return databaseMinorVersion; + } + + public String getVersion() { + return databaseMajorVersion + "." 
+ databaseMinorVersion; + } + + public LobHandler getLobHandler() { + return lobHandler; + } + + public JdbcTemplate getJdbcTemplate() { + return jdbcTemplate; + } + + public TransactionTemplate getTransactionTemplate() { + return transactionTemplate; + } + + public SqlTemplate getSqlTemplate() { + return sqlTemplate; + } + + public String getShardColumns(String schema, String table) { + return null; + } + + public void destory() { + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/DbDialect.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/DbDialect.java new file mode 100644 index 0000000000..781c2fe954 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/DbDialect.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.dialect; + +import org.apache.eventmesh.connector.canal.template.SqlTemplate; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.lob.LobHandler; +import org.springframework.transaction.support.TransactionTemplate; + +/** + * DbDialect + */ +public interface DbDialect { + + public String getName(); + + public String getVersion(); + + public int getMajorVersion(); + + public int getMinorVersion(); + + public String getDefaultSchema(); + + public String getDefaultCatalog(); + + public boolean isCharSpacePadded(); + + public boolean isCharSpaceTrimmed(); + + public boolean isEmptyStringNulled(); + + public boolean isSupportMergeSql(); + + public LobHandler getLobHandler(); + + public JdbcTemplate getJdbcTemplate(); + + public TransactionTemplate getTransactionTemplate(); + + public SqlTemplate getSqlTemplate(); + + public String getShardColumns(String schema, String table); + + public void destory(); +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/MysqlDialect.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/MysqlDialect.java new file mode 100644 index 0000000000..bfe5628716 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/dialect/MysqlDialect.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.dialect; + +import org.apache.eventmesh.connector.canal.template.MysqlSqlTemplate; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.lob.LobHandler; + + +public class MysqlDialect extends AbstractDbDialect { + + public MysqlDialect(JdbcTemplate jdbcTemplate, LobHandler lobHandler) { + super(jdbcTemplate, lobHandler); + sqlTemplate = new MysqlSqlTemplate(); + } + + public boolean isCharSpacePadded() { + return false; + } + + public boolean isCharSpaceTrimmed() { + return true; + } + + public boolean isEmptyStringNulled() { + return false; + } + + public boolean isSupportMergeSql() { + return true; + } + + public String getDefaultSchema() { + return null; + } + + public String getDefaultCatalog() { + return jdbcTemplate.queryForObject("select database()", String.class); + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java new file mode 100644 index 0000000000..7d83bd4f3f --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/interceptor/SqlBuilderLoadInterceptor.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.interceptor; + +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkIncrementConfig; +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.dialect.DbDialect; +import org.apache.eventmesh.connector.canal.model.EventColumn; +import org.apache.eventmesh.connector.canal.model.EventType; +import org.apache.eventmesh.connector.canal.template.SqlTemplate; + +import java.util.List; + +import org.springframework.util.CollectionUtils; + +import lombok.Getter; +import lombok.Setter; + +/** + * compute latest sql + */ +public class SqlBuilderLoadInterceptor { + + @Getter + @Setter + private DbDialect dbDialect; + + public boolean before(CanalSinkIncrementConfig sinkConfig, CanalConnectRecord record) { + // build sql + SqlTemplate sqlTemplate = dbDialect.getSqlTemplate(); + EventType type = record.getEventType(); + String sql = null; + + String schemaName = (record.isWithoutSchema() ? null : record.getSchemaName()); + + String shardColumns = null; + + if (type.isInsert()) { + sql = sqlTemplate.getMergeSql(schemaName, + record.getTableName(), + buildColumnNames(record.getKeys()), + buildColumnNames(record.getColumns()), + new String[] {}, + true, + shardColumns); + } else if (type.isUpdate()) { + boolean existOldKeys = !CollectionUtils.isEmpty(record.getOldKeys()); + boolean rowMode = sinkConfig.getSyncMode().isRow(); + String[] keyColumns = null; + String[] otherColumns = null; + if (existOldKeys) { + keyColumns = buildColumnNames(record.getOldKeys()); + otherColumns = buildColumnNames(record.getUpdatedColumns(), record.getKeys()); + } else { + keyColumns = buildColumnNames(record.getKeys()); + otherColumns = buildColumnNames(record.getUpdatedColumns()); + } + + if (rowMode && !existOldKeys) { + sql = sqlTemplate.getMergeSql(schemaName, + record.getTableName(), + keyColumns, + otherColumns, + new String[] {}, + true, + shardColumns); + } else { + sql = sqlTemplate.getUpdateSql(schemaName, record.getTableName(), keyColumns, otherColumns, true, shardColumns); + } + } else if (type.isDelete()) { + sql = sqlTemplate.getDeleteSql(schemaName, + record.getTableName(), + buildColumnNames(record.getKeys())); + } + + if (record.getHint() != null) { + record.setSql(record.getHint() + sql); + } else { + record.setSql(sql); + } + return false; + } + + private String[] buildColumnNames(List columns) { + String[] result = new String[columns.size()]; + for (int i = 0; i < columns.size(); i++) { + EventColumn column = columns.get(i); + result[i] = column.getColumnName(); + } + return result; + } + + private String[] buildColumnNames(List columns1, List columns2) { + String[] result = new String[columns1.size() + columns2.size()]; + int i = 0; + for (i = 0; i < columns1.size(); i++) { + EventColumn column = columns1.get(i); + result[i] = column.getColumnName(); + } + + for (; i < columns1.size() + columns2.size(); i++) { + EventColumn column = columns2.get(i - columns1.size()); + result[i] = column.getColumnName(); + } + return result; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventColumn.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventColumn.java new file mode 100644 index 0000000000..352fc060a0 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventColumn.java @@ -0,0 
+1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.model; + +import java.io.Serializable; + +import lombok.Getter; +import lombok.Setter; + +public class EventColumn implements Serializable { + + @Setter + @Getter + private int index; + + @Getter + @Setter + private int columnType; + + @Getter + @Setter + private String columnName; + + /** + * timestamp,Datetime is long + */ + @Setter + private String columnValue; + + private boolean isNull; + + private boolean isKey; + + private boolean isUpdate = true; + + public String getColumnValue() { + if (isNull) { + columnValue = null; + return null; + } else { + return columnValue; + } + } + + public boolean isNull() { + return isNull; + } + + public void setNull(boolean isNull) { + this.isNull = isNull; + } + + public boolean isKey() { + return isKey; + } + + public void setKey(boolean isKey) { + this.isKey = isKey; + } + + public boolean isUpdate() { + return isUpdate; + } + + public void setUpdate(boolean isUpdate) { + this.isUpdate = isUpdate; + } + + public EventColumn clone() { + EventColumn column = new EventColumn(); + column.setIndex(index); + column.setColumnName(columnName); + column.setColumnType(columnType); + column.setColumnValue(columnValue); + column.setKey(isKey); + column.setNull(isNull); + column.setUpdate(isUpdate); + return column; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((columnName == null) ? 0 : columnName.hashCode()); + result = prime * result + columnType; + result = prime * result + ((columnValue == null) ? 0 : columnValue.hashCode()); + result = prime * result + index; + result = prime * result + (isKey ? 1231 : 1237); + result = prime * result + (isNull ? 1231 : 1237); + result = prime * result + (isUpdate ? 
1231 : 1237); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + EventColumn other = (EventColumn) obj; + if (columnName == null) { + if (other.columnName != null) { + return false; + } + } else if (!columnName.equals(other.columnName)) { + return false; + } + if (columnType != other.columnType) { + return false; + } + if (columnValue == null) { + if (other.columnValue != null) { + return false; + } + } else if (!columnValue.equals(other.columnValue)) { + return false; + } + if (index != other.index) { + return false; + } + if (isKey != other.isKey) { + return false; + } + if (isNull != other.isNull) { + return false; + } + return isUpdate == other.isUpdate; + } + + @Override + public String toString() { + return "EventColumn{" + + "index=" + index + + ", columnType=" + columnType + + ", columnName='" + columnName + '\'' + + ", columnValue='" + columnValue + '\'' + + ", isNull=" + isNull + + ", isKey=" + isKey + + ", isUpdate=" + isUpdate + + '}'; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventColumnIndexComparable.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventColumnIndexComparable.java new file mode 100644 index 0000000000..ca55f57292 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventColumnIndexComparable.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.model; + +import java.util.Comparator; + +public class EventColumnIndexComparable implements Comparator { + + public int compare(EventColumn o1, EventColumn o2) { + return Integer.compare(o1.getIndex(), o2.getIndex()); + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventType.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventType.java new file mode 100644 index 0000000000..a1537c9f58 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/model/EventType.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.model; + +/** + * chang the eventtype num to I/U/D/C/A/E. + */ +public enum EventType { + + /** + * Insert row. + */ + INSERT("I"), + + /** + * Update row. + */ + UPDATE("U"), + + /** + * Delete row. + */ + DELETE("D"), + + /** + * Create table. + */ + CREATE("C"), + + /** + * Alter table. + */ + ALTER("A"), + + /** + * Erase table. + */ + ERASE("E"), + + /** + * Query. + */ + QUERY("Q"), + + /** + * Truncate. + */ + TRUNCATE("T"), + + /** + * rename. + */ + RENAME("R"), + + /** + * create index. + */ + CINDEX("CI"), + + /** + * drop index. + */ + DINDEX("DI"); + + private String value; + + private EventType(String value) { + this.value = value; + } + + public boolean isInsert() { + return this.equals(EventType.INSERT); + } + + public boolean isUpdate() { + return this.equals(EventType.UPDATE); + } + + public boolean isDelete() { + return this.equals(EventType.DELETE); + } + + public boolean isCreate() { + return this.equals(EventType.CREATE); + } + + public boolean isAlter() { + return this.equals(EventType.ALTER); + } + + public boolean isErase() { + return this.equals(EventType.ERASE); + } + + public boolean isQuery() { + return this.equals(EventType.QUERY); + } + + public boolean isTruncate() { + return this.equals(EventType.TRUNCATE); + } + + public boolean isRename() { + return this.equals(EventType.RENAME); + } + + public boolean isCindex() { + return this.equals(EventType.CINDEX); + } + + public boolean isDindex() { + return this.equals(EventType.DINDEX); + } + + public boolean isDdl() { + return isCreate() || isAlter() || isErase() || isTruncate() || isRename() || isCindex() || isDindex(); + } + + public boolean isDml() { + return isInsert() || isUpdate() || isDelete(); + } + + public static EventType valuesOf(String value) { + EventType[] eventTypes = values(); + for (EventType eventType : eventTypes) { + if (eventType.value.equalsIgnoreCase(value)) { + return eventType; + } + } + return null; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/server/CanalConnectServer.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/server/CanalConnectServer.java new file mode 100644 index 0000000000..6cc3d013dd --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/server/CanalConnectServer.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.server; + +import org.apache.eventmesh.connector.canal.config.CanalServerConfig; +import org.apache.eventmesh.connector.canal.sink.connector.CanalSinkConnector; +import org.apache.eventmesh.connector.canal.source.connector.CanalSourceConnector; +import org.apache.eventmesh.openconnect.Application; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalConnectServer { + + public static void main(String[] args) throws Exception { + + CanalServerConfig serverConfig = ConfigUtil.parse(CanalServerConfig.class, "server-config.yml"); + + if (serverConfig.isSourceEnable()) { + Application canalSourceApp = new Application(); + canalSourceApp.run(CanalSourceConnector.class); + } + + if (serverConfig.isSinkEnable()) { + Application canalSinkApp = new Application(); + canalSinkApp.run(CanalSinkConnector.class); + } + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadContext.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadContext.java new file mode 100644 index 0000000000..3498e87e7b --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadContext.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.sink; + +import org.apache.eventmesh.connector.canal.CanalConnectRecord; + +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + +import lombok.Data; + +@Data +public class DbLoadContext { + + private String gtid; + + private List lastProcessedRecords; + + private List prepareRecords; + + private List processedRecords; + + private List failedRecords; + + public DbLoadContext() { + lastProcessedRecords = Collections.synchronizedList(new LinkedList<>()); + prepareRecords = Collections.synchronizedList(new LinkedList<>()); + processedRecords = Collections.synchronizedList(new LinkedList<>()); + failedRecords = Collections.synchronizedList(new LinkedList<>()); + } + + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadData.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadData.java new file mode 100644 index 0000000000..ea48de7749 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadData.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink; + +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.model.EventType; + +import java.util.ArrayList; +import java.util.List; + +/** + * Classify the data according to the table and insert/update/delete types. + * + *

+ * Purpose of the classification: to optimize batch execution of insert statements.
+ * 1. Because of MySQL index restrictions, concurrent execution of insert statements needs to be avoided.
+ * 
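As a rough illustration of the classification described above: the DbLoadData class defined just below groups already-merged DML records per table and per change type, so each bucket can later be flushed as one batch. In the sketch, the records list and the DbLoadDataSketch class are placeholders; only the DbLoadData and TableLoadData calls come from the code below.

import org.apache.eventmesh.connector.canal.CanalConnectRecord;
import org.apache.eventmesh.connector.canal.sink.DbLoadData;
import org.apache.eventmesh.connector.canal.sink.DbLoadData.TableLoadData;

import java.util.List;

public class DbLoadDataSketch {

    public static void printBuckets(List<CanalConnectRecord> records) {
        // The constructor classifies each DML record by (schema, table) and change type.
        DbLoadData loadData = new DbLoadData(records);
        for (TableLoadData table : loadData.getTables()) {
            System.out.printf("%s.%s -> %d inserts, %d updates, %d deletes%n",
                table.getSchemaName(), table.getTableName(),
                table.getInsertDatas().size(),
                table.getUpdateDatas().size(),
                table.getDeleteDatas().size());
        }
    }
}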
+ */ +public class DbLoadData { + + private List tables = new ArrayList(); + + public DbLoadData() { + // nothing + } + + public DbLoadData(List records) { + for (CanalConnectRecord record : records) { + merge(record); + } + } + + public void merge(CanalConnectRecord record) { + TableLoadData tableData = findTableData(record); + + EventType type = record.getEventType(); + if (type.isInsert()) { + tableData.getInsertDatas().add(record); + } else if (type.isUpdate()) { + tableData.getUpdateDatas().add(record); + } else if (type.isDelete()) { + tableData.getDeleteDatas().add(record); + } + } + + public List getTables() { + return tables; + } + + private synchronized TableLoadData findTableData(CanalConnectRecord record) { + for (TableLoadData table : tables) { + if (table.getSchemaName().equals(record.getSchemaName()) + && table.getTableName().equals(record.getTableName())) { + return table; + } + } + + TableLoadData data = new TableLoadData(record.getSchemaName(), record.getTableName()); + tables.add(data); + return data; + } + + /** + * classify by table + */ + public static class TableLoadData { + + private String schemaName; + + private String tableName; + private List insertDatas = new ArrayList<>(); + private List upadateDatas = new ArrayList<>(); + private List deleteDatas = new ArrayList<>(); + + public TableLoadData(String schemaName, String tableName) { + this.schemaName = schemaName; + this.tableName = tableName; + } + + public List getInsertDatas() { + return insertDatas; + } + + public void setInsertDatas(List insertDatas) { + this.insertDatas = insertDatas; + } + + public List getUpdateDatas() { + return upadateDatas; + } + + public void setUpdateDatas(List upadateDatas) { + this.upadateDatas = upadateDatas; + } + + public List getDeleteDatas() { + return deleteDatas; + } + + public void setDeleteDatas(List deleteDatas) { + this.deleteDatas = deleteDatas; + } + + public String getSchemaName() { + return schemaName; + } + + public void setSchemaName(String schemaName) { + this.schemaName = schemaName; + } + + public String getTableName() { + return tableName; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + } + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadMerger.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadMerger.java new file mode 100644 index 0000000000..af53532dd8 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/DbLoadMerger.java @@ -0,0 +1,280 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.sink; + +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.model.EventColumn; +import org.apache.eventmesh.connector.canal.model.EventColumnIndexComparable; +import org.apache.eventmesh.connector.canal.model.EventType; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.springframework.util.CollectionUtils; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.slf4j.Slf4j; + + +/** + *
+ * Merges change records of the same schema and table.
+ * Merging multiple change records that share the same primary key (pk) yields one of:
+ * 1. I (Insert)
+ * 2. U (Update)
+ * 3. D (Delete)
+ * If there is one "I" (Insert) followed by several "U" (Update), they are merged into a single "I";
+ * if there are several "U" (Update), only the latest one is kept;
+ * 
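A short sketch of the merge rules listed above, using the DbLoadMerger.merge entry point defined just below. How the CanalConnectRecord instances are produced is out of scope here; the DbLoadMergerSketch class is illustrative only.

import org.apache.eventmesh.connector.canal.CanalConnectRecord;
import org.apache.eventmesh.connector.canal.sink.DbLoadMerger;

import java.util.List;

public class DbLoadMergerSketch {

    // 'changes' stands for one batch of raw change records, e.g. an INSERT on pk=1
    // followed by two UPDATEs on the same pk; how the records are built is not shown here.
    public static List<CanalConnectRecord> compact(List<CanalConnectRecord> changes) {
        // After merging, at most one record per (schema, table, pk) remains. Per the rules
        // in the comment above, the I followed by two Us folds into a single I that carries
        // the latest column values.
        return DbLoadMerger.merge(changes);
    }
}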
+ */ +@Slf4j +public class DbLoadMerger { + + /** + * Merge a batch of data based on table and primary key information, + * ensuring that there is only one record for each primary key in a table + * + * @param eventDatas + * @return + */ + public static List merge(List eventDatas) { + Map result = new LinkedHashMap(); + for (CanalConnectRecord eventData : eventDatas) { + merge(eventData, result); + } + return new LinkedList<>(result.values()); + } + + public static void merge(CanalConnectRecord record, Map result) { + EventType eventType = record.getEventType(); + switch (eventType) { + case INSERT: + mergeInsert(record, result); + break; + case UPDATE: + mergeUpdate(record, result); + break; + case DELETE: + mergeDelete(record, result); + break; + default: + break; + } + } + + private static void mergeInsert(CanalConnectRecord record, Map result) { + RowKey rowKey = new RowKey(record.getSchemaName(), record.getTableName(), + record.getKeys()); + if (!result.containsKey(rowKey)) { + result.put(rowKey, record); + } else { + CanalConnectRecord oldRecord = result.get(rowKey); + record.setSize(oldRecord.getSize() + record.getSize()); + if (oldRecord.getEventType() == EventType.DELETE) { + result.put(rowKey, record); + } else if (record.getEventType() == EventType.UPDATE + || record.getEventType() == EventType.INSERT) { + log.warn("update-insert/insert-insert happend. before[{}] , after[{}]", oldRecord, record); + CanalConnectRecord mergeEventData = replaceColumnValue(record, oldRecord); + mergeEventData.getOldKeys().clear(); + result.put(rowKey, mergeEventData); + } + } + } + + private static void mergeUpdate(CanalConnectRecord record, Map result) { + RowKey rowKey = new RowKey(record.getSchemaName(), record.getTableName(), record.getKeys()); + if (!CollectionUtils.isEmpty(record.getOldKeys())) { + RowKey oldKey = new RowKey(record.getSchemaName(), record.getTableName(), + record.getOldKeys()); + if (!result.containsKey(oldKey)) { + result.put(rowKey, record); + } else { + CanalConnectRecord oldRecord = result.get(oldKey); + record.setSize(oldRecord.getSize() + record.getSize()); + if (oldRecord.getEventType() == EventType.INSERT) { + record.setEventType(EventType.INSERT); + result.remove(oldKey); + + CanalConnectRecord mergeEventData = replaceColumnValue(record, oldRecord); + mergeEventData.getOldKeys().clear(); + result.put(rowKey, mergeEventData); + } else if (oldRecord.getEventType() == EventType.UPDATE) { + result.remove(oldKey); + CanalConnectRecord mergeEventData = replaceColumnValue(record, oldRecord); + result.put(rowKey, mergeEventData); + } else { + throw new RuntimeException("delete(has old pks) + update impossible happed!"); + } + } + } else { + if (!result.containsKey(rowKey)) { + result.put(rowKey, record); + } else { + CanalConnectRecord oldRecord = result.get(rowKey); + if (oldRecord.getEventType() == EventType.INSERT) { + oldRecord.setEventType(EventType.INSERT); + + CanalConnectRecord mergeEventData = replaceColumnValue(record, oldRecord); + result.put(rowKey, mergeEventData); + } else if (oldRecord.getEventType() == EventType.UPDATE) { + CanalConnectRecord mergeEventData = replaceColumnValue(record, oldRecord); + result.put(rowKey, mergeEventData); + } else if (oldRecord.getEventType() == EventType.DELETE) { + result.put(rowKey, record); + } + } + } + } + + private static void mergeDelete(CanalConnectRecord record, Map result) { + RowKey rowKey = new RowKey(record.getSchemaName(), record.getTableName(), + record.getKeys()); + if (!result.containsKey(rowKey)) { + 
result.put(rowKey, record); + } else { + CanalConnectRecord oldRecord = result.get(rowKey); + record.setSize(oldRecord.getSize() + record.getSize()); + if (!CollectionUtils.isEmpty(oldRecord.getOldKeys())) { + record.setKeys(oldRecord.getOldKeys()); + record.getOldKeys().clear(); + + result.remove(rowKey); + result.put(new RowKey(record.getSchemaName(), record.getTableName(), + record.getKeys()), record); + } else { + record.getOldKeys().clear(); + result.put(rowKey, record); + } + + } + } + + /** + * Merge the old value that exists in the old record and does not exist in the new record into the new record, + * and save the old primary key of the last change to the old primary key of this change. + * + * @param newRecord + * @param oldRecord + * @return + */ + private static CanalConnectRecord replaceColumnValue(CanalConnectRecord newRecord, CanalConnectRecord oldRecord) { + List newColumns = newRecord.getColumns(); + List oldColumns = oldRecord.getColumns(); + List temp = new ArrayList<>(); + for (EventColumn oldColumn : oldColumns) { + boolean contain = false; + for (EventColumn newColumn : newColumns) { + if (oldColumn.getColumnName().equalsIgnoreCase(newColumn.getColumnName())) { + newColumn.setUpdate(newColumn.isUpdate() || oldColumn.isUpdate()); + contain = true; + } + } + + if (!contain) { + temp.add(oldColumn); + } + } + newColumns.addAll(temp); + Collections.sort(newColumns, new EventColumnIndexComparable()); + newRecord.setOldKeys(oldRecord.getOldKeys()); + if (oldRecord.getSyncConsistency() != null) { + newRecord.setSyncConsistency(oldRecord.getSyncConsistency()); + } + if (oldRecord.getSyncMode() != null) { + newRecord.setSyncMode(oldRecord.getSyncMode()); + } + + if (oldRecord.isRemedy()) { + newRecord.setRemedy(true); + } + newRecord.setSize(oldRecord.getSize() + newRecord.getSize()); + return newRecord; + } + + @Setter + @Getter + public static class RowKey implements Serializable { + + private String schemaName; + private String tableName; + + public RowKey(String schemaName, String tableName, List keys) { + this.schemaName = schemaName; + this.tableName = tableName; + this.keys = keys; + } + + public RowKey(List keys) { + this.keys = keys; + } + + private List keys = new ArrayList(); + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((keys == null) ? 0 : keys.hashCode()); + result = prime * result + ((schemaName == null) ? 0 : schemaName.hashCode()); + result = prime * result + ((tableName == null) ? 
0 : tableName.hashCode()); + return result; + } + + @SuppressWarnings("checkstyle:NeedBraces") + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof RowKey)) { + return false; + } + RowKey other = (RowKey) obj; + if (keys == null) { + if (other.keys != null) { + return false; + } + } else if (!keys.equals(other.keys)) { + return false; + } + if (schemaName == null) { + if (other.schemaName != null) { + return false; + } + } else if (!schemaName.equals(other.schemaName)) { + return false; + } + if (tableName == null) { + return other.tableName == null; + } else { + return tableName.equals(other.tableName); + } + } + + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/GtidBatch.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/GtidBatch.java new file mode 100644 index 0000000000..dd6559b832 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/GtidBatch.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink; + +import org.apache.eventmesh.connector.canal.CanalConnectRecord; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +public class GtidBatch { + private int totalBatches; + private List> batches; + private int receivedBatchCount; + + public GtidBatch(int totalBatches) { + this.totalBatches = totalBatches; + this.batches = new CopyOnWriteArrayList<>(new List[totalBatches]); + this.receivedBatchCount = 0; + } + + public void addBatch(int batchIndex, List batchRecords) { + batches.set(batchIndex, batchRecords); + receivedBatchCount++; + } + + public List> getBatches() { + return batches; + } + + public boolean isComplete() { + return receivedBatchCount == totalBatches; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/GtidBatchManager.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/GtidBatchManager.java new file mode 100644 index 0000000000..30060aa8f5 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/GtidBatchManager.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink; + +import org.apache.eventmesh.connector.canal.CanalConnectRecord; + +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +public class GtidBatchManager { + + private static ConcurrentHashMap gtidBatchMap = new ConcurrentHashMap<>(); + + public static void addBatch(String gtid, int batchIndex, int totalBatches, List batchRecords) { + gtidBatchMap.computeIfAbsent(gtid, k -> new GtidBatch(totalBatches)).addBatch(batchIndex, batchRecords); + } + + public static GtidBatch getGtidBatch(String gtid) { + return gtidBatchMap.get(gtid); + } + + public static boolean isComplete(String gtid) { + GtidBatch batch = gtidBatchMap.get(gtid); + return batch != null && batch.isComplete(); + } + + public static void removeGtidBatch(String gtid) { + gtidBatchMap.remove(gtid); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java new file mode 100644 index 0000000000..84e01ca85c --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkCheckConnector.java @@ -0,0 +1,406 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
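The GtidBatch and GtidBatchManager classes above buffer the pieces of a transaction that was split into several batches under one GTID. Below is a hedged sketch of the intended call sequence; the GtidBatchSketch class and the onBatch method are placeholders, while the manager calls are the ones shown above.

import org.apache.eventmesh.connector.canal.CanalConnectRecord;
import org.apache.eventmesh.connector.canal.sink.GtidBatch;
import org.apache.eventmesh.connector.canal.sink.GtidBatchManager;

import java.util.List;

public class GtidBatchSketch {

    // Called once per incoming batch; batchIndex/totalBatches are assumed to travel with the
    // records (the increment sink later reads them from ConnectRecord extensions).
    public static void onBatch(String gtid, int batchIndex, int totalBatches, List<CanalConnectRecord> records) {
        GtidBatchManager.addBatch(gtid, batchIndex, totalBatches, records);
        if (GtidBatchManager.isComplete(gtid)) {
            GtidBatch batch = GtidBatchManager.getGtidBatch(gtid);
            for (List<CanalConnectRecord> part : batch.getBatches()) {
                // load 'part' into the target database, in batch order
            }
            // Drop the buffer once the whole transaction has been written.
            GtidBatchManager.removeGtidBatch(gtid);
        }
    }
}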
+ */ + +package org.apache.eventmesh.connector.canal.sink.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.SqlUtils; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import org.apache.commons.lang3.StringUtils; + +import java.math.BigDecimal; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Types; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.LockSupport; + +import com.alibaba.druid.pool.DruidPooledConnection; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSinkCheckConnector implements Sink, ConnectorCreateService { + private CanalSinkFullConfig config; + private RdbTableMgr tableMgr; + private final DateTimeFormatter dataTimePattern = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSSSSS"); + + @Override + public void start() throws Exception { + tableMgr.start(); + } + + @Override + public void stop() throws Exception { + + } + + @Override + public Sink create() { + return new CanalSinkCheckConnector(); + } + + @Override + public Class configClass() { + return CanalSinkFullConfig.class; + } + + @Override + public void init(Config config) throws Exception { + this.config = (CanalSinkFullConfig) config; + init(); + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + this.config = (CanalSinkFullConfig) ((SinkConnectorContext) connectorContext).getSinkConfig(); + init(); + } + + private void init() { + if (config.getSinkConnectorConfig() == null) { + throw new EventMeshException(String.format("[%s] sink config is null", this.getClass())); + } + DatabaseConnection.sinkConfig = this.config.getSinkConnectorConfig(); + DatabaseConnection.initSinkConnection(); + DatabaseConnection.sinkDataSource.setDefaultAutoCommit(false); + + tableMgr = new RdbTableMgr(this.config.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); + } + + @Override + public void commit(ConnectRecord record) { + + } + + @Override + public String name() { + return null; + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void put(List sinkRecords) { + if (sinkRecords == null || sinkRecords.isEmpty() || sinkRecords.get(0) == null) { + if (log.isDebugEnabled()) { + log.debug("[{}] got sink records are none", this.getClass()); + } + return; + } + ConnectRecord record = sinkRecords.get(0); + List> data = (List>) record.getData(); + if (data == null || data.isEmpty()) { + if 
(log.isDebugEnabled()) { + log.debug("[{}] got rows data is none", this.getClass()); + } + return; + } + CanalFullRecordOffset offset = (CanalFullRecordOffset) record.getPosition().getRecordOffset(); + if (offset == null || offset.getPosition() == null) { + if (log.isDebugEnabled()) { + log.debug("[{}] got canal full offset is none", this.getClass()); + } + return; + } + + MySQLTableDef tableDefinition = (MySQLTableDef) tableMgr.getTable(offset.getPosition().getSchema(), offset.getPosition().getTableName()); + if (tableDefinition == null) { + log.warn("target schema [{}] table [{}] is not exists", offset.getPosition().getSchema(), offset.getPosition().getTableName()); + return; + } + List cols = new ArrayList<>(tableDefinition.getColumnDefinitions().values()); + String sql = generateInsertPrepareSql(offset.getPosition().getSchema(), offset.getPosition().getTableName(), + cols); + DruidPooledConnection connection = null; + PreparedStatement statement = null; + try { + connection = DatabaseConnection.sinkDataSource.getConnection(); + statement = + connection.prepareStatement(sql); + for (Map col : data) { + setPrepareParams(statement, col, cols); + log.info("insert sql {}", statement.toString()); + statement.addBatch(); + } + statement.executeBatch(); + connection.commit(); + } catch (SQLException e) { + log.warn("full sink process schema [{}] table [{}] connector write fail", tableDefinition.getSchemaName(), tableDefinition.getTableName(), + e); + LockSupport.parkNanos(3000 * 1000L); + } catch (Exception e) { + log.error("full sink process schema [{}] table [{}] catch unknown exception", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + try { + if (connection != null && !connection.isClosed()) { + connection.rollback(); + } + } catch (SQLException rollback) { + log.warn("full sink process schema [{}] table [{}] rollback fail", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + } + } finally { + if (statement != null) { + try { + statement.close(); + } catch (SQLException e) { + log.info("close prepare statement fail", e); + } + } + + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + log.info("close db connection fail", e); + } + } + } + } + + private void setPrepareParams(PreparedStatement preparedStatement, Map col, List columnDefs) throws Exception { + for (int i = 0; i < columnDefs.size(); i++) { + writeColumn(preparedStatement, i + 1, columnDefs.get(i), col.get(columnDefs.get(i).getName())); + } + } + + public void writeColumn(PreparedStatement ps, int index, MySQLColumnDef colType, Object value) throws Exception { + if (colType == null) { + String colVal = null; + if (value != null) { + colVal = value.toString(); + } + if (colVal == null) { + ps.setNull(index, Types.VARCHAR); + } else { + ps.setString(index, colVal); + } + } else if (value == null) { + ps.setNull(index, colType.getJdbcType().getVendorTypeNumber()); + } else { + switch (colType.getType()) { + case TINYINT: + case SMALLINT: + case MEDIUMINT: + case INT: + Long longValue = SqlUtils.toLong(value); + if (longValue == null) { + ps.setNull(index, 4); + return; + } else { + ps.setLong(index, longValue); + return; + } + case BIGINT: + case DECIMAL: + BigDecimal bigDecimalValue = SqlUtils.toBigDecimal(value); + if (bigDecimalValue == null) { + ps.setNull(index, 3); + return; + } else { + ps.setBigDecimal(index, bigDecimalValue); + return; + } + case FLOAT: + case DOUBLE: + Double doubleValue = SqlUtils.toDouble(value); + if (doubleValue == null) { 
+ ps.setNull(index, 8); + } else { + ps.setDouble(index, doubleValue); + } + return; + case DATE: + case DATETIME: + case TIMESTAMP: + LocalDateTime dateValue = null; + if (!SqlUtils.isZeroTime(value)) { + try { + dateValue = SqlUtils.toLocalDateTime(value); + } catch (Exception e) { + ps.setString(index, SqlUtils.convertToString(value)); + return; + } + } else if (StringUtils.isNotBlank(config.getZeroDate())) { + dateValue = SqlUtils.toLocalDateTime(config.getZeroDate()); + } else { + ps.setObject(index, value); + return; + } + if (dateValue == null) { + ps.setNull(index, Types.TIMESTAMP); + } else { + ps.setString(index, dataTimePattern.format(dateValue)); + } + return; + case TIME: + String timeValue = SqlUtils.toMySqlTime(value); + if (StringUtils.isBlank(timeValue)) { + ps.setNull(index, 12); + return; + } else { + ps.setString(index, timeValue); + return; + } + case YEAR: + LocalDateTime yearValue = null; + if (!SqlUtils.isZeroTime(value)) { + yearValue = SqlUtils.toLocalDateTime(value); + } else if (StringUtils.isNotBlank(config.getZeroDate())) { + yearValue = SqlUtils.toLocalDateTime(config.getZeroDate()); + } else { + ps.setInt(index, 0); + return; + } + if (yearValue == null) { + ps.setNull(index, 4); + } else { + ps.setInt(index, yearValue.getYear()); + } + return; + case CHAR: + case VARCHAR: + case TINYTEXT: + case TEXT: + case MEDIUMTEXT: + case LONGTEXT: + case ENUM: + case SET: + String strValue = value.toString(); + if (strValue == null) { + ps.setNull(index, Types.VARCHAR); + return; + } else { + ps.setString(index, strValue); + return; + } + case JSON: + String jsonValue = value.toString(); + if (jsonValue == null) { + ps.setNull(index, Types.VARCHAR); + } else { + ps.setString(index, jsonValue); + } + return; + case BIT: + if (value instanceof Boolean) { + byte[] arrayBoolean = new byte[1]; + arrayBoolean[0] = (byte) (Boolean.TRUE.equals(value) ? 
1 : 0); + ps.setBytes(index, arrayBoolean); + return; + } else if (value instanceof Number) { + ps.setBytes(index, SqlUtils.numberToBinaryArray((Number) value)); + return; + } else if ((value instanceof byte[]) || value.toString().startsWith("0x") || value.toString().startsWith("0X")) { + byte[] arrayBoolean = SqlUtils.toBytes(value); + if (arrayBoolean == null || arrayBoolean.length == 0) { + ps.setNull(index, Types.BIT); + return; + } else { + ps.setBytes(index, arrayBoolean); + return; + } + } else { + ps.setBytes(index, SqlUtils.numberToBinaryArray(SqlUtils.toInt(value))); + return; + } + case BINARY: + case VARBINARY: + case TINYBLOB: + case BLOB: + case MEDIUMBLOB: + case LONGBLOB: + byte[] binaryValue = SqlUtils.toBytes(value); + if (binaryValue == null) { + ps.setNull(index, Types.BINARY); + return; + } else { + ps.setBytes(index, binaryValue); + return; + } + case GEOMETRY: + case GEOMETRY_COLLECTION: + case GEOM_COLLECTION: + case POINT: + case LINESTRING: + case POLYGON: + case MULTIPOINT: + case MULTILINESTRING: + case MULTIPOLYGON: + String geoValue = SqlUtils.toGeometry(value); + if (geoValue == null) { + ps.setNull(index, Types.VARCHAR); + return; + } + ps.setString(index, geoValue); + return; + default: + throw new UnsupportedOperationException("columnType '" + colType + "' Unsupported."); + } + } + } + + private String generateInsertPrepareSql(String schema, String table, List cols) { + StringBuilder builder = new StringBuilder(); + builder.append("INSERT IGNORE INTO "); + builder.append(Constants.MySQLQuot); + builder.append(schema); + builder.append(Constants.MySQLQuot); + builder.append("."); + builder.append(Constants.MySQLQuot); + builder.append(table); + builder.append(Constants.MySQLQuot); + StringBuilder columns = new StringBuilder(); + StringBuilder values = new StringBuilder(); + for (MySQLColumnDef colInfo : cols) { + if (columns.length() > 0) { + columns.append(", "); + values.append(", "); + } + String wrapName = Constants.MySQLQuot + colInfo.getName() + Constants.MySQLQuot; + columns.append(wrapName); + values.append(colInfo.getType() == null ? "?" : colInfo.getType().genPrepareStatement4Insert()); + } + builder.append("(").append(columns).append(")"); + builder.append(" VALUES "); + builder.append("(").append(values).append(")"); + return builder.toString(); + } + + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java new file mode 100644 index 0000000000..b03df2dfff --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkConnector.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSinkConnector implements Sink, ConnectorCreateService { + + private CanalSinkConfig sinkConfig; + + private Sink sink; + + @Override + public Class configClass() { + return CanalSinkConfig.class; + } + + @Override + public void init(Config config) throws Exception { + // init config for canal source connector + this.sinkConfig = (CanalSinkConfig) config; + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + // init config for canal source connector + SinkConnectorContext sinkConnectorContext = (SinkConnectorContext) connectorContext; + if (sinkConnectorContext.getJobType().equals(JobType.FULL)) { + this.sink = new CanalSinkFullConnector(); + } else if (sinkConnectorContext.getJobType().equals(JobType.INCREASE)) { + this.sink = new CanalSinkIncrementConnector(); + } else if (sinkConnectorContext.getJobType().equals(JobType.CHECK)) { + this.sink = new CanalSinkCheckConnector(); + } else { + throw new RuntimeException("unsupported job type " + sinkConnectorContext.getJobType()); + } + this.sink.init(sinkConnectorContext); + } + + @Override + public void start() throws Exception { + this.sink.start(); + } + + @Override + public void commit(ConnectRecord record) { + this.sink.commit(record); + } + + @Override + public String name() { + return this.sink.name(); + } + + @Override + public void onException(ConnectRecord record) { + this.sink.onException(record); + } + + @Override + public void stop() throws Exception { + this.sink.stop(); + } + + @Override + public void put(List sinkRecords) { + this.sink.put(sinkRecords); + } + + @Override + public Sink create() { + return new CanalSinkConnector(); + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java new file mode 100644 index 0000000000..4137123922 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkFullConnector.java @@ -0,0 +1,439 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.SqlUtils; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import org.apache.commons.lang3.StringUtils; + +import java.math.BigDecimal; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Types; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.LockSupport; + +import com.alibaba.druid.pool.DruidPooledConnection; +import com.fasterxml.jackson.core.type.TypeReference; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSinkFullConnector implements Sink, ConnectorCreateService { + + private CanalSinkFullConfig config; + private RdbTableMgr tableMgr; + private final DateTimeFormatter dataTimePattern = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSSSSS"); + + @Override + public void start() throws Exception { + tableMgr.start(); + } + + @Override + public void stop() throws Exception { + + } + + @Override + public Sink create() { + return new CanalSinkFullConnector(); + } + + @Override + public Class configClass() { + return CanalSinkFullConfig.class; + } + + @Override + public void init(Config config) throws Exception { + this.config = (CanalSinkFullConfig) config; + init(); + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SinkConnectorContext sinkConnectorContext = (SinkConnectorContext) connectorContext; + CanalSinkConfig canalSinkConfig = (CanalSinkConfig) 
sinkConnectorContext.getSinkConfig(); + this.config = ConfigUtil.parse(canalSinkConfig.getSinkConfig(), CanalSinkFullConfig.class); + init(); + } + + private void init() { + if (config.getSinkConnectorConfig() == null) { + throw new EventMeshException(String.format("[%s] sink config is null", this.getClass())); + } + DatabaseConnection.sinkConfig = this.config.getSinkConnectorConfig(); + DatabaseConnection.initSinkConnection(); + DatabaseConnection.sinkDataSource.setDefaultAutoCommit(false); + + tableMgr = new RdbTableMgr(this.config.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); + } + + @Override + public void commit(ConnectRecord record) { + + } + + @Override + public String name() { + return null; + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void put(List sinkRecords) { + if (sinkRecords == null || sinkRecords.isEmpty() || sinkRecords.get(0) == null) { + if (log.isDebugEnabled()) { + log.debug("[{}] got sink records are none", this.getClass()); + } + return; + } + ConnectRecord record = sinkRecords.get(0); + List> data = + JsonUtils.parseTypeReferenceObject((byte[]) record.getData(), new TypeReference>>() { + }); + if (data == null || data.isEmpty()) { + if (log.isDebugEnabled()) { + log.debug("[{}] got rows data is none", this.getClass()); + } + return; + } + CanalFullRecordOffset offset = (CanalFullRecordOffset) record.getPosition().getRecordOffset(); + if (offset == null || offset.getPosition() == null) { + if (log.isDebugEnabled()) { + log.debug("[{}] got canal full offset is none", this.getClass()); + } + return; + } + + MySQLTableDef tableDefinition = (MySQLTableDef) tableMgr.getTable(offset.getPosition().getSchema(), offset.getPosition().getTableName()); + if (tableDefinition == null) { + log.warn("target schema [{}] table [{}] is not exists", offset.getPosition().getSchema(), offset.getPosition().getTableName()); + return; + } + List cols = new ArrayList<>(tableDefinition.getColumnDefinitions().values()); + String sql = generateInsertPrepareSql(offset.getPosition().getSchema(), offset.getPosition().getTableName(), + cols); + DruidPooledConnection connection = null; + PreparedStatement statement = null; + try { + connection = DatabaseConnection.sinkDataSource.getConnection(); + statement = + connection.prepareStatement(sql); + for (Map col : data) { + setPrepareParams(statement, col, cols); + log.info("insert sql {}", statement.toString()); + statement.addBatch(); + } + statement.executeBatch(); + connection.commit(); + record.getCallback().onSuccess(convertToSendResult(record)); + } catch (SQLException e) { + log.warn("full sink process schema [{}] table [{}] connector write fail", tableDefinition.getSchemaName(), tableDefinition.getTableName(), + e); + LockSupport.parkNanos(3000 * 1000L); + record.getCallback().onException(buildSendExceptionContext(record, e)); + } catch (Exception e) { + log.error("full sink process schema [{}] table [{}] catch unknown exception", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + record.getCallback().onException(buildSendExceptionContext(record, e)); + try { + if (connection != null && !connection.isClosed()) { + connection.rollback(); + } + } catch (SQLException rollback) { + log.warn("full sink process schema [{}] table [{}] rollback fail", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + } + } finally { + if (statement != null) { + try { + statement.close(); + } catch (SQLException e) { + log.info("close prepare statement fail", 
e); + } + } + + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + log.info("close db connection fail", e); + } + } + } + } + + private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { + SendExceptionContext sendExceptionContext = new SendExceptionContext(); + sendExceptionContext.setMessageId(record.getRecordId()); + sendExceptionContext.setCause(e); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + sendExceptionContext.setTopic(record.getExtension("topic")); + } + return sendExceptionContext; + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + result.setMessageId(record.getRecordId()); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + + private void setPrepareParams(PreparedStatement preparedStatement, Map col, List columnDefs) throws Exception { + for (int i = 0; i < columnDefs.size(); i++) { + writeColumn(preparedStatement, i + 1, columnDefs.get(i), col.get(columnDefs.get(i).getName())); + } + } + + public void writeColumn(PreparedStatement ps, int index, MySQLColumnDef colType, Object value) throws Exception { + if (colType == null) { + String colVal = null; + if (value != null) { + colVal = value.toString(); + } + if (colVal == null) { + ps.setNull(index, Types.VARCHAR); + } else { + ps.setString(index, colVal); + } + } else if (value == null) { + ps.setNull(index, colType.getJdbcType().getVendorTypeNumber()); + } else { + switch (colType.getType()) { + case TINYINT: + case SMALLINT: + case MEDIUMINT: + case INT: + Long longValue = SqlUtils.toLong(value); + if (longValue == null) { + ps.setNull(index, 4); + return; + } else { + ps.setLong(index, longValue); + return; + } + case BIGINT: + case DECIMAL: + BigDecimal bigDecimalValue = SqlUtils.toBigDecimal(value); + if (bigDecimalValue == null) { + ps.setNull(index, 3); + return; + } else { + ps.setBigDecimal(index, bigDecimalValue); + return; + } + case FLOAT: + case DOUBLE: + Double doubleValue = SqlUtils.toDouble(value); + if (doubleValue == null) { + ps.setNull(index, 8); + } else { + ps.setDouble(index, doubleValue); + } + return; + case DATE: + case DATETIME: + case TIMESTAMP: + LocalDateTime dateValue = null; + if (!SqlUtils.isZeroTime(value)) { + try { + dateValue = SqlUtils.toLocalDateTime(value); + } catch (Exception e) { + ps.setString(index, SqlUtils.convertToString(value)); + return; + } + } else if (StringUtils.isNotBlank(config.getZeroDate())) { + dateValue = SqlUtils.toLocalDateTime(config.getZeroDate()); + } else { + ps.setObject(index, value); + return; + } + if (dateValue == null) { + ps.setNull(index, Types.TIMESTAMP); + } else { + ps.setString(index, dataTimePattern.format(dateValue)); + } + return; + case TIME: + String timeValue = SqlUtils.toMySqlTime(value); + if (StringUtils.isBlank(timeValue)) { + ps.setNull(index, 12); + return; + } else { + ps.setString(index, timeValue); + return; + } + case YEAR: + LocalDateTime yearValue = null; + if (!SqlUtils.isZeroTime(value)) { + yearValue = SqlUtils.toLocalDateTime(value); + } else if (StringUtils.isNotBlank(config.getZeroDate())) { + yearValue = SqlUtils.toLocalDateTime(config.getZeroDate()); + } else { + ps.setInt(index, 0); + return; + } + if (yearValue == null) { + ps.setNull(index, 4); + } else { + ps.setInt(index, yearValue.getYear()); + } + return; + case CHAR: + case VARCHAR: + 
case TINYTEXT: + case TEXT: + case MEDIUMTEXT: + case LONGTEXT: + case ENUM: + case SET: + String strValue = value.toString(); + if (strValue == null) { + ps.setNull(index, Types.VARCHAR); + return; + } else { + ps.setString(index, strValue); + return; + } + case JSON: + String jsonValue = value.toString(); + if (jsonValue == null) { + ps.setNull(index, Types.VARCHAR); + } else { + ps.setString(index, jsonValue); + } + return; + case BIT: + if (value instanceof Boolean) { + byte[] arrayBoolean = new byte[1]; + arrayBoolean[0] = (byte) (Boolean.TRUE.equals(value) ? 1 : 0); + ps.setBytes(index, arrayBoolean); + return; + } else if (value instanceof Number) { + ps.setBytes(index, SqlUtils.numberToBinaryArray((Number) value)); + return; + } else if ((value instanceof byte[]) || value.toString().startsWith("0x") || value.toString().startsWith("0X")) { + byte[] arrayBoolean = SqlUtils.toBytes(value); + if (arrayBoolean == null || arrayBoolean.length == 0) { + ps.setNull(index, Types.BIT); + return; + } else { + ps.setBytes(index, arrayBoolean); + return; + } + } else { + ps.setBytes(index, SqlUtils.numberToBinaryArray(SqlUtils.toInt(value))); + return; + } + case BINARY: + case VARBINARY: + case TINYBLOB: + case BLOB: + case MEDIUMBLOB: + case LONGBLOB: + byte[] binaryValue = SqlUtils.toBytes(value); + if (binaryValue == null) { + ps.setNull(index, Types.BINARY); + return; + } else { + ps.setBytes(index, binaryValue); + return; + } + case GEOMETRY: + case GEOMETRY_COLLECTION: + case GEOM_COLLECTION: + case POINT: + case LINESTRING: + case POLYGON: + case MULTIPOINT: + case MULTILINESTRING: + case MULTIPOLYGON: + String geoValue = SqlUtils.toGeometry(value); + if (geoValue == null) { + ps.setNull(index, Types.VARCHAR); + return; + } + ps.setString(index, geoValue); + return; + default: + throw new UnsupportedOperationException("columnType '" + colType + "' Unsupported."); + } + } + } + + private String generateInsertPrepareSql(String schema, String table, List cols) { + StringBuilder builder = new StringBuilder(); + builder.append("INSERT IGNORE INTO "); + builder.append(Constants.MySQLQuot); + builder.append(schema); + builder.append(Constants.MySQLQuot); + builder.append("."); + builder.append(Constants.MySQLQuot); + builder.append(table); + builder.append(Constants.MySQLQuot); + StringBuilder columns = new StringBuilder(); + StringBuilder values = new StringBuilder(); + for (MySQLColumnDef colInfo : cols) { + if (columns.length() > 0) { + columns.append(", "); + values.append(", "); + } + String wrapName = Constants.MySQLQuot + colInfo.getName() + Constants.MySQLQuot; + columns.append(wrapName); + values.append(colInfo.getType() == null ? "?" 
: colInfo.getType().genPrepareStatement4Insert()); + } + builder.append("(").append(columns).append(")"); + builder.append(" VALUES "); + builder.append("(").append(values).append(")"); + return builder.toString(); + } + + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java new file mode 100644 index 0000000000..e165a5ffe6 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/sink/connector/CanalSinkIncrementConnector.java @@ -0,0 +1,865 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.sink.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSinkIncrementConfig; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.SqlUtils; +import org.apache.eventmesh.connector.canal.dialect.DbDialect; +import org.apache.eventmesh.connector.canal.dialect.MysqlDialect; +import org.apache.eventmesh.connector.canal.interceptor.SqlBuilderLoadInterceptor; +import org.apache.eventmesh.connector.canal.model.EventColumn; +import org.apache.eventmesh.connector.canal.model.EventType; +import org.apache.eventmesh.connector.canal.sink.DbLoadContext; +import org.apache.eventmesh.connector.canal.sink.DbLoadData; +import org.apache.eventmesh.connector.canal.sink.DbLoadData.TableLoadData; +import org.apache.eventmesh.connector.canal.sink.DbLoadMerger; +import org.apache.eventmesh.connector.canal.sink.GtidBatch; +import org.apache.eventmesh.connector.canal.sink.GtidBatchManager; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import org.apache.commons.lang.StringUtils; +import 
org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.lang3.SerializationUtils; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import org.springframework.dao.DataAccessException; +import org.springframework.dao.DeadlockLoserDataAccessException; +import org.springframework.jdbc.core.BatchPreparedStatementSetter; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.PreparedStatementSetter; +import org.springframework.jdbc.core.StatementCallback; +import org.springframework.jdbc.core.StatementCreatorUtils; +import org.springframework.jdbc.support.lob.DefaultLobHandler; +import org.springframework.jdbc.support.lob.LobCreator; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.util.CollectionUtils; + +import com.alibaba.otter.canal.common.utils.NamedThreadFactory; +import com.fasterxml.jackson.core.type.TypeReference; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSinkIncrementConnector implements Sink, ConnectorCreateService { + + private CanalSinkIncrementConfig sinkConfig; + + private JdbcTemplate jdbcTemplate; + + private SqlBuilderLoadInterceptor interceptor; + + private DbDialect dbDialect; + + private ExecutorService executor; + + private ExecutorService gtidSingleExecutor; + + private int batchSize = 50; + + private boolean useBatch = true; + + private RdbTableMgr tableMgr; + + @Override + public Class configClass() { + return CanalSinkIncrementConfig.class; + } + + @Override + public void init(Config config) throws Exception { + // init config for canal source connector + this.sinkConfig = (CanalSinkIncrementConfig) config; + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + // init config for canal source connector + SinkConnectorContext sinkConnectorContext = (SinkConnectorContext) connectorContext; + CanalSinkConfig canalSinkConfig = (CanalSinkConfig) sinkConnectorContext.getSinkConfig(); + this.sinkConfig = ConfigUtil.parse(canalSinkConfig.getSinkConfig(), CanalSinkIncrementConfig.class); + this.batchSize = sinkConfig.getBatchSize(); + this.useBatch = sinkConfig.getUseBatch(); + DatabaseConnection.sinkConfig = this.sinkConfig.getSinkConnectorConfig(); + DatabaseConnection.initSinkConnection(); + jdbcTemplate = new JdbcTemplate(DatabaseConnection.sinkDataSource); + dbDialect = new MysqlDialect(jdbcTemplate, new DefaultLobHandler()); + interceptor = new SqlBuilderLoadInterceptor(); + interceptor.setDbDialect(dbDialect); + tableMgr = new RdbTableMgr(sinkConfig.getSinkConnectorConfig(), DatabaseConnection.sinkDataSource); + executor = new ThreadPoolExecutor(sinkConfig.getPoolSize(), + sinkConfig.getPoolSize(), + 0L, + TimeUnit.MILLISECONDS, + new ArrayBlockingQueue<>(sinkConfig.getPoolSize() * 4), + new NamedThreadFactory("canalSink"), + new ThreadPoolExecutor.CallerRunsPolicy()); + gtidSingleExecutor = Executors.newSingleThreadExecutor(r -> new Thread(r, "gtidSingleExecutor")); + } + + @Override + public void start() throws Exception { + 
tableMgr.start(); + } + + @Override + public void commit(ConnectRecord record) { + + } + + @Override + public String name() { + return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void stop() { + executor.shutdown(); + gtidSingleExecutor.shutdown(); + } + + @Override + public void put(List sinkRecords) { + DbLoadContext context = new DbLoadContext(); + for (ConnectRecord connectRecord : sinkRecords) { + List canalConnectRecordList = new ArrayList<>(); + + List canalConnectRecords = convertToCanalConnectRecord(connectRecord); + + // deep copy connectRecord data + for (CanalConnectRecord record : canalConnectRecords) { + canalConnectRecordList.add(SerializationUtils.clone(record)); + } + canalConnectRecordList = filterRecord(canalConnectRecordList); + if (isDdlDatas(canalConnectRecordList)) { + doDdl(context, canalConnectRecordList, connectRecord); + } else if (sinkConfig.isGTIDMode()) { + doLoadWithGtid(context, sinkConfig, connectRecord); + } else { + canalConnectRecordList = DbLoadMerger.merge(canalConnectRecordList); + + DbLoadData loadData = new DbLoadData(); + doBefore(canalConnectRecordList, loadData); + + doLoad(context, sinkConfig, loadData, connectRecord); + + } + + } + } + + @Override + public Sink create() { + return new CanalSinkIncrementConnector(); + } + + private boolean isDdlDatas(List canalConnectRecordList) { + boolean result = false; + for (CanalConnectRecord canalConnectRecord : canalConnectRecordList) { + result |= canalConnectRecord.getEventType().isDdl(); + if (result && !canalConnectRecord.getEventType().isDdl()) { + throw new RuntimeException("ddl/dml can't be in one batch, it's may be a bug , pls submit issues."); + } + } + return result; + } + + private List filterRecord(List canalConnectRecordList) { + return canalConnectRecordList.stream() + .filter(record -> tableMgr.getTable(record.getSchemaName(), record.getTableName()) != null) + .collect(Collectors.toList()); + } + + private void doDdl(DbLoadContext context, List canalConnectRecordList, ConnectRecord connectRecord) { + for (final CanalConnectRecord record : canalConnectRecordList) { + try { + Boolean result = jdbcTemplate.execute(new StatementCallback() { + + public Boolean doInStatement(Statement stmt) throws SQLException, DataAccessException { + boolean result = true; + if (StringUtils.isNotEmpty(record.getDdlSchemaName())) { + result &= stmt.execute("use `" + record.getDdlSchemaName() + "`"); + } + result &= stmt.execute(record.getSql()); + return result; + } + }); + if (Boolean.TRUE.equals(result)) { + context.getProcessedRecords().add(record); + } else { + context.getFailedRecords().add(record); + } + } catch (Throwable e) { + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, e)); + throw new RuntimeException(e); + } + } + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + + private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { + SendExceptionContext sendExceptionContext = new SendExceptionContext(); + sendExceptionContext.setMessageId(record.getRecordId()); + sendExceptionContext.setCause(e); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + sendExceptionContext.setTopic(record.getExtension("topic")); + } + return sendExceptionContext; + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + 
result.setMessageId(record.getRecordId()); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + + private void doBefore(List canalConnectRecordList, final DbLoadData loadData) { + for (final CanalConnectRecord record : canalConnectRecordList) { + boolean filter = interceptor.before(sinkConfig, record); + if (!filter) { + loadData.merge(record); + } + } + } + + private void doLoad(DbLoadContext context, CanalSinkIncrementConfig sinkConfig, DbLoadData loadData, ConnectRecord connectRecord) { + List> batchDatas = new ArrayList<>(); + for (TableLoadData tableData : loadData.getTables()) { + if (useBatch) { + batchDatas.addAll(split(tableData.getDeleteDatas())); + } else { + for (CanalConnectRecord data : tableData.getDeleteDatas()) { + batchDatas.add(Arrays.asList(data)); + } + } + } + + doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); + + batchDatas.clear(); + + for (TableLoadData tableData : loadData.getTables()) { + if (useBatch) { + batchDatas.addAll(split(tableData.getInsertDatas())); + batchDatas.addAll(split(tableData.getUpdateDatas())); + } else { + for (CanalConnectRecord data : tableData.getInsertDatas()) { + batchDatas.add(Arrays.asList(data)); + } + for (CanalConnectRecord data : tableData.getUpdateDatas()) { + batchDatas.add(Arrays.asList(data)); + } + } + } + + doTwoPhase(context, sinkConfig, batchDatas, true, connectRecord); + + batchDatas.clear(); + } + + private void doLoadWithGtid(DbLoadContext context, CanalSinkIncrementConfig sinkConfig, ConnectRecord connectRecord) { + int batchIndex = connectRecord.getExtension("batchIndex", Integer.class); + int totalBatches = connectRecord.getExtension("totalBatches", Integer.class); + List canalConnectRecordList = convertToCanalConnectRecord(connectRecord); + + String gtid = canalConnectRecordList.get(0).getCurrentGtid(); + GtidBatchManager.addBatch(gtid, batchIndex, totalBatches, canalConnectRecordList); + // check whether the batch is complete + if (GtidBatchManager.isComplete(gtid)) { + GtidBatch batch = GtidBatchManager.getGtidBatch(gtid); + List> totalRows = batch.getBatches(); + List filteredRows = new ArrayList<>(); + for (List canalConnectRecords : totalRows) { + canalConnectRecords = filterRecord(canalConnectRecords); + if (!CollectionUtils.isEmpty(canalConnectRecords)) { + for (final CanalConnectRecord record : canalConnectRecords) { + boolean filter = interceptor.before(sinkConfig, record); + filteredRows.add(record); + } + } + } + context.setGtid(gtid); + Future result = gtidSingleExecutor.submit(new DbLoadWorker(context, filteredRows, dbDialect, false, sinkConfig)); + Exception ex = null; + try { + ex = result.get(); + if (ex == null) { + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } catch (Exception e) { + ex = e; + } + Boolean skipException = sinkConfig.getSkipException(); + if (skipException != null && skipException) { + if (ex != null) { + // do skip + log.warn("skip exception will ack data : {} , caused by {}", + filteredRows, + ExceptionUtils.getFullStackTrace(ex)); + GtidBatchManager.removeGtidBatch(gtid); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } else { + if (ex != null) { + log.error("sink connector will shutdown by " + ex.getMessage(), ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); + gtidSingleExecutor.shutdown(); + 
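+                    // fail fast: the record has already been reported as failed, and the process exits rather than continuing with a broken GTID stream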
System.exit(1); + } else { + GtidBatchManager.removeGtidBatch(gtid); + } + } + } else { + log.info("Batch received, waiting for other batches."); + // ack this record + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } + + private List convertToCanalConnectRecord(ConnectRecord connectRecord) { + List canalConnectRecordList; + try { + canalConnectRecordList = + JsonUtils.parseTypeReferenceObject((byte[]) connectRecord.getData(), new TypeReference>() { + }); + } catch (Exception e) { + log.error("Failed to parse the canalConnectRecords.", e); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, e)); + throw new RuntimeException("Failed to parse the canalConnectRecords.", e); + } + return canalConnectRecordList; + } + + private List> split(List records) { + List> result = new ArrayList<>(); + if (records == null || records.isEmpty()) { + return result; + } else { + int[] bits = new int[records.size()]; + for (int i = 0; i < bits.length; i++) { + while (i < bits.length && bits[i] == 1) { + i++; + } + + if (i >= bits.length) { + break; + } + + List batch = new ArrayList<>(); + bits[i] = 1; + batch.add(records.get(i)); + for (int j = i + 1; j < bits.length && batch.size() < batchSize; j++) { + if (bits[j] == 0 && canBatch(records.get(i), records.get(j))) { + batch.add(records.get(j)); + bits[j] = 1; + } + } + result.add(batch); + } + + return result; + } + } + + private boolean canBatch(CanalConnectRecord source, CanalConnectRecord target) { + return StringUtils.equals(source.getSchemaName(), + target.getSchemaName()) + && StringUtils.equals(source.getTableName(), target.getTableName()) + && StringUtils.equals(source.getSql(), target.getSql()); + } + + private void doTwoPhase(DbLoadContext context, CanalSinkIncrementConfig sinkConfig, List> totalRows, boolean canBatch, + ConnectRecord connectRecord) { + List> results = new ArrayList<>(); + for (List rows : totalRows) { + if (CollectionUtils.isEmpty(rows)) { + continue; + } + results.add(executor.submit(new DbLoadWorker(context, rows, dbDialect, canBatch, sinkConfig))); + } + + boolean partFailed = false; + for (Future result : results) { + Exception ex = null; + try { + ex = result.get(); + if (ex == null) { + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } catch (Exception e) { + ex = e; + } + + if (ex != null) { + log.warn("##load phase one failed!", ex); + partFailed = true; + } + } + + if (partFailed) { + List retryRecords = new ArrayList<>(); + for (List rows : totalRows) { + retryRecords.addAll(rows); + } + + context.getFailedRecords().clear(); + + Boolean skipException = sinkConfig.getSkipException(); + if (skipException != null && skipException) { + for (CanalConnectRecord retryRecord : retryRecords) { + DbLoadWorker worker = new DbLoadWorker(context, Arrays.asList(retryRecord), dbDialect, false, sinkConfig); + try { + Exception ex = worker.call(); + if (ex != null) { + // do skip + log.warn("skip exception for data : {} , caused by {}", + retryRecord, + ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } catch (Exception ex) { + // do skip + log.warn("skip exception for data : {} , caused by {}", + retryRecord, + ExceptionUtils.getFullStackTrace(ex)); + connectRecord.getCallback().onSuccess(convertToSendResult(connectRecord)); + } + } + } else { + DbLoadWorker worker = new DbLoadWorker(context, retryRecords, dbDialect, false, sinkConfig); + try { + Exception ex = 
worker.call(); + if (ex != null) { + throw ex; + } + } catch (Exception ex) { + log.error("##load phase two failed!", ex); + log.error("sink connector will shutdown by " + ex.getMessage(), ex); + connectRecord.getCallback().onException(buildSendExceptionContext(connectRecord, ex)); + executor.shutdown(); + System.exit(1); + } + } + } + } + + enum ExecuteResult { + SUCCESS, ERROR, RETRY + } + + class DbLoadWorker implements Callable<Exception> { + + private final DbLoadContext context; + private final DbDialect dbDialect; + private final List<CanalConnectRecord> records; + private final boolean canBatch; + + private final CanalSinkIncrementConfig sinkConfig; + + private final List<CanalConnectRecord> allFailedRecords = new ArrayList<>(); + private final List<CanalConnectRecord> allProcessedRecords = new ArrayList<>(); + private final List<CanalConnectRecord> processedRecords = new ArrayList<>(); + private final List<CanalConnectRecord> failedRecords = new ArrayList<>(); + + public DbLoadWorker(DbLoadContext context, List<CanalConnectRecord> records, DbDialect dbDialect, boolean canBatch, + CanalSinkIncrementConfig sinkConfig) { + this.context = context; + this.records = records; + this.canBatch = canBatch; + this.dbDialect = dbDialect; + this.sinkConfig = sinkConfig; + } + + public Exception call() throws Exception { + try { + return doCall(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private Exception doCall() { + RuntimeException error = null; + ExecuteResult exeResult = null; + + if (sinkConfig.isGTIDMode()) { + int retryCount = 0; + final List<CanalConnectRecord> toExecuteRecords = new ArrayList<>(); + try { + if (!CollectionUtils.isEmpty(failedRecords)) { + // if failedRecords is not empty, retry those records first + toExecuteRecords.addAll(failedRecords); + } else { + toExecuteRecords.addAll(records); + // add them to the failed records first, since getting the lob creator or the datasource may fail before execution + failedRecords.addAll(toExecuteRecords); + } + JdbcTemplate template = dbDialect.getJdbcTemplate(); + String sourceGtid = context.getGtid(); + if (StringUtils.isNotEmpty(sourceGtid) && !sinkConfig.isMariaDB()) { + String setMySQLGtid = "SET @@session.gtid_next = '" + sourceGtid + "';"; + template.execute(setMySQLGtid); + } else if (StringUtils.isNotEmpty(sourceGtid) && sinkConfig.isMariaDB()) { + throw new RuntimeException("GTID mode is not supported for MariaDB"); + } else { + log.error("gtid is empty in gtid mode"); + throw new RuntimeException("gtid is empty in gtid mode"); + } + + final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator(); + int affect = (Integer) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { + try { + failedRecords.clear(); + processedRecords.clear(); + int affect1 = 0; + for (CanalConnectRecord record : toExecuteRecords) { + int affects = template.update(record.getSql(), new PreparedStatementSetter() { + public void setValues(PreparedStatement ps) throws SQLException { + doPreparedStatement(ps, dbDialect, lobCreator, record); + } + }); + affect1 = affect1 + affects; + processStat(record, affects, false); + } + return affect1; + } catch (Exception e) { + // rollback + status.setRollbackOnly(); + throw new RuntimeException("Failed to execute", e); + } finally { + lobCreator.close(); + } + }); + + // reset gtid + if (sinkConfig.isMariaDB()) { + throw new RuntimeException("GTID mode is not supported for MariaDB"); + } else { + String resetMySQLGtid = "SET @@session.gtid_next = 'AUTOMATIC';"; + dbDialect.getJdbcTemplate().execute(resetMySQLGtid); + } + + error = null; + exeResult = ExecuteResult.SUCCESS; + } catch (DeadlockLoserDataAccessException ex) { + error = new
RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.RETRY; + } catch (Throwable ex) { + error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.ERROR; + } + + if (ExecuteResult.SUCCESS == exeResult) { + allFailedRecords.addAll(failedRecords); + allProcessedRecords.addAll(processedRecords); + failedRecords.clear(); + processedRecords.clear(); + } else if (ExecuteResult.RETRY == exeResult) { + retryCount = retryCount + 1; + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + int retry = 3; + if (retryCount >= retry) { + processFailedDatas(toExecuteRecords.size()); + throw new RuntimeException(String.format("execute retry %s times failed", retryCount), error); + } else { + try { + int retryWait = 3000; + int wait = retryCount * retryWait; + wait = Math.max(wait, retryWait); + Thread.sleep(wait); + } catch (InterruptedException ex) { + Thread.interrupted(); + processFailedDatas(toExecuteRecords.size()); + throw new RuntimeException(ex); + } + } + } else { + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + processFailedDatas(toExecuteRecords.size()); + throw error; + } + } else { + int index = 0; + while (index < records.size()) { + final List toExecuteRecords = new ArrayList<>(); + if (useBatch && canBatch) { + int end = Math.min(index + batchSize, records.size()); + toExecuteRecords.addAll(records.subList(index, end)); + index = end; + } else { + toExecuteRecords.add(records.get(index)); + index = index + 1; + } + + int retryCount = 0; + while (true) { + try { + if (!CollectionUtils.isEmpty(failedRecords)) { + toExecuteRecords.clear(); + toExecuteRecords.addAll(failedRecords); + } else { + failedRecords.addAll(toExecuteRecords); + } + + final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator(); + if (useBatch && canBatch) { + JdbcTemplate template = dbDialect.getJdbcTemplate(); + final String sql = toExecuteRecords.get(0).getSql(); + + int[] affects = new int[toExecuteRecords.size()]; + + affects = (int[]) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { + try { + failedRecords.clear(); + processedRecords.clear(); + int[] affects1 = template.batchUpdate(sql, new BatchPreparedStatementSetter() { + + public void setValues(PreparedStatement ps, int idx) throws SQLException { + doPreparedStatement(ps, dbDialect, lobCreator, toExecuteRecords.get(idx)); + } + + public int getBatchSize() { + return toExecuteRecords.size(); + } + }); + return affects1; + } catch (Exception e) { + // rollback + status.setRollbackOnly(); + throw new RuntimeException("Failed to execute batch with GTID", e); + } finally { + lobCreator.close(); + } + }); + + for (int i = 0; i < toExecuteRecords.size(); i++) { + assert affects != null; + processStat(toExecuteRecords.get(i), affects[i], true); + } + } else { + final CanalConnectRecord record = toExecuteRecords.get(0); + JdbcTemplate template = dbDialect.getJdbcTemplate(); + int affect = 0; + affect = (Integer) dbDialect.getTransactionTemplate().execute((TransactionCallback) status -> { + try { + failedRecords.clear(); + processedRecords.clear(); + int affect1 = template.update(record.getSql(), new PreparedStatementSetter() { + + public void setValues(PreparedStatement ps) throws SQLException { + doPreparedStatement(ps, dbDialect, lobCreator, record); + } + }); + return affect1; + } catch (Exception e) { + // rollback + status.setRollbackOnly(); + throw new 
RuntimeException("Failed to executed", e); + } finally { + lobCreator.close(); + } + }); + processStat(record, affect, false); + } + + error = null; + exeResult = ExecuteResult.SUCCESS; + } catch (DeadlockLoserDataAccessException ex) { + error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.RETRY; + } catch (Throwable ex) { + error = new RuntimeException(ExceptionUtils.getFullStackTrace(ex)); + exeResult = ExecuteResult.ERROR; + } + + if (ExecuteResult.SUCCESS == exeResult) { + allFailedRecords.addAll(failedRecords); + allProcessedRecords.addAll(processedRecords); + failedRecords.clear(); + processedRecords.clear(); + break; // do next eventData + } else if (ExecuteResult.RETRY == exeResult) { + retryCount = retryCount + 1; + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + int retry = 3; + if (retryCount >= retry) { + processFailedDatas(index); + throw new RuntimeException(String.format("execute retry %s times failed", retryCount), error); + } else { + try { + int retryWait = 3000; + int wait = retryCount * retryWait; + wait = Math.max(wait, retryWait); + Thread.sleep(wait); + } catch (InterruptedException ex) { + Thread.interrupted(); + processFailedDatas(index); + throw new RuntimeException(ex); + } + } + } else { + processedRecords.clear(); + failedRecords.clear(); + failedRecords.addAll(toExecuteRecords); + processFailedDatas(index); + throw error; + } + } + } + } + + context.getFailedRecords().addAll(allFailedRecords); + context.getProcessedRecords().addAll(allProcessedRecords); + return null; + } + + private void doPreparedStatement(PreparedStatement ps, DbDialect dbDialect, LobCreator lobCreator, + CanalConnectRecord record) throws SQLException { + EventType type = record.getEventType(); + List columns = new ArrayList(); + if (type.isInsert()) { + columns.addAll(record.getColumns()); + columns.addAll(record.getKeys()); + } else if (type.isDelete()) { + columns.addAll(record.getKeys()); + } else if (type.isUpdate()) { + boolean existOldKeys = !CollectionUtils.isEmpty(record.getOldKeys()); + columns.addAll(record.getUpdatedColumns()); + columns.addAll(record.getKeys()); + if (existOldKeys) { + columns.addAll(record.getOldKeys()); + } + } + + for (int i = 0; i < columns.size(); i++) { + int paramIndex = i + 1; + EventColumn column = columns.get(i); + int sqlType = column.getColumnType(); + + Object param = null; + if (dbDialect instanceof MysqlDialect + && (sqlType == Types.TIME || sqlType == Types.TIMESTAMP || sqlType == Types.DATE)) { + param = column.getColumnValue(); + } else { + param = SqlUtils.stringToSqlValue(column.getColumnValue(), + sqlType, + false, + dbDialect.isEmptyStringNulled()); + } + + try { + switch (sqlType) { + case Types.CLOB: + lobCreator.setClobAsString(ps, paramIndex, (String) param); + break; + + case Types.BLOB: + lobCreator.setBlobAsBytes(ps, paramIndex, (byte[]) param); + break; + case Types.TIME: + case Types.TIMESTAMP: + case Types.DATE: + if (dbDialect instanceof MysqlDialect) { + ps.setObject(paramIndex, param); + } else { + StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); + } + break; + case Types.BIT: + if (dbDialect instanceof MysqlDialect) { + StatementCreatorUtils.setParameterValue(ps, paramIndex, Types.DECIMAL, null, param); + } else { + StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); + } + break; + default: + StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param); + break; + 
} + } catch (SQLException ex) { + log.error("## SetParam error , [pairId={}, sqltype={}, value={}]", + record.getPairId(), sqlType, param); + throw ex; + } + } + } + + private void processStat(CanalConnectRecord record, int affect, boolean batch) { + if (batch && (affect < 1 && affect != Statement.SUCCESS_NO_INFO)) { + failedRecords.add(record); + } else if (!batch && affect < 1) { + failedRecords.add(record); + } else { + processedRecords.add(record); + // this.processStat(record, context); + } + } + + private void processFailedDatas(int index) { + allFailedRecords.addAll(failedRecords); + context.getFailedRecords().addAll(allFailedRecords); + for (; index < records.size(); index++) { + context.getFailedRecords().add(records.get(index)); + } + allProcessedRecords.addAll(processedRecords); + context.getProcessedRecords().addAll(allProcessedRecords); + } + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java new file mode 100644 index 0000000000..5a6ceb7c3f --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/EntryParser.java @@ -0,0 +1,332 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.source; + +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceIncrementConfig; +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.model.EventColumn; +import org.apache.eventmesh.connector.canal.model.EventColumnIndexComparable; +import org.apache.eventmesh.connector.canal.model.EventType; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; + +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.util.CollectionUtils; + +import com.alibaba.otter.canal.protocol.CanalEntry; +import com.alibaba.otter.canal.protocol.CanalEntry.Column; +import com.alibaba.otter.canal.protocol.CanalEntry.Entry; +import com.alibaba.otter.canal.protocol.CanalEntry.RowChange; +import com.alibaba.otter.canal.protocol.CanalEntry.RowData; + +import lombok.extern.slf4j.Slf4j; + +/** + * Parses canal binlog entries into {@link CanalConnectRecord} lists grouped by binlog offset. + */ +@Slf4j +public class EntryParser { + + public static Map<Long, List<CanalConnectRecord>> parse(CanalSourceIncrementConfig sourceConfig, List<Entry> datas, + RdbTableMgr tables) { + List<CanalConnectRecord> recordList = new ArrayList<>(); + List<Entry> transactionDataBuffer = new ArrayList<>(); + // need to check whether the entry is a loopback entry + boolean needSync; + Map<Long, List<CanalConnectRecord>> recordMap = new HashMap<>(); + try { + for (Entry entry : datas) { + switch (entry.getEntryType()) { + case ROWDATA: + RowChange rowChange = RowChange.parseFrom(entry.getStoreValue()); + // gtid mode is not supported for MariaDB + if (sourceConfig.getServerUUID() != null && sourceConfig.isGTIDMode() && !sourceConfig.isMariaDB()) { + if (checkGtidForEntry(entry, sourceConfig)) { + transactionDataBuffer.add(entry); + } + } else { + // if not in gtid mode, check whether the entry is a loopback entry via the configured mark column value + needSync = checkNeedSync(sourceConfig, rowChange); + if (needSync) { + transactionDataBuffer.add(entry); + } + } + break; + case TRANSACTIONEND: + parseRecordListWithEntryBuffer(sourceConfig, recordList, transactionDataBuffer, tables); + if (!recordList.isEmpty()) { + recordMap.put(entry.getHeader().getLogfileOffset(), recordList); + } + transactionDataBuffer.clear(); + break; + default: + break; + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } + return recordMap; + } + + private static boolean checkGtidForEntry(Entry entry, CanalSourceIncrementConfig sourceConfig) { + String currentGtid = entry.getHeader().getPropsList().get(0).getValue(); + return currentGtid.contains(sourceConfig.getServerUUID()); + } + + private static void parseRecordListWithEntryBuffer(CanalSourceIncrementConfig sourceConfig, + List<CanalConnectRecord> recordList, + List<Entry> transactionDataBuffer, RdbTableMgr tables) { + for (Entry bufferEntry : transactionDataBuffer) { + List<CanalConnectRecord> recordParsedList = internParse(sourceConfig, bufferEntry, tables); + if (CollectionUtils.isEmpty(recordParsedList)) { + continue; + } + long totalSize = bufferEntry.getHeader().getEventLength(); + long eachSize = totalSize / recordParsedList.size(); + for (CanalConnectRecord record : recordParsedList) { + if (record == null) { + continue; + } + record.setSize(eachSize); + recordList.add(record); + } + } + } + + private static boolean checkNeedSync(CanalSourceIncrementConfig sourceConfig, RowChange rowChange) { + Column markedColumn = null; + CanalEntry.EventType eventType = rowChange.getEventType(); + if (eventType.equals(CanalEntry.EventType.DELETE)) { +
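+            // for DELETE events the marker column is only present in the before-image of the row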
markedColumn = getColumnIgnoreCase(rowChange.getRowDatas(0).getBeforeColumnsList(), + sourceConfig.getNeedSyncMarkTableColumnName()); + } else if (eventType.equals(CanalEntry.EventType.INSERT) || eventType.equals(CanalEntry.EventType.UPDATE)) { + markedColumn = getColumnIgnoreCase(rowChange.getRowDatas(0).getAfterColumnsList(), + sourceConfig.getNeedSyncMarkTableColumnName()); + } + if (markedColumn != null) { + return StringUtils.equalsIgnoreCase(markedColumn.getValue(), + sourceConfig.getNeedSyncMarkTableColumnValue()); + } + return false; + } + + private static Column getColumnIgnoreCase(List columns, String columName) { + for (Column column : columns) { + if (column.getName().equalsIgnoreCase(columName)) { + return column; + } + } + return null; + } + + private static List internParse(CanalSourceIncrementConfig sourceConfig, Entry entry, + RdbTableMgr tableMgr) { + String schemaName = entry.getHeader().getSchemaName(); + String tableName = entry.getHeader().getTableName(); + if (tableMgr.getTable(schemaName, tableName) == null) { + return null; + } + + RowChange rowChange = null; + try { + rowChange = RowChange.parseFrom(entry.getStoreValue()); + } catch (Exception e) { + throw new RuntimeException("parser of canal-event has an error , data:" + entry.toString(), e); + } + + if (rowChange == null) { + return null; + } + + EventType eventType = EventType.valueOf(rowChange.getEventType().name()); + + if (eventType.isQuery()) { + return null; + } + + if (eventType.isDdl()) { + log.warn("unsupported ddl event type: {}", eventType); + return null; + } + + List recordList = new ArrayList<>(); + for (RowData rowData : rowChange.getRowDatasList()) { + CanalConnectRecord record = internParse(sourceConfig, entry, rowChange, rowData); + recordList.add(record); + } + + return recordList; + } + + private static CanalConnectRecord internParse(CanalSourceIncrementConfig canalSourceConfig, Entry entry, + RowChange rowChange, RowData rowData) { + CanalConnectRecord canalConnectRecord = new CanalConnectRecord(); + canalConnectRecord.setTableName(entry.getHeader().getTableName()); + canalConnectRecord.setSchemaName(entry.getHeader().getSchemaName()); + canalConnectRecord.setEventType(EventType.valueOf(rowChange.getEventType().name())); + canalConnectRecord.setExecuteTime(entry.getHeader().getExecuteTime()); + canalConnectRecord.setJournalName(entry.getHeader().getLogfileName()); + canalConnectRecord.setBinLogOffset(entry.getHeader().getLogfileOffset()); + // if enabled gtid mode, gtid not null + if (canalSourceConfig.isGTIDMode()) { + if (canalSourceConfig.isMariaDB()) { + String currentGtid = entry.getHeader().getGtid(); + canalConnectRecord.setGtid(currentGtid); + canalConnectRecord.setCurrentGtid(currentGtid); + } else { + String currentGtid = entry.getHeader().getPropsList().get(0).getValue(); + String gtidRange = replaceGtidRange(entry.getHeader().getGtid(), currentGtid, canalSourceConfig.getServerUUID()); + canalConnectRecord.setGtid(gtidRange); + canalConnectRecord.setCurrentGtid(currentGtid); + } + } + + EventType eventType = canalConnectRecord.getEventType(); + + List beforeColumns = rowData.getBeforeColumnsList(); + List afterColumns = rowData.getAfterColumnsList(); + + boolean isRowMode = canalSourceConfig.getSyncMode().isRow(); + + Map keyColumns = new LinkedHashMap<>(); + Map oldKeyColumns = new LinkedHashMap<>(); + Map notKeyColumns = new LinkedHashMap<>(); + + if (eventType.isInsert()) { + for (Column column : afterColumns) { + if (column.getIsKey()) { + keyColumns.put(column.getName(), 
copyEventColumn(column, true)); + } else { + notKeyColumns.put(column.getName(), copyEventColumn(column, true)); + } + } + } else if (eventType.isDelete()) { + for (Column column : beforeColumns) { + if (column.getIsKey()) { + keyColumns.put(column.getName(), copyEventColumn(column, true)); + } else { + notKeyColumns.put(column.getName(), copyEventColumn(column, true)); + } + } + } else if (eventType.isUpdate()) { + for (Column column : beforeColumns) { + if (column.getIsKey()) { + oldKeyColumns.put(column.getName(), copyEventColumn(column, true)); + keyColumns.put(column.getName(), copyEventColumn(column, true)); + } else { + if (isRowMode && entry.getHeader().getSourceType() == CanalEntry.Type.ORACLE) { + notKeyColumns.put(column.getName(), copyEventColumn(column, true)); + } + } + } + for (Column column : afterColumns) { + if (column.getIsKey()) { + keyColumns.put(column.getName(), copyEventColumn(column, true)); + } else if (isRowMode || entry.getHeader().getSourceType() == CanalEntry.Type.ORACLE + || column.getUpdated()) { + + boolean isUpdate = true; + if (entry.getHeader().getSourceType() == CanalEntry.Type.MYSQL) { + isUpdate = column.getUpdated(); + } + + notKeyColumns.put(column.getName(), copyEventColumn(column, isUpdate)); + } + } + + if (entry.getHeader().getSourceType() == CanalEntry.Type.ORACLE) { + checkUpdateKeyColumns(oldKeyColumns, keyColumns); + } + } + + List keys = new ArrayList<>(keyColumns.values()); + List oldKeys = new ArrayList<>(oldKeyColumns.values()); + List columns = new ArrayList<>(notKeyColumns.values()); + + keys.sort(new EventColumnIndexComparable()); + oldKeys.sort(new EventColumnIndexComparable()); + columns.sort(new EventColumnIndexComparable()); + if (!keyColumns.isEmpty()) { + canalConnectRecord.setKeys(keys); + if (canalConnectRecord.getEventType().isUpdate() && !oldKeys.equals(keys)) { + canalConnectRecord.setOldKeys(oldKeys); + } + canalConnectRecord.setColumns(columns); + } else { + throw new RuntimeException("this row data has no pks , entry: " + entry + " and rowData: " + + rowData); + } + + return canalConnectRecord; + } + + public static String replaceGtidRange(String gtid, String currentGtid, String serverUUID) { + String[] gtidRangeArray = gtid.split(","); + for (int i = 0; i < gtidRangeArray.length; i++) { + String gtidRange = gtidRangeArray[i]; + if (gtidRange.startsWith(serverUUID)) { + gtidRangeArray[i] = gtidRange.replaceFirst("\\d+$", currentGtid.split(":")[1]); + } + } + return String.join(",", gtidRangeArray); + } + + private static void checkUpdateKeyColumns(Map oldKeyColumns, + Map keyColumns) { + if (oldKeyColumns.isEmpty()) { + return; + } + if (keyColumns.size() > oldKeyColumns.size()) { + return; + } + + if (keyColumns.isEmpty()) { + keyColumns.putAll(oldKeyColumns); + return; + } + + if (oldKeyColumns.size() != keyColumns.size()) { + for (String oldKey : oldKeyColumns.keySet()) { + if (keyColumns.get(oldKey) == null) { + keyColumns.put(oldKey, oldKeyColumns.get(oldKey)); + } + } + } + } + + private static EventColumn copyEventColumn(Column column, boolean isUpdate) { + EventColumn eventColumn = new EventColumn(); + eventColumn.setIndex(column.getIndex()); + eventColumn.setKey(column.getIsKey()); + eventColumn.setNull(column.getIsNull()); + eventColumn.setColumnName(column.getName()); + eventColumn.setColumnValue(column.getValue()); + eventColumn.setUpdate(isUpdate); + eventColumn.setColumnType(column.getSqlType()); + + return eventColumn; + } + +} diff --git 
a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java new file mode 100644 index 0000000000..c0b2063d28 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalFullProducer.java @@ -0,0 +1,403 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalMySQLType; +import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbColumnDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordPartition; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.SqlUtils; +import org.apache.eventmesh.connector.canal.source.position.TableFullPosition; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.locks.LockSupport; + +import javax.sql.DataSource; + +import lombok.extern.slf4j.Slf4j; + + +@Slf4j +public class CanalFullProducer { + private BlockingQueue> queue; + private final DataSource dataSource; + private final MySQLTableDef tableDefinition; + private final TableFullPosition position; + private static final int LIMIT = 2048; + private final int flushSize; + private final AtomicReference choosePrimaryKey = new AtomicReference<>(null); + 
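+    // choosePrimaryKey holds the name of the primary-key column that drives keyset pagination;
+    // the scan position for this column is advanced in refreshPosition() after every flushed batch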
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd"); + private static final DateTimeFormatter DATE_STAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + + + public CanalFullProducer(BlockingQueue> queue, DataSource dataSource, + MySQLTableDef tableDefinition, TableFullPosition position, int flushSize) { + this.queue = queue; + this.dataSource = dataSource; + this.tableDefinition = tableDefinition; + this.position = position; + this.flushSize = flushSize; + } + + public void choosePrimaryKey() { + for (RdbColumnDefinition col : tableDefinition.getColumnDefinitions().values()) { + if (position.getCurPrimaryKeyCols().get(col.getName()) != null) { + // random choose the first primary key from the table + choosePrimaryKey.set(col.getName()); + log.info("schema [{}] table [{}] choose primary key [{}]", tableDefinition.getSchemaName(), tableDefinition.getTableName(), + col.getName()); + return; + } + } + throw new EventMeshException("illegal: can't pick any primary key"); + } + + + public void start(AtomicBoolean flag) { + choosePrimaryKey(); + // used to page query + boolean isFirstSelect = true; + List> rows = new LinkedList<>(); + while (flag.get()) { + String scanSql = generateScanSql(isFirstSelect); + log.info("scan sql is [{}] , cur position [{}]", scanSql, JsonUtils.toJSONString(position.getCurPrimaryKeyCols())); + + try (Connection connection = dataSource.getConnection(); PreparedStatement statement = + connection.prepareStatement(scanSql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) { + statement.setFetchSize(Integer.MIN_VALUE); + setPrepareStatementValue(statement); + try (ResultSet resultSet = statement.executeQuery()) { + Map lastCol = null; + while (flag.get() && resultSet.next()) { + Map columnValues = new LinkedHashMap<>(); + for (Map.Entry col : + tableDefinition.getColumnDefinitions().entrySet()) { + columnValues.put(col.getKey(), readColumn(resultSet, col.getKey(), + col.getValue().getType())); + } + lastCol = columnValues; + rows.add(lastCol); + if (rows.size() < flushSize) { + continue; + } + refreshPosition(lastCol); + // may be not reach + commitConnectRecord(rows); + rows = new LinkedList<>(); + } + + if (lastCol == null || checkIsScanFinish(lastCol)) { + log.info("full scan db [{}] table [{}] finish", tableDefinition.getSchemaName(), + tableDefinition.getTableName()); + // commit the last record if rows.size() < flushSize + commitConnectRecord(rows); + return; + } + refreshPosition(lastCol); + } catch (InterruptedException ignore) { + log.info("full scan db [{}] table [{}] interrupted", tableDefinition.getSchemaName(), + tableDefinition.getTableName()); + Thread.currentThread().interrupt(); + return; + } + } catch (SQLException e) { + log.error("full source process schema [{}] table [{}] catch SQLException fail", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + LockSupport.parkNanos(3000 * 1000L); + } catch (Exception e) { + log.error("full source process schema [{}] table [{}] catch unknown exception", tableDefinition.getSchemaName(), + tableDefinition.getTableName(), e); + return; + } + if (isFirstSelect) { + isFirstSelect = false; + } + } + } + + private void commitConnectRecord(List> rows) throws InterruptedException { + if (rows == null || rows.isEmpty()) { + return; + } + JobRdbFullPosition jobRdbFullPosition = new JobRdbFullPosition(); + jobRdbFullPosition.setPrimaryKeyRecords(JsonUtils.toJSONString(position)); + 
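+        // the current primary-key position is serialized into the record offset so that a restarted job can resume the scan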
jobRdbFullPosition.setTableName(tableDefinition.getTableName()); + jobRdbFullPosition.setSchema(tableDefinition.getSchemaName()); + CanalFullRecordOffset offset = new CanalFullRecordOffset(); + offset.setPosition(jobRdbFullPosition); + CanalFullRecordPartition partition = new CanalFullRecordPartition(); + ArrayList records = new ArrayList<>(); + byte[] rowsData = JsonUtils.toJSONString(rows).getBytes(StandardCharsets.UTF_8); + records.add(new ConnectRecord(partition, offset, System.currentTimeMillis(), rowsData)); + queue.put(records); + } + + private boolean checkIsScanFinish(Map lastCol) { + Object lastPrimaryValue = lastCol.get(choosePrimaryKey.get()); + Object maxPrimaryValue = position.getMaxPrimaryKeyCols().get(choosePrimaryKey.get()); + if (lastPrimaryValue instanceof Number) { + BigDecimal last = new BigDecimal(String.valueOf(lastPrimaryValue)); + BigDecimal max = + new BigDecimal(String.valueOf(maxPrimaryValue)); + return last.compareTo(max) > 0; + } + if (lastPrimaryValue instanceof Comparable) { + return ((Comparable) lastPrimaryValue).compareTo(maxPrimaryValue) > 0; + } + return false; + } + + public Object readColumn(ResultSet rs, String col, CanalMySQLType colType) throws Exception { + if (col == null || rs.wasNull()) { + return null; + } + switch (colType) { + case TINYINT: + case SMALLINT: + case MEDIUMINT: + case INT: + Long valueLong = rs.getLong(col); + if (valueLong.compareTo((long) Integer.MAX_VALUE) > 0) { + return valueLong; + } + return valueLong.intValue(); + case BIGINT: + String v = rs.getString(col); + if (v == null) { + return null; + } + BigDecimal valueBigInt = new BigDecimal(v); + if (valueBigInt.compareTo(BigDecimal.valueOf(Long.MAX_VALUE)) > 0) { + return valueBigInt; + } + return valueBigInt.longValue(); + case FLOAT: + case DOUBLE: + case DECIMAL: + return rs.getBigDecimal(col); + case DATE: + return rs.getObject(col, LocalDate.class); + case TIME: + return rs.getObject(col, LocalTime.class); + case DATETIME: + case TIMESTAMP: + return rs.getObject(col, LocalDateTime.class); + case YEAR: + return rs.getInt(col); + case CHAR: + case VARCHAR: + case TINYTEXT: + case TEXT: + case MEDIUMTEXT: + case LONGTEXT: + case ENUM: + case SET: + case JSON: + return rs.getString(col); + case BIT: + case BINARY: + case VARBINARY: + case TINYBLOB: + case BLOB: + case MEDIUMBLOB: + case LONGBLOB: + return rs.getBytes(col); + case GEOMETRY: + case GEOMETRY_COLLECTION: + case GEOM_COLLECTION: + case POINT: + case LINESTRING: + case POLYGON: + case MULTIPOINT: + case MULTILINESTRING: + case MULTIPOLYGON: + byte[] geo = rs.getBytes(col); + if (geo == null) { + return null; + } + return SqlUtils.toGeometry(geo); + default: + return rs.getObject(col); + } + } + + + private void refreshPosition(Map lastCol) { + Map nextPosition = new LinkedHashMap<>(); + for (Map.Entry entry : position.getCurPrimaryKeyCols().entrySet()) { + nextPosition.put(entry.getKey(), lastCol.get(entry.getKey())); + } + position.setCurPrimaryKeyCols(nextPosition); + } + + private void setPrepareStatementValue(PreparedStatement statement) throws SQLException { + String colName = choosePrimaryKey.get(); + if (colName == null) { + return; + } + RdbColumnDefinition columnDefinition = tableDefinition.getColumnDefinitions().get(colName); + Object value = position.getCurPrimaryKeyCols().get(colName); + String str; + switch (columnDefinition.getJdbcType()) { + case BIT: + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + statement.setBigDecimal(1, new BigDecimal(String.valueOf(value))); + break; + 
case DECIMAL: + case FLOAT: + case DOUBLE: + case NUMERIC: + statement.setDouble(1, new BigDecimal(String.valueOf(value)).doubleValue()); + break; + case CHAR: + case VARCHAR: + case LONGNVARCHAR: + case NCHAR: + case NVARCHAR: + case LONGVARCHAR: + case CLOB: + case NCLOB: + statement.setString(1, String.valueOf(value)); + break; + case BLOB: + case VARBINARY: + case BINARY: + str = String.valueOf(value); + String hexStr = str; + if (str.startsWith("0x")) { + hexStr = str.substring(str.indexOf("0x")); + } + byte[] bytes = SqlUtils.hex2bytes(hexStr); + statement.setBytes(1, bytes); + break; + case DATE: + Instant d; + if (value instanceof Long) { + Long val = (Long) value; + d = Instant.ofEpochMilli(val); + str = d.atZone(ZoneId.systemDefault()).toLocalDateTime().format(DATE_FORMATTER); + } else if (value instanceof Integer) { + Integer val = (Integer) value; + d = Instant.ofEpochMilli((long) val); + str = d.atZone(ZoneId.systemDefault()).toLocalDateTime().format(DATE_FORMATTER); + } else if (value instanceof String) { + str = (String) value; + } else { + if (!(value instanceof LocalDate)) { + throw new IllegalArgumentException("unsupported date class type:" + value.getClass().getSimpleName()); + } + str = ((LocalDate) value).format(DATE_FORMATTER); + } + statement.setString(1, str); + break; + case TIMESTAMP: + if (value instanceof String) { + str = (String) value; + } else { + if (!(value instanceof LocalDateTime)) { + throw new IllegalArgumentException("unsupported timestamp class type:" + value.getClass().getSimpleName()); + } + str = ((LocalDateTime) value).format(DATE_STAMP_FORMATTER); + } + statement.setString(1, str); + break; + default: + throw new EventMeshException(String.format("not support the primary key type [%s]", value.getClass())); + } + } + + + private void generateQueryColumnsSql(StringBuilder builder, Collection rdbColDefs) { + if (rdbColDefs == null || rdbColDefs.isEmpty()) { + builder.append("*"); + return; + } + boolean first = true; + for (RdbColumnDefinition colDef : rdbColDefs) { + if (first) { + first = false; + } else { + builder.append(","); + } + builder.append(Constants.MySQLQuot); + builder.append(colDef.getName()); + builder.append(Constants.MySQLQuot); + } + } + + private String generateScanSql(boolean isFirst) { + StringBuilder builder = new StringBuilder(); + builder.append("select "); + generateQueryColumnsSql(builder, tableDefinition.getColumnDefinitions().values()); + builder.append(" from "); + builder.append(Constants.MySQLQuot); + builder.append(tableDefinition.getSchemaName()); + builder.append(Constants.MySQLQuot); + builder.append("."); + builder.append(Constants.MySQLQuot); + builder.append(tableDefinition.getTableName()); + builder.append(Constants.MySQLQuot); + buildWhereSql(builder, isFirst); + builder.append(" limit " + LIMIT); + return builder.toString(); + } + + private void buildWhereSql(StringBuilder builder, boolean isEquals) { + builder.append(" where ") + .append(Constants.MySQLQuot) + .append(choosePrimaryKey.get()) + .append(Constants.MySQLQuot); + if (isEquals) { + builder.append(" >= ? "); + } else { + builder.append(" > ? 
"); + } + builder.append(" order by ").append(Constants.MySQLQuot).append(choosePrimaryKey.get()).append(Constants.MySQLQuot) + .append(" asc "); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java new file mode 100644 index 0000000000..bd85f03240 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceCheckConnector.java @@ -0,0 +1,190 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.AbstractComponent; +import org.apache.eventmesh.common.EventMeshThreadFactory; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.source.position.CanalFullPositionMgr; +import org.apache.eventmesh.connector.canal.source.position.TableFullPosition; +import org.apache.eventmesh.connector.canal.source.table.RdbSimpleTable; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSourceCheckConnector extends AbstractComponent implements Source, ConnectorCreateService { + + private CanalSourceFullConfig config; + private CanalFullPositionMgr positionMgr; + 
private RdbTableMgr tableMgr; + private ThreadPoolExecutor executor; + private BlockingQueue> queue; + private final AtomicBoolean flag = new AtomicBoolean(true); + private long maxPollWaitTime; + + @Override + protected void run() throws Exception { + this.tableMgr.start(); + this.positionMgr.start(); + if (positionMgr.isFinished()) { + log.info("connector [{}] has finished the job", config.getSourceConnectorConfig().getConnectorName()); + return; + } + executor = new ThreadPoolExecutor(config.getParallel(), config.getParallel(), 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(), new EventMeshThreadFactory("canal-source-full")); + List producers = new LinkedList<>(); + if (config.getSourceConnectorConfig().getDatabases() != null) { + for (RdbDBDefinition db : config.getSourceConnectorConfig().getDatabases()) { + for (RdbTableDefinition table : db.getTables()) { + try { + log.info("it will create producer of db [{}] table [{}]", db.getSchemaName(), table.getTableName()); + RdbSimpleTable simpleTable = new RdbSimpleTable(db.getSchemaName(), table.getTableName()); + JobRdbFullPosition position = positionMgr.getPosition(simpleTable); + if (position == null) { + throw new EventMeshException(String.format("db [%s] table [%s] have none position info", + db.getSchemaName(), table.getTableName())); + } + RdbTableDefinition tableDefinition = tableMgr.getTable(simpleTable); + if (tableDefinition == null) { + throw new EventMeshException(String.format("db [%s] table [%s] have none table definition info", + db.getSchemaName(), table.getTableName())); + } + + producers.add(new CanalFullProducer(queue, DatabaseConnection.sourceDataSource, (MySQLTableDef) tableDefinition, + JsonUtils.parseObject(position.getPrimaryKeyRecords(), TableFullPosition.class), + config.getFlushSize())); + } catch (Exception e) { + log.error("create schema [{}] table [{}] producers fail", db.getSchemaName(), + table.getTableName(), e); + } + } + } + } + producers.forEach(p -> executor.execute(() -> p.start(flag))); + } + + @Override + protected void shutdown() throws Exception { + flag.set(false); + if (!executor.isShutdown()) { + executor.shutdown(); + try { + if (!executor.awaitTermination(5, TimeUnit.SECONDS)) { + log.warn("wait thread pool shutdown timeout, it will shutdown now"); + executor.shutdownNow(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + log.info("shutdown thread pool fail"); + } + } + if (DatabaseConnection.sourceDataSource != null) { + DatabaseConnection.sourceDataSource.close(); + log.info("data source has been closed"); + } + } + + @Override + public Source create() { + return new CanalSourceCheckConnector(); + } + + @Override + public Class configClass() { + return CanalSourceFullConfig.class; + } + + @Override + public void init(Config config) throws Exception { + this.config = (CanalSourceFullConfig) config; + init(); + } + + private void init() { + DatabaseConnection.sourceConfig = this.config.getSourceConnectorConfig(); + DatabaseConnection.initSourceConnection(); + this.tableMgr = new RdbTableMgr(config.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); + this.positionMgr = new CanalFullPositionMgr(config, tableMgr); + this.maxPollWaitTime = config.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(config.getPollConfig().getCapacity()); + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + 
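+        // the check connector runs against the same configuration type as the full-sync connector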
this.config = (CanalSourceFullConfig) sourceConnectorContext.getSourceConfig(); + init(); + } + + @Override + public void commit(ConnectRecord record) { + // nothing + } + + @Override + public String name() { + return this.config.getSourceConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public List poll() { + while (flag.get()) { + try { + List records = queue.poll(maxPollWaitTime, TimeUnit.MILLISECONDS); + if (records == null || records.isEmpty()) { + continue; + } + return records; + } catch (InterruptedException ignore) { + Thread.currentThread().interrupt(); + log.info("[{}] thread interrupted", this.getClass()); + return null; + } + } + log.info("[{}] life flag is stop, so return null", this.getClass()); + return null; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java new file mode 100644 index 0000000000..e24301ae07 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceConnector.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSourceConnector implements Source, ConnectorCreateService { + + private CanalSourceConfig sourceConfig; + + private Source source; + + @Override + public Class configClass() { + return CanalSourceConfig.class; + } + + @Override + public void init(Config config) throws Exception { + // init config for canal source connector + this.sourceConfig = (CanalSourceConfig) config; + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + if (sourceConnectorContext.getJobType().equals(JobType.FULL)) { + this.source = new CanalSourceFullConnector(); + } else if (sourceConnectorContext.getJobType().equals(JobType.INCREASE)) { + this.source = new CanalSourceIncrementConnector(); + } else if (sourceConnectorContext.getJobType().equals(JobType.CHECK)) { + this.source = new CanalSourceCheckConnector(); + } else { + throw new RuntimeException("unsupported job type " + sourceConnectorContext.getJobType()); + } + this.source.init(sourceConnectorContext); + } + + + @Override + public void start() throws Exception { + this.source.start(); + } + + + @Override + public void commit(ConnectRecord record) { + this.source.commit(record); + } + + @Override + public String name() { + return this.source.name(); + } + + @Override + public void onException(ConnectRecord record) { + this.source.onException(record); + } + + @Override + public void stop() throws Exception { + this.source.stop(); + } + + @Override + public List poll() { + return this.source.poll(); + } + + @Override + public Source create() { + return new CanalSourceConnector(); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java new file mode 100644 index 0000000000..09e2e0dcf7 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceFullConnector.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.AbstractComponent; +import org.apache.eventmesh.common.EventMeshThreadFactory; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.source.position.CanalFullPositionMgr; +import org.apache.eventmesh.connector.canal.source.position.TableFullPosition; +import org.apache.eventmesh.connector.canal.source.table.RdbSimpleTable; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSourceFullConnector extends AbstractComponent implements Source { + + private CanalSourceFullConfig config; + private CanalFullPositionMgr positionMgr; + private RdbTableMgr tableMgr; + private ThreadPoolExecutor executor; + private BlockingQueue> queue; + private final AtomicBoolean flag = new AtomicBoolean(true); + private long maxPollWaitTime; + + @Override + protected void run() throws Exception { + this.tableMgr.start(); + this.positionMgr.start(); + if (positionMgr.isFinished()) { + log.info("connector [{}] has finished the job", config.getSourceConnectorConfig().getConnectorName()); + return; + } + executor = new ThreadPoolExecutor(config.getParallel(), config.getParallel(), 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(), new EventMeshThreadFactory("canal-source-full")); + List producers = new LinkedList<>(); + if (config.getSourceConnectorConfig().getDatabases() != null) { + for (RdbDBDefinition db : config.getSourceConnectorConfig().getDatabases()) { + for (RdbTableDefinition table : db.getTables()) { + try { + log.info("it will create producer of db [{}] table [{}]", db.getSchemaName(), table.getTableName()); + RdbSimpleTable simpleTable = new 
RdbSimpleTable(db.getSchemaName(), table.getTableName()); + JobRdbFullPosition position = positionMgr.getPosition(simpleTable); + if (position == null) { + throw new EventMeshException(String.format("db [%s] table [%s] have none position info", + db.getSchemaName(), table.getTableName())); + } + RdbTableDefinition tableDefinition = tableMgr.getTable(simpleTable); + if (tableDefinition == null) { + throw new EventMeshException(String.format("db [%s] table [%s] have none table definition info", + db.getSchemaName(), table.getTableName())); + } + + producers.add(new CanalFullProducer(queue, DatabaseConnection.sourceDataSource, (MySQLTableDef) tableDefinition, + JsonUtils.parseObject(position.getPrimaryKeyRecords(), TableFullPosition.class), + config.getFlushSize())); + } catch (Exception e) { + log.error("create schema [{}] table [{}] producers fail", db.getSchemaName(), + table.getTableName(), e); + } + } + } + } + producers.forEach(p -> executor.execute(() -> p.start(flag))); + } + + @Override + protected void shutdown() throws Exception { + flag.set(false); + if (!executor.isShutdown()) { + executor.shutdown(); + try { + if (!executor.awaitTermination(5, TimeUnit.SECONDS)) { + log.warn("wait thread pool shutdown timeout, it will shutdown now"); + executor.shutdownNow(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + log.info("shutdown thread pool fail"); + } + } + if (DatabaseConnection.sourceDataSource != null) { + DatabaseConnection.sourceDataSource.close(); + log.info("data source has been closed"); + } + } + + @Override + public Class configClass() { + return CanalSourceFullConfig.class; + } + + @Override + public void init(Config config) throws Exception { + this.config = (CanalSourceFullConfig) config; + init(); + } + + private void init() { + DatabaseConnection.sourceConfig = this.config.getSourceConnectorConfig(); + DatabaseConnection.initSourceConnection(); + this.tableMgr = new RdbTableMgr(config.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); + this.positionMgr = new CanalFullPositionMgr(config, tableMgr); + this.maxPollWaitTime = config.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(config.getPollConfig().getCapacity()); + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + CanalSourceConfig canalSourceConfig = (CanalSourceConfig) sourceConnectorContext.getSourceConfig(); + this.config = ConfigUtil.parse(canalSourceConfig.getSourceConfig(), CanalSourceFullConfig.class); + init(); + } + + @Override + public void commit(ConnectRecord record) { + // nothing + } + + @Override + public String name() { + return this.config.getSourceConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public List poll() { + while (flag.get()) { + try { + List records = queue.poll(maxPollWaitTime, TimeUnit.MILLISECONDS); + if (records == null || records.isEmpty()) { + continue; + } + return records; + } catch (InterruptedException ignore) { + Thread.currentThread().interrupt(); + log.info("[{}] thread interrupted", this.getClass()); + return null; + } + } + log.info("[{}] life flag is stop, so return null", this.getClass()); + return null; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java 
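
Shutdown in the full connector is cooperative: the shared AtomicBoolean is flipped so producers stop on their own, then the pool is drained. A stripped-down sketch of that sequence, assuming a worker that re-checks the flag between units of work:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicBoolean;

    // Illustrative only: cooperative cancellation in the style used above.
    public class CooperativeStopSketch {
        public static void main(String[] args) throws InterruptedException {
            AtomicBoolean running = new AtomicBoolean(true);
            ExecutorService pool = Executors.newFixedThreadPool(2);

            pool.execute(() -> {
                while (running.get()) {
                    // do one unit of work, then re-check the flag
                }
            });

            running.set(false);          // ask workers to finish their current unit
            pool.shutdown();             // stop accepting new tasks
            if (!pool.awaitTermination(5, TimeUnit.SECONDS)) {
                pool.shutdownNow();      // interrupt anything still running
            }
        }
    }
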
b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java new file mode 100644 index 0000000000..4f7041b478 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/connector/CanalSourceIncrementConnector.java @@ -0,0 +1,383 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.source.connector; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceIncrementConfig; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordOffset; +import org.apache.eventmesh.common.remote.offset.canal.CanalRecordPartition; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.CanalConnectRecord; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.source.EntryParser; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import org.apache.commons.lang3.StringUtils; + +import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.LockSupport; + +import com.alibaba.otter.canal.instance.core.CanalInstance; +import com.alibaba.otter.canal.instance.core.CanalInstanceGenerator; +import com.alibaba.otter.canal.instance.manager.CanalInstanceWithManager; +import com.alibaba.otter.canal.instance.manager.model.Canal; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.ClusterMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.HAMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.IndexMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.MetaMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.RunMode; +import 
com.alibaba.otter.canal.instance.manager.model.CanalParameter.SourcingType; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.StorageMode; +import com.alibaba.otter.canal.parse.CanalEventParser; +import com.alibaba.otter.canal.parse.ha.CanalHAController; +import com.alibaba.otter.canal.parse.inbound.mysql.MysqlEventParser; +import com.alibaba.otter.canal.protocol.CanalEntry.Entry; +import com.alibaba.otter.canal.protocol.ClientIdentity; +import com.alibaba.otter.canal.server.embedded.CanalServerWithEmbedded; +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalSourceIncrementConnector implements Source { + + private CanalSourceIncrementConfig sourceConfig; + + private CanalServerWithEmbedded canalServer; + + private ClientIdentity clientIdentity; + + private String tableFilter = null; + + private String fieldFilter = null; + + private volatile boolean running = false; + + private static final int maxEmptyTimes = 10; + + private RdbTableMgr tableMgr; + + @Override + public Class configClass() { + return CanalSourceConfig.class; + } + + @Override + public void init(Config config) throws Exception { + // init config for canal source connector + this.sourceConfig = (CanalSourceIncrementConfig) config; + } + + @Override + public void init(ConnectorContext connectorContext) throws Exception { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + CanalSourceConfig canalSourceConfig = (CanalSourceConfig) sourceConnectorContext.getSourceConfig(); + this.sourceConfig = ConfigUtil.parse(canalSourceConfig.getSourceConfig(), CanalSourceIncrementConfig.class); + if (sourceConnectorContext.getRecordPositionList() != null) { + this.sourceConfig.setRecordPositions(sourceConnectorContext.getRecordPositionList()); + } + + if (StringUtils.isNotEmpty(sourceConfig.getTableFilter())) { + tableFilter = sourceConfig.getTableFilter(); + } + if (StringUtils.isNotEmpty(sourceConfig.getFieldFilter())) { + fieldFilter = sourceConfig.getFieldFilter(); + } + + canalServer = CanalServerWithEmbedded.instance(); + + canalServer.setCanalInstanceGenerator(new CanalInstanceGenerator() { + @Override + public CanalInstance generate(String destination) { + Canal canal = buildCanal(sourceConfig); + + CanalInstanceWithManager instance = new CanalInstanceWithManager(canal, tableFilter) { + + protected CanalHAController initHaController() { + return super.initHaController(); + } + + protected void startEventParserInternal(CanalEventParser parser, boolean isGroup) { + super.startEventParserInternal(parser, isGroup); + + if (eventParser instanceof MysqlEventParser) { + // set eventParser support type + ((MysqlEventParser) eventParser).setSupportBinlogFormats("ROW"); + ((MysqlEventParser) eventParser).setSupportBinlogImages("FULL"); + MysqlEventParser mysqlEventParser = (MysqlEventParser) eventParser; + mysqlEventParser.setParallel(false); + if (StringUtils.isNotEmpty(fieldFilter)) { + mysqlEventParser.setFieldFilter(fieldFilter); + } + + CanalHAController haController = mysqlEventParser.getHaController(); + if (!haController.isStart()) { + haController.start(); + } + } + } + }; + return instance; + } + }); + DatabaseConnection.sourceConfig = sourceConfig.getSourceConnectorConfig(); + DatabaseConnection.initSourceConnection(); + tableMgr = new RdbTableMgr(sourceConfig.getSourceConnectorConfig(), DatabaseConnection.sourceDataSource); + } + + private Canal 
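
The embedded parser configured above only accepts ROW-format, FULL-image binlogs, so the upstream MySQL server is expected to run with binlog_format=ROW and binlog_row_image=FULL. A quick JDBC check (connection settings are placeholders; the MySQL driver must be on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    // Illustrative only: verify the binlog settings the parser expects.
    public class BinlogSettingsCheck {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:mysql://127.0.0.1:3306/information_schema", "user", "password");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery(
                     "SHOW VARIABLES WHERE Variable_name IN ('binlog_format', 'binlog_row_image')")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1) + " = " + rs.getString(2));
                }
            }
        }
    }
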
buildCanal(CanalSourceIncrementConfig sourceConfig) { + long slaveId = 10000; + if (sourceConfig.getSlaveId() != null) { + slaveId = sourceConfig.getSlaveId(); + } + + Canal canal = new Canal(); + canal.setId(sourceConfig.getCanalInstanceId()); + canal.setName(sourceConfig.getDestination()); + canal.setDesc(sourceConfig.getDesc()); + + CanalParameter parameter = new CanalParameter(); + + parameter.setRunMode(RunMode.EMBEDDED); + parameter.setClusterMode(ClusterMode.STANDALONE); + parameter.setMetaMode(MetaMode.MEMORY); + parameter.setHaMode(HAMode.HEARTBEAT); + parameter.setIndexMode(IndexMode.MEMORY); + parameter.setStorageMode(StorageMode.MEMORY); + parameter.setMemoryStorageBufferSize(32 * 1024); + + parameter.setSourcingType(SourcingType.MYSQL); + parameter.setDbAddresses(Collections.singletonList(new InetSocketAddress(sourceConfig.getSourceConnectorConfig().getDbAddress(), + sourceConfig.getSourceConnectorConfig().getDbPort()))); + parameter.setDbUsername(sourceConfig.getSourceConnectorConfig().getUserName()); + parameter.setDbPassword(sourceConfig.getSourceConnectorConfig().getPassWord()); + + // set if enabled gtid mode + parameter.setGtidEnable(sourceConfig.isGTIDMode()); + + // check positions + // example: Arrays.asList("{\"journalName\":\"mysql-bin.000001\",\"position\":6163L,\"timestamp\":1322803601000L}", + // "{\"journalName\":\"mysql-bin.000001\",\"position\":6163L,\"timestamp\":1322803601000L}") + if (sourceConfig.getRecordPositions() != null && !sourceConfig.getRecordPositions().isEmpty()) { + List recordPositions = sourceConfig.getRecordPositions(); + List positions = new ArrayList<>(); + recordPositions.forEach(recordPosition -> { + Map recordPositionMap = new HashMap<>(); + CanalRecordPartition canalRecordPartition = (CanalRecordPartition) (recordPosition.getRecordPartition()); + CanalRecordOffset canalRecordOffset = (CanalRecordOffset) (recordPosition.getRecordOffset()); + recordPositionMap.put("journalName", canalRecordPartition.getJournalName()); + recordPositionMap.put("timestamp", canalRecordPartition.getTimeStamp()); + recordPositionMap.put("position", canalRecordOffset.getOffset()); + // for mariaDB not support gtid mode + if (sourceConfig.isGTIDMode() && !sourceConfig.isMariaDB()) { + String gtidRange = canalRecordOffset.getGtid(); + if (gtidRange != null) { + if (canalRecordOffset.getCurrentGtid() != null) { + gtidRange = EntryParser.replaceGtidRange(canalRecordOffset.getGtid(), canalRecordOffset.getCurrentGtid(), + sourceConfig.getServerUUID()); + } + recordPositionMap.put("gtid", gtidRange); + } + } + positions.add(JsonUtils.toJSONString(recordPositionMap)); + }); + parameter.setPositions(positions); + } + + parameter.setSlaveId(slaveId); + + parameter.setDefaultConnectionTimeoutInSeconds(30); + parameter.setConnectionCharset("UTF-8"); + parameter.setConnectionCharsetNumber((byte) 33); + parameter.setReceiveBufferSize(8 * 1024); + parameter.setSendBufferSize(8 * 1024); + + // heartbeat detect + parameter.setDetectingEnable(false); + + parameter.setDdlIsolation(sourceConfig.isDdlSync()); + parameter.setFilterTableError(sourceConfig.isFilterTableError()); + parameter.setMemoryStorageRawEntry(false); + + canal.setCanalParameter(parameter); + return canal; + } + + + @Override + public void start() throws Exception { + if (running) { + return; + } + tableMgr.start(); + canalServer.start(); + + canalServer.start(sourceConfig.getDestination()); + this.clientIdentity = new ClientIdentity(sourceConfig.getDestination(), sourceConfig.getClientId(), tableFilter); + 
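
The positions handed to CanalParameter above are JSON strings in the shape shown in the inline comment (journalName/position/timestamp, plus gtid when GTID mode is enabled). A small sketch building one with plain Jackson; the connector itself serializes through JsonUtils:

    import java.util.LinkedHashMap;
    import java.util.Map;

    import com.fasterxml.jackson.databind.ObjectMapper;

    // Illustrative only: build one binlog start-position entry.
    public class StartPositionSketch {
        public static void main(String[] args) throws Exception {
            Map<String, Object> position = new LinkedHashMap<>();
            position.put("journalName", "mysql-bin.000001"); // binlog file
            position.put("position", 6163L);                 // offset within the file
            position.put("timestamp", 1322803601000L);       // timestamp fallback
            // position.put("gtid", "uuid:1-100");           // only when GTID mode is enabled

            System.out.println(new ObjectMapper().writeValueAsString(position));
            // {"journalName":"mysql-bin.000001","position":6163,"timestamp":1322803601000}
        }
    }
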
canalServer.subscribe(clientIdentity); + + running = true; + } + + + @Override + public void commit(ConnectRecord record) { + long batchId = Long.parseLong(record.getExtension("messageId")); + int batchIndex = record.getExtension("batchIndex", Integer.class); + int totalBatches = record.getExtension("totalBatches", Integer.class); + if (batchIndex == totalBatches - 1) { + log.debug("ack records batchIndex:{}, totalBatches:{}, batchId:{}", + batchIndex, totalBatches, batchId); + canalServer.ack(clientIdentity, batchId); + } + } + + @Override + public String name() { + return this.sourceConfig.getSourceConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + + } + + @Override + public void stop() { + if (!running) { + return; + } + running = false; + canalServer.stop(sourceConfig.getDestination()); + canalServer.stop(); + } + + @Override + public List poll() { + int emptyTimes = 0; + com.alibaba.otter.canal.protocol.Message message = null; + if (sourceConfig.getBatchTimeout() < 0) { + while (running) { + message = canalServer.getWithoutAck(clientIdentity, sourceConfig.getBatchSize()); + if (message == null || message.getId() == -1L) { // empty + applyWait(emptyTimes++); + } else { + break; + } + } + } else { // perform with timeout + while (running) { + message = + canalServer.getWithoutAck(clientIdentity, sourceConfig.getBatchSize(), sourceConfig.getBatchTimeout(), TimeUnit.MILLISECONDS); + if (message == null || message.getId() == -1L) { // empty + continue; + } + break; + } + } + + List entries; + assert message != null; + if (message.isRaw()) { + entries = new ArrayList<>(message.getRawEntries().size()); + for (ByteString entry : message.getRawEntries()) { + try { + entries.add(Entry.parseFrom(entry)); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } + } + } else { + entries = message.getEntries(); + } + + List result = new ArrayList<>(); + // key: Xid offset + Map> connectorRecordMap = EntryParser.parse(sourceConfig, entries, tableMgr); + + if (!connectorRecordMap.isEmpty()) { + Set>> entrySet = connectorRecordMap.entrySet(); + for (Map.Entry> entry : entrySet) { + List connectRecordList = entry.getValue(); + CanalConnectRecord lastRecord = entry.getValue().get(connectRecordList.size() - 1); + CanalRecordPartition canalRecordPartition = new CanalRecordPartition(); + canalRecordPartition.setServerUUID(sourceConfig.getServerUUID()); + canalRecordPartition.setJournalName(lastRecord.getJournalName()); + canalRecordPartition.setTimeStamp(lastRecord.getExecuteTime()); + // Xid offset with gtid + Long binLogOffset = entry.getKey(); + CanalRecordOffset canalRecordOffset = new CanalRecordOffset(); + canalRecordOffset.setOffset(binLogOffset); + if (StringUtils.isNotEmpty(lastRecord.getGtid()) && StringUtils.isNotEmpty(lastRecord.getCurrentGtid())) { + canalRecordOffset.setGtid(lastRecord.getGtid()); + canalRecordOffset.setCurrentGtid(lastRecord.getCurrentGtid()); + } + + // split record list + List> splitLists = new ArrayList<>(); + for (int i = 0; i < connectRecordList.size(); i += sourceConfig.getBatchSize()) { + int end = Math.min(i + sourceConfig.getBatchSize(), connectRecordList.size()); + List subList = connectRecordList.subList(i, end); + splitLists.add(subList); + } + + for (int i = 0; i < splitLists.size(); i++) { + ConnectRecord connectRecord = new ConnectRecord(canalRecordPartition, canalRecordOffset, System.currentTimeMillis()); + connectRecord.addExtension("messageId", 
String.valueOf(message.getId())); + connectRecord.addExtension("batchIndex", i); + connectRecord.addExtension("totalBatches", splitLists.size()); + connectRecord.setData(JsonUtils.toJSONString(splitLists.get(i)).getBytes(StandardCharsets.UTF_8)); + result.add(connectRecord); + } + } + } else { + // for the message has been filtered need ack message + canalServer.ack(clientIdentity, message.getId()); + } + + return result; + } + + // Handle the situation of no data and avoid empty loop death + private void applyWait(int emptyTimes) { + int newEmptyTimes = Math.min(emptyTimes, maxEmptyTimes); + if (emptyTimes <= 3) { + Thread.yield(); + } else { + LockSupport.parkNanos(1000 * 1000L * newEmptyTimes); + } + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java new file mode 100644 index 0000000000..0ae1f8f8ff --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/CanalFullPositionMgr.java @@ -0,0 +1,250 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
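
Each canal batch (messageId) is split into several ConnectRecords, and the batch is acknowledged back to the embedded server only once, when the last sub-record is committed. The commit condition reduces to this check (standalone illustration):

    // Illustrative only: ack the canal batch when the final sub-record commits.
    public class AckOnLastBatchSketch {

        static boolean shouldAck(int batchIndex, int totalBatches) {
            return batchIndex == totalBatches - 1;
        }

        public static void main(String[] args) {
            int totalBatches = 3;
            for (int batchIndex = 0; batchIndex < totalBatches; batchIndex++) {
                System.out.println("record " + batchIndex + " -> ack canal batch? "
                    + shouldAck(batchIndex, totalBatches));
            }
            // Only the last record (index 2 of 3) triggers canalServer.ack(clientIdentity, batchId).
        }
    }
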
+ */ + +package org.apache.eventmesh.connector.canal.source.position; + +import org.apache.eventmesh.common.AbstractComponent; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalSourceFullConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.JobRdbFullPosition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbColumnDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.Constants; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.offset.canal.CanalFullRecordOffset; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.canal.DatabaseConnection; +import org.apache.eventmesh.connector.canal.source.table.RdbSimpleTable; +import org.apache.eventmesh.connector.canal.source.table.RdbTableMgr; + +import org.apache.commons.lang3.StringUtils; + +import java.sql.JDBCType; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.LinkedHashMap; +import java.util.Map; + +import javax.sql.DataSource; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CanalFullPositionMgr extends AbstractComponent { + + private final CanalSourceFullConfig config; + private final Map positions = new LinkedHashMap<>(); + private final RdbTableMgr tableMgr; + + public CanalFullPositionMgr(CanalSourceFullConfig config, RdbTableMgr tableMgr) { + this.config = config; + this.tableMgr = tableMgr; + } + + @Override + protected void run() throws Exception { + if (config == null || config.getSourceConnectorConfig() == null || config.getSourceConnectorConfig().getDatabases() == null) { + log.info("config or database is null"); + return; + } + prepareRecordPosition(); + initPositions(); + } + + public void prepareRecordPosition() { + if (config.getStartPosition() != null && !config.getStartPosition().isEmpty()) { + for (RecordPosition record : config.getStartPosition()) { + CanalFullRecordOffset offset = (CanalFullRecordOffset) record.getRecordOffset(); + RdbSimpleTable table = new RdbSimpleTable(offset.getPosition().getSchema(), offset.getPosition().getTableName()); + positions.put(table, offset.getPosition()); + } + } + } + + public JobRdbFullPosition getPosition(RdbSimpleTable table) { + return positions.get(table); + } + + public boolean isFinished() { + for (JobRdbFullPosition position : positions.values()) { + if (!position.isFinished()) { + log.info("schema [{}] table [{}] is not finish", position.getSchema(), position.getTableName()); + return false; + } + } + return true; + } + + private void initPositions() { + for (RdbDBDefinition database : config.getSourceConnectorConfig().getDatabases()) { + for (RdbTableDefinition table : database.getTables()) { + try { + RdbSimpleTable simpleTable = new RdbSimpleTable(database.getSchemaName(), table.getTableName()); + RdbTableDefinition tableDefinition; + if ((tableDefinition = tableMgr.getTable(simpleTable)) == null) { + log.error("db [{}] table [{}] definition is null", database.getSchemaName(), table.getTableName()); + continue; + } + log.info("init position of data [{}] table [{}]", database.getSchemaName(), table.getTableName()); + + JobRdbFullPosition recordPosition = 
positions.get(simpleTable); + if (recordPosition == null || !recordPosition.isFinished()) { + positions.put(simpleTable, + fetchTableInfo(DatabaseConnection.sourceDataSource, (MySQLTableDef) tableDefinition, recordPosition)); + } + } catch (Exception e) { + log.error("process schema [{}] table [{}] position fail", database.getSchemaName(), table.getTableName(), e); + } + + } + } + } + + private JobRdbFullPosition fetchTableInfo(DataSource dataSource, MySQLTableDef tableDefinition, JobRdbFullPosition recordPosition) + throws SQLException { + TableFullPosition position = new TableFullPosition(); + Map preMinPrimaryKeys = new LinkedHashMap<>(); + Map preMaxPrimaryKeys = new LinkedHashMap<>(); + for (String pk : tableDefinition.getPrimaryKeys()) { + Object min = fetchMinPrimaryKey(dataSource, tableDefinition, preMinPrimaryKeys, pk); + Object max = fetchMaxPrimaryKey(dataSource, tableDefinition, preMaxPrimaryKeys, pk); + preMinPrimaryKeys.put(pk, min); + preMaxPrimaryKeys.put(pk, max); + position.getCurPrimaryKeyCols().put(pk, min); + position.getMinPrimaryKeyCols().put(pk, min); + position.getMaxPrimaryKeyCols().put(pk, max); + } + JobRdbFullPosition jobRdbFullPosition = new JobRdbFullPosition(); + if (recordPosition != null) { + if (StringUtils.isNotBlank(recordPosition.getPrimaryKeyRecords())) { + TableFullPosition record = JsonUtils.parseObject(recordPosition.getPrimaryKeyRecords(), TableFullPosition.class); + if (record != null && record.getCurPrimaryKeyCols() != null && !record.getCurPrimaryKeyCols().isEmpty()) { + position.setCurPrimaryKeyCols(record.getCurPrimaryKeyCols()); + } + } + jobRdbFullPosition.setPercent(recordPosition.getPercent()); + } + long rowCount = queryCurTableRowCount(dataSource, tableDefinition); + jobRdbFullPosition.setSchema(tableDefinition.getSchemaName()); + jobRdbFullPosition.setTableName(tableDefinition.getTableName()); + jobRdbFullPosition.setMaxCount(rowCount); + jobRdbFullPosition.setPrimaryKeyRecords(JsonUtils.toJSONString(position)); + return jobRdbFullPosition; + } + + + private long queryCurTableRowCount(DataSource datasource, MySQLTableDef tableDefinition) throws SQLException { + String sql = "select `AVG_ROW_LENGTH`,`DATA_LENGTH` from information_schema.TABLES where `TABLE_SCHEMA`='" + tableDefinition.getSchemaName() + + "' and `TABLE_NAME`='" + tableDefinition.getTableName() + "'"; + try (Statement statement = datasource.getConnection().createStatement(); ResultSet resultSet = statement.executeQuery(sql)) { + long result = 0L; + if (resultSet.next()) { + long avgRowLength = resultSet.getLong("AVG_ROW_LENGTH"); + long dataLength = resultSet.getLong("DATA_LENGTH"); + if (avgRowLength != 0L) { + result = dataLength / avgRowLength; + } + } + return result; + } + } + + private void appendPrePrimaryKey(Map preMap, StringBuilder sql) { + if (preMap != null && !preMap.isEmpty()) { + sql.append(" WHERE "); + boolean first = true; + for (Map.Entry entry : preMap.entrySet()) { + if (first) { + first = false; + } else { + sql.append(" AND "); + } + sql.append(Constants.MySQLQuot).append(entry.getKey()).append(Constants.MySQLQuot).append("=?"); + } + } + } + + private void setValue2Statement(PreparedStatement ps, Map preMap, MySQLTableDef tableDefinition) throws SQLException { + if (preMap != null && !preMap.isEmpty()) { + int index = 1; + for (Map.Entry entry : preMap.entrySet()) { + RdbColumnDefinition def = tableDefinition.getColumnDefinitions().get(entry.getKey()); + ps.setObject(index, entry.getValue(), def.getJdbcType().getVendorTypeNumber()); + ++index; + } 
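
The max-count used for progress above is an estimate derived from information_schema (DATA_LENGTH divided by AVG_ROW_LENGTH), not an exact COUNT(*). The same lookup written as a standalone helper with bind parameters instead of string concatenation (a sketch, not the connector's code):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    import javax.sql.DataSource;

    // Illustrative only: estimate a table's row count from information_schema.
    public class EstimatedRowCountSketch {

        static long estimateRows(DataSource ds, String schema, String table) throws Exception {
            String sql = "select AVG_ROW_LENGTH, DATA_LENGTH from information_schema.TABLES "
                + "where TABLE_SCHEMA = ? and TABLE_NAME = ?";
            try (Connection conn = ds.getConnection();
                 PreparedStatement ps = conn.prepareStatement(sql)) {
                ps.setString(1, schema);
                ps.setString(2, table);
                try (ResultSet rs = ps.executeQuery()) {
                    if (rs.next() && rs.getLong("AVG_ROW_LENGTH") != 0L) {
                        return rs.getLong("DATA_LENGTH") / rs.getLong("AVG_ROW_LENGTH");
                    }
                }
            }
            return 0L;
        }
    }
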
+ } + } + + private Object fetchMinPrimaryKey(DataSource dataSource, MySQLTableDef tableDefinition, Map prePrimary, String curPrimaryKeyCol) + throws SQLException { + StringBuilder builder = new StringBuilder(); + builder.append("SELECT MIN(").append(Constants.MySQLQuot).append(curPrimaryKeyCol).append(Constants.MySQLQuot) + .append(") min_primary_key FROM").append(Constants.MySQLQuot).append(tableDefinition.getSchemaName()).append(Constants.MySQLQuot) + .append(".").append(Constants.MySQLQuot).append(tableDefinition.getTableName()).append(Constants.MySQLQuot); + appendPrePrimaryKey(prePrimary, builder); + String sql = builder.toString(); + log.info("fetch min primary sql [{}]", sql); + try (PreparedStatement statement = dataSource.getConnection().prepareStatement(sql)) { + setValue2Statement(statement, prePrimary, tableDefinition); + try (ResultSet resultSet = statement.executeQuery()) { + if (resultSet.next()) { + RdbColumnDefinition columnDefinition = tableDefinition.getColumnDefinitions().get(curPrimaryKeyCol); + if (columnDefinition.getJdbcType() == JDBCType.TIMESTAMP) { + return resultSet.getString("min_primary_key"); + } else { + return resultSet.getObject("min_primary_key"); + } + } + } + } + return null; + } + + private Object fetchMaxPrimaryKey(DataSource dataSource, MySQLTableDef tableDefinition, Map prePrimary, String curPrimaryKeyCol) + throws SQLException { + StringBuilder builder = new StringBuilder(); + builder.append("SELECT MAX(").append(Constants.MySQLQuot).append(curPrimaryKeyCol).append(Constants.MySQLQuot) + .append(") max_primary_key FROM").append(Constants.MySQLQuot).append(tableDefinition.getSchemaName()).append(Constants.MySQLQuot) + .append(".").append(Constants.MySQLQuot).append(tableDefinition.getTableName()).append(Constants.MySQLQuot); + appendPrePrimaryKey(prePrimary, builder); + String sql = builder.toString(); + log.info("fetch max primary sql [{}]", sql); + try (PreparedStatement statement = dataSource.getConnection().prepareStatement(sql)) { + setValue2Statement(statement, prePrimary, tableDefinition); + try (ResultSet resultSet = statement.executeQuery()) { + if (resultSet.next()) { + RdbColumnDefinition columnDefinition = tableDefinition.getColumnDefinitions().get(curPrimaryKeyCol); + if (columnDefinition.getJdbcType() == JDBCType.TIMESTAMP) { + return resultSet.getString("max_primary_key"); + } else { + return resultSet.getObject("max_primary_key"); + } + } + } + } + return null; + } + + + @Override + protected void shutdown() throws Exception { + + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/TableFullPosition.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/TableFullPosition.java new file mode 100644 index 0000000000..b1a8024ec5 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/position/TableFullPosition.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
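
For a composite primary key, each successive key column is scanned with the earlier columns pinned through appendPrePrimaryKey. For a hypothetical table `db`.`t` with primary key (k1, k2), the generated statements look roughly like this:

    // Illustrative only: approximate SQL shapes built by fetchMinPrimaryKey/fetchMaxPrimaryKey.
    public class PrimaryKeyRangeSqlSketch {
        public static void main(String[] args) {
            // first key column: no predicate yet
            System.out.println("SELECT MIN(`k1`) min_primary_key FROM `db`.`t`");
            // second key column: earlier key columns bound to the values found before
            System.out.println("SELECT MIN(`k2`) min_primary_key FROM `db`.`t` WHERE `k1` = ?");
            // the MAX(...) variants are built the same way
        }
    }
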
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eventmesh.connector.canal.source.position;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import lombok.Data;
+
+@Data
+public class TableFullPosition {
+    private Map<String, Object> curPrimaryKeyCols = new LinkedHashMap<>();
+    private Map<String, Object> minPrimaryKeyCols = new LinkedHashMap<>();
+    private Map<String, Object> maxPrimaryKeyCols = new LinkedHashMap<>();
+}
diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/ConnectorRecordPartition.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/table/RdbSimpleTable.java
similarity index 56%
rename from eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/ConnectorRecordPartition.java
rename to eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/table/RdbSimpleTable.java
index 3308654385..5b9c35fff3 100644
--- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/ConnectorRecordPartition.java
+++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/table/RdbSimpleTable.java
@@ -15,53 +15,45 @@
 * limitations under the License.
*/ -package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; +package org.apache.eventmesh.connector.canal.source.table; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; -import java.util.Map; import java.util.Objects; -/** - * extend record partition - */ -public class ConnectorRecordPartition extends RecordPartition { - - /** - * connect name - */ - private String connectorName; - - public ConnectorRecordPartition() { +import lombok.Data; +@Data +public class RdbSimpleTable extends RdbTableDefinition { + public RdbSimpleTable(String database, String schema, String tableName) { + this.schemaName = schema; + this.tableName = tableName; + this.database = database; } - public ConnectorRecordPartition(String connectorName, Map partition) { - super(partition); - this.connectorName = connectorName; + public RdbSimpleTable(String schema, String tableName) { + this(null, schema, tableName); } - public String getConnectorName() { - return connectorName; - } + private final String database; @Override public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof ConnectorRecordPartition)) { + if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)) { return false; } - ConnectorRecordPartition that = (ConnectorRecordPartition) o; - return this.connectorName.equals(that.connectorName); + RdbSimpleTable that = (RdbSimpleTable) o; + return Objects.equals(database, that.database); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), connectorName); + return Objects.hash(super.hashCode(), database); } } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/table/RdbTableMgr.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/table/RdbTableMgr.java new file mode 100644 index 0000000000..de7a45dc99 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/source/table/RdbTableMgr.java @@ -0,0 +1,182 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.canal.source.table; + +import org.apache.eventmesh.common.AbstractComponent; +import org.apache.eventmesh.common.config.connector.rdb.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.canal.CanalMySQLType; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbDBDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.RdbTableDefinition; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLColumnDef; +import org.apache.eventmesh.common.config.connector.rdb.canal.mysql.MySQLTableDef; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.connector.canal.SqlUtils; + +import java.sql.JDBCType; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import javax.sql.DataSource; + +import lombok.extern.slf4j.Slf4j; + +/** + * Description: + */ +@Slf4j + +public class RdbTableMgr extends AbstractComponent { + private final JdbcConfig config; + private final Map tables = new HashMap<>(); + private final DataSource dataSource; + + public RdbTableMgr(JdbcConfig config, DataSource dataSource) { + this.config = config; + this.dataSource = dataSource; + } + + public RdbTableDefinition getTable(String schema, String tableName) { + return getTable(new RdbSimpleTable(schema, tableName)); + } + + public RdbTableDefinition getTable(RdbSimpleTable table) { + return tables.get(table); + } + + @Override + protected void run() { + if (config != null && config.getDatabases() != null) { + for (RdbDBDefinition db : config.getDatabases()) { + if (db.getTables() == null) { + log.warn("init db [{}] position, but it's tables are null", db.getSchemaName()); + continue; + } + for (RdbTableDefinition table : db.getTables()) { + try { + MySQLTableDef mysqlTable = new MySQLTableDef(); + mysqlTable.setSchemaName(db.getSchemaName()); + mysqlTable.setTableName(table.getTableName()); + List tables = Collections.singletonList(table.getTableName()); + Map> primaryKeys = queryTablePrimaryKey(db.getSchemaName(), tables); + Map> columns = queryColumns(db.getSchemaName(), tables); + if (primaryKeys == null || primaryKeys.isEmpty() || primaryKeys.get(table.getTableName()) == null) { + log.warn("init db [{}] table [{}] info, and primary keys are empty", db.getSchemaName(), table.getTableName()); + } else { + mysqlTable.setPrimaryKeys(new HashSet<>(primaryKeys.get(table.getTableName()))); + } + if (columns == null || columns.isEmpty() || columns.get(table.getTableName()) == null) { + log.warn("init db [{}] table [{}] info, and columns are empty", db.getSchemaName(), table.getTableName()); + throw new EventMeshException("db [{}] table [{}] columns are empty"); + } else { + LinkedHashMap cols = new LinkedHashMap<>(); + columns.get(table.getTableName()).forEach(x -> cols.put(x.getName(), x)); + mysqlTable.setColumnDefinitions(cols); + } + + this.tables.put(new RdbSimpleTable(db.getSchemaName(), table.getTableName()), mysqlTable); + } catch (SQLException e) { + log.error("init rdb table schema [{}] table [{}] fail", db.getSchemaName(), table.getTableName(), e); + throw new EventMeshException(e); + } + } + + } + } + } + + private Map> queryTablePrimaryKey(String schema, List tables) throws SQLException { + Map> primaryKeys = new LinkedHashMap<>(); + 
String prepareTables = SqlUtils.genPrepareSqlOfInClause(tables.size()); + String sql = "select L.TABLE_NAME,L.COLUMN_NAME,R.CONSTRAINT_TYPE from " + + "INFORMATION_SCHEMA.KEY_COLUMN_USAGE L left join INFORMATION_SCHEMA.TABLE_CONSTRAINTS R on L" + + ".TABLE_SCHEMA = R.TABLE_SCHEMA and L.TABLE_NAME = R.TABLE_NAME and L.CONSTRAINT_CATALOG = R" + + ".CONSTRAINT_CATALOG and L.CONSTRAINT_SCHEMA = R.CONSTRAINT_SCHEMA and L.CONSTRAINT_NAME = R" + + ".CONSTRAINT_NAME where L.TABLE_SCHEMA = ? and L.TABLE_NAME in " + prepareTables + " and R" + + ".CONSTRAINT_TYPE IN ('PRIMARY KEY') order by L.ORDINAL_POSITION asc"; + try (PreparedStatement statement = dataSource.getConnection().prepareStatement(sql)) { + statement.setString(1, schema); + SqlUtils.setInClauseParameters(statement, 2, tables); + ResultSet resultSet = statement.executeQuery(); + if (resultSet == null) { + return null; + } + while (resultSet.next()) { + String tableName = resultSet.getString("TABLE_NAME"); + String colName = resultSet.getString("COLUMN_NAME"); + primaryKeys.compute(tableName, (k, v) -> { + if (v == null) { + v = new LinkedList<>(); + } + v.add(colName); + return v; + }); + } + resultSet.close(); + } + return primaryKeys; + } + + private Map> queryColumns(String schema, List tables) throws SQLException { + String prepareTables = SqlUtils.genPrepareSqlOfInClause(tables.size()); + String sql = "select TABLE_SCHEMA,TABLE_NAME,COLUMN_NAME,IS_NULLABLE,DATA_TYPE,CHARACTER_MAXIMUM_LENGTH," + + "CHARACTER_OCTET_LENGTH,NUMERIC_SCALE,NUMERIC_PRECISION,DATETIME_PRECISION,CHARACTER_SET_NAME," + + "COLLATION_NAME,COLUMN_TYPE,COLUMN_DEFAULT,COLUMN_COMMENT,ORDINAL_POSITION,EXTRA from " + + "INFORMATION_SCHEMA.COLUMNS where TABLE_SCHEMA = ? and TABLE_NAME in " + prepareTables + " order by " + "ORDINAL_POSITION asc"; + Map> cols = new LinkedHashMap<>(); + try (PreparedStatement statement = dataSource.getConnection().prepareStatement(sql)) { + statement.setString(1, schema); + SqlUtils.setInClauseParameters(statement, 2, tables); + ResultSet resultSet = statement.executeQuery(); + if (resultSet == null) { + return null; + } + while (resultSet.next()) { + String dataType = resultSet.getString("DATA_TYPE"); + JDBCType jdbcType = SqlUtils.toJDBCType(dataType); + MySQLColumnDef col = new MySQLColumnDef(); + col.setJdbcType(jdbcType); + col.setType(CanalMySQLType.valueOfCode(dataType)); + String colName = resultSet.getString("COLUMN_NAME"); + col.setName(colName); + String tableName = resultSet.getString("TABLE_NAME"); + cols.compute(tableName, (k, v) -> { + if (v == null) { + v = new LinkedList<>(); + } + v.add(col); + return v; + }); + } + resultSet.close(); + } + return cols; + } + + @Override + protected void shutdown() throws Exception { + + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/AbstractSqlTemplate.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/AbstractSqlTemplate.java new file mode 100644 index 0000000000..10c647c8f1 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/AbstractSqlTemplate.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
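
The information_schema lookups above splice an IN clause of "?" placeholders into the SQL and then bind the table names positionally via SqlUtils. Hypothetical helpers showing the same technique (the real implementations live in SqlUtils, whose code is not part of this patch excerpt):

    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.IntStream;

    // Illustrative only: build and bind an IN clause of placeholders.
    public class InClauseSketch {

        static String placeholders(int n) {
            return IntStream.range(0, n).mapToObj(i -> "?")
                .collect(Collectors.joining(",", "(", ")"));   // e.g. 3 -> "(?,?,?)"
        }

        static void bindFrom(PreparedStatement ps, int startIndex, List<String> values) throws SQLException {
            for (int i = 0; i < values.size(); i++) {
                ps.setString(startIndex + i, values.get(i));   // first placeholder after the schema parameter
            }
        }

        public static void main(String[] args) {
            System.out.println(placeholders(Arrays.asList("t1", "t2", "t3").size())); // (?,?,?)
        }
    }
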
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.template; + +/** + * implement SQL CRUD with standard SQL + */ +public abstract class AbstractSqlTemplate implements SqlTemplate { + + private static final String DOT = "."; + + public String getSelectSql(String schemaName, String tableName, String[] pkNames, String[] columnNames) { + StringBuilder sql = new StringBuilder("select "); + int size = columnNames.length; + for (int i = 0; i < size; i++) { + sql.append(appendEscape(columnNames[i])).append((i + 1 < size) ? " , " : ""); + } + + sql.append(" from ").append(getFullName(schemaName, tableName)).append(" where ( "); + appendColumnEquals(sql, pkNames, "and"); + sql.append(" ) "); + return sql.toString().intern(); + } + + public String getUpdateSql(String schemaName, String tableName, String[] pkNames, String[] columnNames, boolean updatePks, String shardColumn) { + StringBuilder sql = new StringBuilder("update " + getFullName(schemaName, tableName) + " set "); + appendExcludeSingleShardColumnEquals(sql, columnNames, ",", updatePks, shardColumn); + sql.append(" where ("); + appendColumnEquals(sql, pkNames, "and"); + sql.append(")"); + return sql.toString().intern(); + } + + public String getInsertSql(String schemaName, String tableName, String[] pkNames, String[] columnNames) { + StringBuilder sql = new StringBuilder("insert into " + getFullName(schemaName, tableName) + "("); + String[] allColumns = new String[pkNames.length + columnNames.length]; + System.arraycopy(columnNames, 0, allColumns, 0, columnNames.length); + System.arraycopy(pkNames, 0, allColumns, columnNames.length, pkNames.length); + + int size = allColumns.length; + for (int i = 0; i < size; i++) { + sql.append(appendEscape(allColumns[i])).append((i + 1 < size) ? "," : ""); + } + + sql.append(") values ("); + appendColumnQuestions(sql, allColumns); + sql.append(")"); + return sql.toString().intern(); + } + + public String getDeleteSql(String schemaName, String tableName, String[] pkNames) { + StringBuilder sql = new StringBuilder("delete from " + getFullName(schemaName, tableName) + " where "); + appendColumnEquals(sql, pkNames, "and"); + return sql.toString().intern(); + } + + protected String getFullName(String schemaName, String tableName) { + StringBuilder sb = new StringBuilder(); + if (schemaName != null) { + sb.append(appendEscape(schemaName)).append(DOT); + } + sb.append(appendEscape(tableName)); + return sb.toString().intern(); + } + + // ================ helper method ============ + + protected String appendEscape(String columnName) { + return columnName; + } + + protected void appendColumnQuestions(StringBuilder sql, String[] columns) { + int size = columns.length; + for (int i = 0; i < size; i++) { + sql.append("?").append((i + 1 < size) ? 
" , " : ""); + } + } + + protected void appendColumnEquals(StringBuilder sql, String[] columns, String separator) { + int size = columns.length; + for (int i = 0; i < size; i++) { + sql.append(" ").append(appendEscape(columns[i])).append(" = ").append("? "); + if (i != size - 1) { + sql.append(separator); + } + } + } + + protected void appendExcludeSingleShardColumnEquals(StringBuilder sql, String[] columns, String separator, boolean updatePks, + String excludeShardColumn) { + int size = columns.length; + for (int i = 0; i < size; i++) { + if (!updatePks && columns[i].equals(excludeShardColumn)) { + continue; + } + sql.append(" ").append(appendEscape(columns[i])).append(" = ").append("? "); + if (i != size - 1) { + sql.append(separator); + } + } + } +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/MysqlSqlTemplate.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/MysqlSqlTemplate.java new file mode 100644 index 0000000000..37b45c746f --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/MysqlSqlTemplate.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.template; + +public class MysqlSqlTemplate extends AbstractSqlTemplate { + + private static final String ESCAPE = "`"; + + public String getMergeSql(String schemaName, String tableName, String[] pkNames, String[] columnNames, + String[] viewColumnNames, boolean includePks, String shardColumn) { + StringBuilder sql = new StringBuilder("insert into " + getFullName(schemaName, tableName) + "("); + int size = columnNames.length; + for (int i = 0; i < size; i++) { + sql.append(appendEscape(columnNames[i])).append(" , "); + } + size = pkNames.length; + for (int i = 0; i < size; i++) { + sql.append(appendEscape(pkNames[i])).append((i + 1 < size) ? " , " : ""); + } + + sql.append(") values ("); + size = columnNames.length; + for (int i = 0; i < size; i++) { + sql.append("?").append(" , "); + } + size = pkNames.length; + for (int i = 0; i < size; i++) { + sql.append("?").append((i + 1 < size) ? " , " : ""); + } + sql.append(")"); + sql.append(" on duplicate key update "); + + size = columnNames.length; + for (int i = 0; i < size; i++) { + if (!includePks && columnNames[i].equals(shardColumn)) { + continue; + } + + sql.append(appendEscape(columnNames[i])) + .append("=values(") + .append(appendEscape(columnNames[i])) + .append(")"); + if (includePks) { + sql.append(" , "); + } else { + sql.append((i + 1 < size) ? 
" , " : ""); + } + } + + if (includePks) { + size = pkNames.length; + for (int i = 0; i < size; i++) { + sql.append(appendEscape(pkNames[i])).append("=values(").append(appendEscape(pkNames[i])).append(")"); + sql.append((i + 1 < size) ? " , " : ""); + } + } + + return sql.toString().intern(); + } + + protected String appendEscape(String columnName) { + return ESCAPE + columnName + ESCAPE; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/SqlTemplate.java b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/SqlTemplate.java new file mode 100644 index 0000000000..5b92cac2eb --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/java/org/apache/eventmesh/connector/canal/template/SqlTemplate.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.canal.template; + +/** + * SqlTemplate + */ +public interface SqlTemplate { + + public String getSelectSql(String schemaName, String tableName, String[] pkNames, String[] columnNames); + + public String getUpdateSql(String schemaName, String tableName, String[] pkNames, String[] columnNames, boolean updatePks, String shardColumn); + + public String getDeleteSql(String schemaName, String tableName, String[] pkNames); + + public String getInsertSql(String schemaName, String tableName, String[] pkNames, String[] columnNames); + + public String getMergeSql(String schemaName, String tableName, String[] pkNames, String[] columnNames, + String[] viewColumnNames, boolean updatePks, String shardColumn); +} diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService new file mode 100644 index 0000000000..f55b34d852 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +MySQL-Source=org.apache.eventmesh.connector.canal.source.connector.CanalSourceConnector +MySQL-Sink=org.apache.eventmesh.connector.canal.sink.connector.CanalSinkConnector diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/server-config.yml b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/server-config.yml new file mode 100644 index 0000000000..5f66dd0f68 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/server-config.yml @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +sourceEnable: true +sinkEnable: true diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/sink-config.yml b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/sink-config.yml new file mode 100644 index 0000000000..210361dc28 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/sink-config.yml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +pubSubConfig: + meshAddress: 127.0.0.1:10000 + subject: TopicTest + idc: FT + env: PRD + group: rocketmqSink + appId: 5031 + userName: rocketmqSinkUser + passWord: rocketmqPassWord +connectorConfig: + connectorName: rocketmqSink + nameServer: 127.0.0.1:9876 + topic: TopicTest diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/source-config.yml b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/source-config.yml new file mode 100644 index 0000000000..7a7880b877 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/main/resources/source-config.yml @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +pubSubConfig: + meshAddress: 127.0.0.1:10000 + subject: TopicTest + idc: FT + env: PRD + group: rocketmqSource + appId: 5032 + userName: rocketmqSourceUser + passWord: rocketmqPassWord +connectorConfig: + connectorName: rocketmqSource + nameserver: 127.0.0.1:9876 + topic: TopicTest + commitOffsetIntervalMs: 5000 +offsetStorageConfig: + offsetStorageType: nacos + offsetStorageAddr: 127.0.0.1:8848 + extensions: { + #same with topic + dataId: TopicTest, + #same with group + group: rocketmqSource + } diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/test/resources/sink-config.yml b/eventmesh-connectors/eventmesh-connector-canal/src/test/resources/sink-config.yml new file mode 100644 index 0000000000..210361dc28 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/test/resources/sink-config.yml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +pubSubConfig: + meshAddress: 127.0.0.1:10000 + subject: TopicTest + idc: FT + env: PRD + group: rocketmqSink + appId: 5031 + userName: rocketmqSinkUser + passWord: rocketmqPassWord +connectorConfig: + connectorName: rocketmqSink + nameServer: 127.0.0.1:9876 + topic: TopicTest diff --git a/eventmesh-connectors/eventmesh-connector-canal/src/test/resources/source-config.yml b/eventmesh-connectors/eventmesh-connector-canal/src/test/resources/source-config.yml new file mode 100644 index 0000000000..7a7880b877 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-canal/src/test/resources/source-config.yml @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +pubSubConfig: + meshAddress: 127.0.0.1:10000 + subject: TopicTest + idc: FT + env: PRD + group: rocketmqSource + appId: 5032 + userName: rocketmqSourceUser + passWord: rocketmqPassWord +connectorConfig: + connectorName: rocketmqSource + nameserver: 127.0.0.1:9876 + topic: TopicTest + commitOffsetIntervalMs: 5000 +offsetStorageConfig: + offsetStorageType: nacos + offsetStorageAddr: 127.0.0.1:8848 + extensions: { + #same with topic + dataId: TopicTest, + #same with group + group: rocketmqSource + } diff --git a/eventmesh-connectors/eventmesh-connector-chatgpt/build.gradle b/eventmesh-connectors/eventmesh-connector-chatgpt/build.gradle index 7de61f3cbe..95b80d02e8 100644 --- a/eventmesh-connectors/eventmesh-connector-chatgpt/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-chatgpt/build.gradle @@ -19,10 +19,10 @@ dependencies { api project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation project(":eventmesh-common") implementation 'com.theokanning.openai-gpt3-java:service:0.18.2' - implementation 'io.cloudevents:cloudevents-http-vertx:2.3.0' + implementation 'io.cloudevents:cloudevents-http-vertx:3.0.0' implementation 'io.vertx:vertx-web:4.5.8' testImplementation "org.apache.httpcomponents:httpclient" compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' -} \ No newline at end of file +} diff --git a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/config/ChatGPTServerConfig.java b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/config/ChatGPTServerConfig.java index 7d162920d7..a8d026067e 100644 --- a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/config/ChatGPTServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/config/ChatGPTServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.chatgpt.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import 
org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/config/ChatGPTSourceConfig.java b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/config/ChatGPTSourceConfig.java index 9596866910..21ddd84dc6 100644 --- a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/config/ChatGPTSourceConfig.java +++ b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/config/ChatGPTSourceConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.chatgpt.source.config; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java index a947bc135d..1b6955feb2 100644 --- a/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-chatgpt/src/main/java/org/apache/eventmesh/connector/chatgpt/source/connector/ChatGPTSourceConnector.java @@ -18,6 +18,7 @@ package org.apache.eventmesh.connector.chatgpt.source.connector; import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.common.exception.EventMeshException; import org.apache.eventmesh.connector.chatgpt.source.config.ChatGPTSourceConfig; import org.apache.eventmesh.connector.chatgpt.source.dto.ChatGPTRequestDTO; @@ -25,7 +26,6 @@ import org.apache.eventmesh.connector.chatgpt.source.handlers.ChatHandler; import org.apache.eventmesh.connector.chatgpt.source.handlers.ParseHandler; import org.apache.eventmesh.connector.chatgpt.source.managers.OpenaiManager; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -61,8 +61,6 @@ @Slf4j public class ChatGPTSourceConnector implements Source { - private static final int DEFAULT_BATCH_SIZE = 10; - private ChatGPTSourceConfig sourceConfig; private BlockingQueue queue; private HttpServer server; @@ -79,6 +77,9 @@ public class ChatGPTSourceConnector implements Source { private static final String APPLICATION_JSON = "application/json"; private static final String TEXT_PLAIN = "text/plain"; + private int maxBatchSize; + private long maxPollWaitTime; + @Override public Class configClass() { @@ -129,7 +130,9 @@ private void doInit() { if (StringUtils.isNotEmpty(parsePromptTemplateStr)) { this.parseHandler = new ParseHandler(openaiManager, parsePromptTemplateStr); } - this.queue = new LinkedBlockingQueue<>(1024); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); + this.queue = new 
LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); final Vertx vertx = Vertx.vertx(); final Router router = Router.router(vertx); router.route().path(this.sourceConfig.connectorConfig.getPath()).method(HttpMethod.POST).handler(BodyHandler.create()).handler(ctx -> { @@ -224,6 +227,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { Throwable t = this.server.close().cause(); @@ -234,14 +242,21 @@ public void stop() { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int i = 0; i < DEFAULT_BATCH_SIZE; i++) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int i = 0; i < maxBatchSize; i++) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-dingtalk/build.gradle b/eventmesh-connectors/eventmesh-connector-dingtalk/build.gradle index 46d16c3b35..dfe1a40e95 100644 --- a/eventmesh-connectors/eventmesh-connector-dingtalk/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-dingtalk/build.gradle @@ -25,7 +25,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-sdks:eventmesh-sdk-java") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation "com.aliyun:dingtalk:2.0.61" + implementation "com.aliyun:dingtalk:2.1.27" implementation 'com.google.guava:guava' compileOnly 'org.projectlombok:lombok' diff --git a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/config/DingDingConnectServerConfig.java b/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/config/DingDingConnectServerConfig.java index ec744922c2..d9657bc23b 100644 --- a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/config/DingDingConnectServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/config/DingDingConnectServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.dingtalk.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnector.java b/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnector.java index 754461875f..8c5a1e6611 100644 --- a/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnector.java +++ 
b/eventmesh-connectors/eventmesh-connector-dingtalk/src/main/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnector.java @@ -17,11 +17,11 @@ package org.apache.eventmesh.connector.dingtalk.sink.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.dingtalk.DingDingSinkConfig; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.dingtalk.common.constants.ConnectRecordExtensionKeys; import org.apache.eventmesh.connector.dingtalk.config.DingDingMessageTemplateType; -import org.apache.eventmesh.connector.dingtalk.sink.config.DingDingSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -103,6 +103,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { isRunning = false; diff --git a/eventmesh-connectors/eventmesh-connector-dingtalk/src/test/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-dingtalk/src/test/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnectorTest.java index 38a136748c..736137629d 100644 --- a/eventmesh-connectors/eventmesh-connector-dingtalk/src/test/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-dingtalk/src/test/java/org/apache/eventmesh/connector/dingtalk/sink/connector/DingDingSinkConnectorTest.java @@ -21,12 +21,10 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import org.apache.eventmesh.common.config.connector.dingtalk.DingDingSinkConfig; import org.apache.eventmesh.connector.dingtalk.common.constants.ConnectRecordExtensionKeys; import org.apache.eventmesh.connector.dingtalk.config.DingDingMessageTemplateType; -import org.apache.eventmesh.connector.dingtalk.sink.config.DingDingSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.lang.reflect.Field; @@ -90,9 +88,7 @@ public void testSendMessageToDingDing() throws Exception { final int times = 3; List records = new ArrayList<>(); for (int i = 0; i < times; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8)); connectRecord.addExtension(ConnectRecordExtensionKeys.DINGTALK_TEMPLATE_TYPE, DingDingMessageTemplateType.PLAIN_TEXT.getTemplateType()); diff --git a/eventmesh-connectors/eventmesh-connector-file/build.gradle b/eventmesh-connectors/eventmesh-connector-file/build.gradle index 11eaa5b7e0..ea441248b8 100644 --- a/eventmesh-connectors/eventmesh-connector-file/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-file/build.gradle @@ -16,9 +16,10 @@ */ dependencies { + 
implementation project(":eventmesh-common") api project(":eventmesh-openconnect:eventmesh-openconnect-java") - testImplementation('org.junit.jupiter:junit-jupiter') - testImplementation('org.mockito:mockito-junit-jupiter') + testImplementation 'org.junit.jupiter:junit-jupiter' + testImplementation 'org.mockito:mockito-junit-jupiter' compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' } \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/config/FileServerConfig.java b/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/config/FileServerConfig.java index b6b506521e..ad25b8a410 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/config/FileServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/config/FileServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.file.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/connector/FileSinkConnector.java b/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/connector/FileSinkConnector.java index f1d1ccc57b..fabae0d43a 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/connector/FileSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/sink/connector/FileSinkConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.file.sink.connector; -import org.apache.eventmesh.connector.file.sink.config.FileSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.file.FileSinkConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -103,6 +103,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { outputStream.flush(); diff --git a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/connector/FileSourceConnector.java b/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/connector/FileSourceConnector.java index 2b55284741..68b1a50989 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/connector/FileSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-file/src/main/java/org/apache/eventmesh/connector/file/source/connector/FileSourceConnector.java @@ -17,13 +17,14 @@ package org.apache.eventmesh.connector.file.source.connector; -import org.apache.eventmesh.connector.file.source.config.FileSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import 
org.apache.eventmesh.common.config.connector.file.FileSourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.file.FileRecordPartition; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.io.BufferedReader; import java.io.File; @@ -34,9 +35,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import lombok.extern.slf4j.Slf4j; @@ -87,6 +86,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { try { @@ -118,9 +122,9 @@ public List poll() { } public static RecordPartition convertToRecordPartition(String fileName) { - Map map = new HashMap<>(); - map.put("fileName", fileName); - return new RecordPartition(map); + FileRecordPartition fileRecordPartition = new FileRecordPartition(); + fileRecordPartition.setFileName(fileName); + return fileRecordPartition; } private static String getFileName(String filePath) throws NullPointerException { diff --git a/eventmesh-connectors/eventmesh-connector-file/src/test/java/org/apache/eventmesh/connector/file/FileSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-file/src/test/java/org/apache/eventmesh/connector/file/FileSourceConnectorTest.java index 49421dd077..9cfea3cc59 100644 --- a/eventmesh-connectors/eventmesh-connector-file/src/test/java/org/apache/eventmesh/connector/file/FileSourceConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-file/src/test/java/org/apache/eventmesh/connector/file/FileSourceConnectorTest.java @@ -20,8 +20,8 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import org.apache.eventmesh.connector.file.source.config.FileSourceConfig; -import org.apache.eventmesh.connector.file.source.config.SourceConnectorConfig; +import org.apache.eventmesh.common.config.connector.file.FileSourceConfig; +import org.apache.eventmesh.common.config.connector.file.SourceConnectorConfig; import org.apache.eventmesh.connector.file.source.connector.FileSourceConnector; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; diff --git a/eventmesh-connectors/eventmesh-connector-http/build.gradle b/eventmesh-connectors/eventmesh-connector-http/build.gradle index 11e0d78023..48c7aecd06 100644 --- a/eventmesh-connectors/eventmesh-connector-http/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-http/build.gradle @@ -21,12 +21,13 @@ dependencies { implementation 'io.cloudevents:cloudevents-http-vertx:3.0.0' implementation 'io.vertx:vertx-web:4.5.8' - implementation 'io.vertx:vertx-web-client:4.5.8' + implementation 'io.vertx:vertx-web-client:4.5.9' implementation 'dev.failsafe:failsafe:3.3.2' - testImplementation "org.apache.httpcomponents:httpclient" + + testImplementation 'org.apache.httpcomponents.client5:httpclient5:5.4' + testImplementation 'org.apache.httpcomponents.client5:httpclient5-fluent:5.4' testImplementation 'org.mock-server:mockserver-netty:5.15.0' - testImplementation 'com.squareup.okhttp3:okhttp:4.12.0' compileOnly 
'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' -} \ No newline at end of file +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java new file mode 100644 index 0000000000..9989552d1e --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/common/SynchronizedCircularFifoQueue.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.common; + +import org.apache.commons.collections4.queue.CircularFifoQueue; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + + +/** + * SynchronizedCircularFifoQueue is a synchronized version of CircularFifoQueue. + */ +public class SynchronizedCircularFifoQueue extends CircularFifoQueue { + + /** + *
Default constructor. capacity = 32
+ */ + public SynchronizedCircularFifoQueue() { + super(); + } + + public SynchronizedCircularFifoQueue(Collection coll) { + super(coll); + } + + public SynchronizedCircularFifoQueue(int size) { + super(size); + } + + @Override + public synchronized boolean add(E element) { + return super.add(element); + } + + @Override + public synchronized void clear() { + super.clear(); + } + + @Override + public synchronized E element() { + return super.element(); + } + + @Override + public synchronized E get(int index) { + return super.get(index); + } + + @Override + public synchronized boolean isAtFullCapacity() { + return super.isAtFullCapacity(); + } + + @Override + public synchronized boolean isEmpty() { + return super.isEmpty(); + } + + @Override + public synchronized boolean isFull() { + return super.isFull(); + } + + @Override + public synchronized int maxSize() { + return super.maxSize(); + } + + @Override + public synchronized boolean offer(E element) { + return super.offer(element); + } + + @Override + public synchronized E peek() { + return super.peek(); + } + + @Override + public synchronized E poll() { + return super.poll(); + } + + @Override + public synchronized E remove() { + return super.remove(); + } + + @Override + public synchronized int size() { + return super.size(); + } + + /** + *
Fetch a range of elements from the queue.
+ * + * @param start start index + * @param end end index + * @param removed whether to remove the elements from the queue + * @return list of elements + */ + public synchronized List fetchRange(int start, int end, boolean removed) { + + if (start < 0 || start > end) { + throw new IllegalArgumentException("Invalid range"); + } + end = Math.min(end, this.size()); + + Iterator iterator = this.iterator(); + List items = new ArrayList<>(end - start); + + int count = 0; + while (iterator.hasNext() && count < end) { + E item = iterator.next(); + if (item != null && count >= start) { + // Add the element to the list + items.add(item); + if (removed) { + // Remove the element from the queue + iterator.remove(); + } + } + count++; + } + return items; + } + + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/config/HttpServerConfig.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/config/HttpServerConfig.java index 81a9f20923..8517b869fd 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/config/HttpServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/config/HttpServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.http.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/server/HttpConnectServer.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/server/HttpConnectServer.java index 8d753d2815..dbe0838bc8 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/server/HttpConnectServer.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/server/HttpConnectServer.java @@ -19,7 +19,7 @@ import org.apache.eventmesh.connector.http.config.HttpServerConfig; import org.apache.eventmesh.connector.http.sink.HttpSinkConnector; -import org.apache.eventmesh.connector.http.source.connector.HttpSourceConnector; +import org.apache.eventmesh.connector.http.source.HttpSourceConnector; import org.apache.eventmesh.openconnect.Application; import org.apache.eventmesh.openconnect.util.ConfigUtil; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java index 23d09fa141..3df110f2e7 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnector.java @@ -17,13 +17,14 @@ package org.apache.eventmesh.connector.http.sink; -import org.apache.eventmesh.connector.http.sink.config.HttpSinkConfig; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; -import org.apache.eventmesh.connector.http.sink.handle.CommonHttpSinkHandler; -import org.apache.eventmesh.connector.http.sink.handle.HttpSinkHandler; -import 
org.apache.eventmesh.connector.http.sink.handle.RetryHttpSinkHandler; -import org.apache.eventmesh.connector.http.sink.handle.WebhookHttpSinkHandler; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.http.HttpSinkConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.impl.CommonHttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.impl.HttpSinkHandlerRetryWrapper; +import org.apache.eventmesh.connector.http.sink.handler.impl.WebhookHttpSinkHandler; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -37,7 +38,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class HttpSinkConnector implements Sink { +public class HttpSinkConnector implements Sink, ConnectorCreateService { private HttpSinkConfig httpSinkConfig; @@ -49,6 +50,11 @@ public Class configClass() { return HttpSinkConfig.class; } + @Override + public Sink create() { + return new HttpSinkConnector(); + } + @Override public void init(Config config) throws Exception { this.httpSinkConfig = (HttpSinkConfig) config; @@ -80,7 +86,7 @@ private void doInit() { this.sinkHandler = nonRetryHandler; } else if (maxRetries > 0) { // Wrap the sink handler with a retry handler - this.sinkHandler = new RetryHttpSinkHandler(this.httpSinkConfig.connectorConfig, nonRetryHandler); + this.sinkHandler = new HttpSinkHandlerRetryWrapper(this.httpSinkConfig.connectorConfig, nonRetryHandler); } else { throw new IllegalArgumentException("Max retries must be greater than or equal to 0."); } @@ -101,6 +107,11 @@ public String name() { return this.httpSinkConfig.connectorConfig.getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { this.sinkHandler.stop(); diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpAttemptEvent.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpAttemptEvent.java new file mode 100644 index 0000000000..8163852f8f --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpAttemptEvent.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
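A minimal usage sketch for the SynchronizedCircularFifoQueue#fetchRange helper added above (illustration only, not code from this patch); the HttpExportRecord element type, the capacity, the sample URL, and the page size are assumptions.

// assumes imports: java.time.LocalDateTime, java.util.List
SynchronizedCircularFifoQueue<HttpExportRecord> exportQueue = new SynchronizedCircularFifoQueue<>(1024);
HttpExportMetadata meta = HttpExportMetadata.builder()
    .url("http://127.0.0.1:8080/webhook")      // assumed target URL
    .code(200)
    .receivedTime(LocalDateTime.now())
    .build();
// producers offer records as HTTP responses arrive; the oldest entry is evicted once the queue is full
exportQueue.offer(new HttpExportRecord(meta, "response body"));
// read the first (oldest) page without consuming it
List<HttpExportRecord> page = exportQueue.fetchRange(0, 10, false);
// read and remove the same range once the caller has handled it
List<HttpExportRecord> drained = exportQueue.fetchRange(0, 10, true);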
+ */ + +package org.apache.eventmesh.connector.http.sink.data; + +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Single HTTP attempt event + */ +public class HttpAttemptEvent { + + public static final String PREFIX = "http-attempt-event-"; + + private final int maxAttempts; + + private final AtomicInteger attempts; + + private Throwable lastException; + + + public HttpAttemptEvent(int maxAttempts) { + this.maxAttempts = maxAttempts; + this.attempts = new AtomicInteger(0); + } + + /** + * Increment the attempts + */ + public void incrementAttempts() { + attempts.incrementAndGet(); + } + + /** + * Update the event, incrementing the attempts and setting the last exception + * + * @param exception the exception to update, can be null + */ + public void updateEvent(Throwable exception) { + // increment the attempts + incrementAttempts(); + + // update the last exception + lastException = exception; + } + + /** + * Check if the attempts are less than the maximum attempts + * + * @return true if the attempts are less than the maximum attempts, false otherwise + */ + public boolean canAttempt() { + return attempts.get() < maxAttempts; + } + + public boolean isComplete() { + if (attempts.get() == 0) { + // No start yet + return false; + } + + // If no attempt can be made or the last exception is null, the event completed + return !canAttempt() || lastException == null; + } + + + public int getMaxAttempts() { + return maxAttempts; + } + + public int getAttempts() { + return attempts.get(); + } + + public Throwable getLastException() { + return lastException; + } + + /** + * Get the limited exception message with the default limit of 256 + * + * @return the limited exception message + */ + public String getLimitedExceptionMessage() { + return getLimitedExceptionMessage(256); + } + + /** + * Get the limited exception message with the specified limit + * + * @param maxLimit the maximum limit of the exception message + * @return the limited exception message + */ + public String getLimitedExceptionMessage(int maxLimit) { + if (lastException == null) { + return ""; + } + String message = lastException.getMessage(); + if (message == null) { + return ""; + } + if (message.length() > maxLimit) { + return message.substring(0, maxLimit); + } + return message; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java index 1bfd223079..9c8b1ce673 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpConnectRecord.java @@ -17,31 +17,64 @@ package org.apache.eventmesh.connector.http.sink.data; +import org.apache.eventmesh.common.remote.offset.http.HttpRecordOffset; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.KeyValue; +import java.io.Serializable; import java.time.LocalDateTime; +import java.util.Base64; +import java.util.HashMap; import java.util.Map; import java.util.UUID; import lombok.Builder; -import lombok.Data; +import lombok.Getter; /** * a special ConnectRecord for HttpSinkConnector */ -@Data +@Getter @Builder -public class HttpConnectRecord { +public class HttpConnectRecord 
implements Serializable { - private String type; + private static final long serialVersionUID = 5271462532332251473L; + + /** + * The unique identifier for the HttpConnectRecord + */ + private final String httpRecordId = UUID.randomUUID().toString(); - private String time; + /** + * The time when the HttpConnectRecord was created + */ + private LocalDateTime createTime; - private String uuid; + /** + * The type of the HttpConnectRecord + */ + private String type; + /** + * The event id of the HttpConnectRecord + */ private String eventId; - private ConnectRecord data; + private Object data; + + private KeyValue extensions; + + @Override + public String toString() { + return "HttpConnectRecord{" + + "createTime=" + createTime + + ", httpRecordId='" + httpRecordId + + ", type='" + type + + ", eventId='" + eventId + + ", data=" + data + + ", extensions=" + extensions + + '}'; + } /** * Convert ConnectRecord to HttpConnectRecord @@ -50,18 +83,36 @@ public class HttpConnectRecord { * @return the converted HttpConnectRecord */ public static HttpConnectRecord convertConnectRecord(ConnectRecord record, String type) { - Map offsetMap = record.getPosition().getOffset().getOffset(); + Map offsetMap = new HashMap<>(); + if (record != null && record.getPosition() != null && record.getPosition().getRecordOffset() != null) { + if (HttpRecordOffset.class.equals(record.getPosition().getRecordOffsetClazz())) { + offsetMap = ((HttpRecordOffset) record.getPosition().getRecordOffset()).getOffsetMap(); + } + } String offset = "0"; if (!offsetMap.isEmpty()) { offset = offsetMap.values().iterator().next().toString(); } - return HttpConnectRecord.builder() - .type(type) - .time(LocalDateTime.now().toString()) - .uuid(UUID.randomUUID().toString()) - .eventId(type + "-" + offset) - .data(record) - .build(); + if (record.getData() instanceof byte[]) { + String data = Base64.getEncoder().encodeToString((byte[]) record.getData()); + record.addExtension("isBase64", true); + return HttpConnectRecord.builder() + .type(type) + .createTime(LocalDateTime.now()) + .eventId(type + "-" + offset) + .data(data) + .extensions(record.getExtensions()) + .build(); + } else { + record.addExtension("isBase64", false); + return HttpConnectRecord.builder() + .type(type) + .createTime(LocalDateTime.now()) + .eventId(type + "-" + offset) + .data(record.getData()) + .extensions(record.getExtensions()) + .build(); + } } } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java index 848012f152..41a5087870 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportMetadata.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.http.sink.data; +import java.io.Serializable; import java.time.LocalDateTime; import lombok.Builder; @@ -27,7 +28,10 @@ */ @Data @Builder -public class HttpExportMetadata { +public class HttpExportMetadata implements Serializable { + + private static final long serialVersionUID = 1121010466793041920L; + private String url; private int code; @@ -36,7 +40,9 @@ public class HttpExportMetadata { private LocalDateTime receivedTime; - private String uuid; + private String httpRecordId; + + private 
String recordId; private String retriedBy; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java index b6382aee7a..c6bdb02884 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecord.java @@ -17,6 +17,8 @@ package org.apache.eventmesh.connector.http.sink.data; +import java.io.Serializable; + import lombok.AllArgsConstructor; import lombok.Data; @@ -25,7 +27,9 @@ */ @Data @AllArgsConstructor -public class HttpExportRecord { +public class HttpExportRecord implements Serializable { + + private static final long serialVersionUID = 6010283911452947157L; private HttpExportMetadata metadata; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java index 5c44eb3b7f..81e582c33a 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/HttpExportRecordPage.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.http.sink.data; +import java.io.Serializable; import java.util.List; import lombok.AllArgsConstructor; @@ -27,7 +28,9 @@ */ @Data @AllArgsConstructor -public class HttpExportRecordPage { +public class HttpExportRecordPage implements Serializable { + + private static final long serialVersionUID = 1143791658357035990L; private int pageNum; diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java new file mode 100644 index 0000000000..66f5d0e7ec --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/data/MultiHttpRequestContext.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
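For reference, an illustrative round trip (not code from this patch) through the reworked HttpConnectRecord.convertConnectRecord above: a byte[] payload is Base64-encoded into the data field and the source record is tagged with isBase64=true; the type string here is an assumption.

// assumes imports: java.nio.charset.StandardCharsets, java.util.Base64
ConnectRecord record = new ConnectRecord(null, null, System.currentTimeMillis(),
    "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8));
HttpConnectRecord httpRecord = HttpConnectRecord.convertConnectRecord(record, "httpSink.http.common");
// the data field now holds a Base64 string that the receiving side can decode
byte[] payload = Base64.getDecoder().decode((String) httpRecord.getData());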
+ */ + +package org.apache.eventmesh.connector.http.sink.data; + +import java.util.concurrent.atomic.AtomicInteger; + + +/** + * Multi HTTP request context + */ +public class MultiHttpRequestContext { + + public static final String NAME = "multi-http-request-context"; + + /** + * The remaining requests to be processed. + */ + private final AtomicInteger remainingRequests; + + /** + * The last failed event. + * If retries occur but still fail, it will be logged, and only the last one will be retained. + */ + private HttpAttemptEvent lastFailedEvent; + + public MultiHttpRequestContext(int remainingEvents) { + this.remainingRequests = new AtomicInteger(remainingEvents); + } + + /** + * Decrement the remaining requests by 1. + */ + public void decrementRemainingRequests() { + remainingRequests.decrementAndGet(); + } + + /** + * Check if all requests have been processed. + * + * @return true if all requests have been processed, false otherwise. + */ + public boolean isAllRequestsProcessed() { + return remainingRequests.get() == 0; + } + + public int getRemainingRequests() { + return remainingRequests.get(); + } + + public HttpAttemptEvent getLastFailedEvent() { + return lastFailedEvent; + } + + public void setLastFailedEvent(HttpAttemptEvent lastFailedEvent) { + this.lastFailedEvent = lastFailedEvent; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java deleted file mode 100644 index e21046c4d2..0000000000 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/CommonHttpSinkHandler.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
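One possible shape of the per-request completion callback that ties HttpAttemptEvent and MultiHttpRequestContext together, sketched only from the methods defined above; the method name, parameters, and the logging step are assumptions, not code from this patch.

// assumes import: java.util.Map
static void onHttpResult(Map<String, Object> attributes, HttpConnectRecord httpRecord, Throwable error) {
    HttpAttemptEvent attemptEvent =
        (HttpAttemptEvent) attributes.get(HttpAttemptEvent.PREFIX + httpRecord.getHttpRecordId());
    attemptEvent.updateEvent(error);              // count this attempt and remember the failure, if any

    if (!attemptEvent.isComplete()) {
        return;                                   // another retry is still allowed for this URL
    }

    MultiHttpRequestContext context = (MultiHttpRequestContext) attributes.get(MultiHttpRequestContext.NAME);
    if (attemptEvent.getLastException() != null) {
        context.setLastFailedEvent(attemptEvent); // only the most recent failed event is retained
    }
    context.decrementRemainingRequests();         // this URL is finished, successfully or not

    if (context.isAllRequestsProcessed() && context.getLastFailedEvent() != null) {
        System.err.println("Record failed after " + context.getLastFailedEvent().getAttempts()
            + " attempts: " + context.getLastFailedEvent().getLimitedExceptionMessage());
    }
}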
- */ - -package org.apache.eventmesh.connector.http.sink.handle; - -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; -import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; -import org.apache.eventmesh.connector.http.util.HttpUtils; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; - -import java.net.URI; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import io.netty.handler.codec.http.HttpHeaderNames; -import io.vertx.core.Future; -import io.vertx.core.MultiMap; -import io.vertx.core.Vertx; -import io.vertx.core.buffer.Buffer; -import io.vertx.core.http.HttpHeaders; -import io.vertx.ext.web.client.HttpResponse; -import io.vertx.ext.web.client.WebClient; -import io.vertx.ext.web.client.WebClientOptions; - -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - -/** - * Common HTTP/HTTPS Sink Handler implementation to handle ConnectRecords by sending them over HTTP or HTTPS to configured URLs. - * - *
This handler initializes a WebClient for making HTTP requests based on the provided SinkConnectorConfig. - * It handles processing ConnectRecords by converting them to HttpConnectRecord and sending them asynchronously to each configured URL using the - * WebClient. - * - * The handler uses Vert.x's WebClient to perform HTTP/HTTPS requests. It initializes the WebClient in the {@link #start()} - * method and closes it in the {@link #stop()} method to manage resources efficiently. - * - * Each ConnectRecord is processed and sent to all configured URLs concurrently using asynchronous HTTP requests.
- */ -@Slf4j -@Getter -public class CommonHttpSinkHandler implements HttpSinkHandler { - - private final SinkConnectorConfig connectorConfig; - - private final List urls; - - private WebClient webClient; - - - public CommonHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { - this.connectorConfig = sinkConnectorConfig; - // Initialize URLs - String[] urlStrings = sinkConnectorConfig.getUrls(); - this.urls = Arrays.stream(urlStrings) - .map(URI::create) - .collect(Collectors.toList()); - } - - /** - * Initializes the WebClient for making HTTP requests based on the provided SinkConnectorConfig. - */ - @Override - public void start() { - // Create WebClient - doInitWebClient(); - } - - /** - * Initializes the WebClient with the provided configuration options. - */ - private void doInitWebClient() { - final Vertx vertx = Vertx.vertx(); - WebClientOptions options = new WebClientOptions() - .setKeepAlive(this.connectorConfig.isKeepAlive()) - .setKeepAliveTimeout(this.connectorConfig.getKeepAliveTimeout() / 1000) - .setIdleTimeout(this.connectorConfig.getIdleTimeout()) - .setIdleTimeoutUnit(TimeUnit.MILLISECONDS) - .setConnectTimeout(this.connectorConfig.getConnectionTimeout()) - .setMaxPoolSize(this.connectorConfig.getMaxConnectionPoolSize()); - this.webClient = WebClient.create(vertx, options); - } - - /** - * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. - * - * @param record the ConnectRecord to process - */ - @Override - public void handle(ConnectRecord record) { - for (URI url : this.urls) { - // convert ConnectRecord to HttpConnectRecord - String type = String.format("%s.%s.%s", connectorConfig.getConnectorName(), url.getScheme(), "common"); - HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - deliver(url, httpConnectRecord); - } - } - - - /** - * Processes HttpConnectRecord on specified URL while returning its own processing logic. - * This method sends the HttpConnectRecord to the specified URL using the WebClient. - * - * @param url URI to which the HttpConnectRecord should be sent - * @param httpConnectRecord HttpConnectRecord to process - * @return processing chain - */ - @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord) { - // create headers - MultiMap headers = HttpHeaders.headers() - .set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=utf-8") - .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8"); - - // get timestamp and offset - Long timestamp = httpConnectRecord.getData().getTimestamp(); - Map offset = httpConnectRecord.getData().getPosition().getOffset().getOffset(); - - // send the request - return this.webClient.post(url.getPath()) - .host(url.getHost()) - .port(url.getPort() == -1 ? (Objects.equals(url.getScheme(), "https") ? 443 : 80) : url.getPort()) - .putHeaders(headers) - .ssl(Objects.equals(url.getScheme(), "https")) - .sendJson(httpConnectRecord) - .onSuccess(res -> { - log.info("Request sent successfully. Record: timestamp={}, offset={}", timestamp, offset); - // log the response - if (HttpUtils.is2xxSuccessful(res.statusCode())) { - if (log.isDebugEnabled()) { - log.debug("Received successful response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", - res.statusCode(), timestamp, offset, res.bodyAsString()); - } else { - log.info("Received successful response: statusCode={}. 
Record: timestamp={}, offset={}", res.statusCode(), timestamp, offset); - } - } else { - if (log.isDebugEnabled()) { - log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}, responseBody={}", - res.statusCode(), timestamp, offset, res.bodyAsString()); - } else { - log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, offset={}", res.statusCode(), timestamp, offset); - } - } - - }) - .onFailure(err -> log.error("Request failed to send. Record: timestamp={}, offset={}", timestamp, offset, err)); - } - - - /** - * Cleans up and releases resources used by the HTTP/HTTPS handler. - */ - @Override - public void stop() { - if (this.webClient != null) { - this.webClient.close(); - } else { - log.warn("WebClient is null, ignore."); - } - } - - -} \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java deleted file mode 100644 index 06700261d5..0000000000 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/RetryHttpSinkHandler.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.eventmesh.connector.http.sink.handle; - -import org.apache.eventmesh.connector.http.sink.config.HttpRetryConfig; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; -import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; -import org.apache.eventmesh.connector.http.sink.data.HttpExportMetadata; -import org.apache.eventmesh.connector.http.sink.data.HttpExportRecord; -import org.apache.eventmesh.connector.http.util.HttpUtils; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; - -import java.net.ConnectException; -import java.net.URI; -import java.time.Duration; -import java.time.LocalDateTime; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; - -import io.vertx.core.Future; -import io.vertx.core.buffer.Buffer; -import io.vertx.ext.web.client.HttpResponse; - -import lombok.extern.slf4j.Slf4j; - -import dev.failsafe.Failsafe; -import dev.failsafe.RetryPolicy; -import dev.failsafe.RetryPolicyBuilder; -import dev.failsafe.event.ExecutionEvent; - - -@Slf4j -public class RetryHttpSinkHandler implements HttpSinkHandler { - - private final SinkConnectorConfig connectorConfig; - - // Retry policy builder - private RetryPolicyBuilder> retryPolicyBuilder; - - private final List urls; - - private final HttpSinkHandler sinkHandler; - - - public RetryHttpSinkHandler(SinkConnectorConfig connectorConfig, HttpSinkHandler sinkHandler) { - this.connectorConfig = connectorConfig; - this.sinkHandler = sinkHandler; - - // Initialize retry - initRetry(); - - // Initialize URLs - String[] urlStrings = connectorConfig.getUrls(); - this.urls = Arrays.stream(urlStrings) - .map(URI::create) - .collect(Collectors.toList()); - } - - private void initRetry() { - HttpRetryConfig httpRetryConfig = this.connectorConfig.getRetryConfig(); - - this.retryPolicyBuilder = RetryPolicy.>builder() - .handleIf(e -> e instanceof ConnectException) - .handleResultIf(response -> httpRetryConfig.isRetryOnNonSuccess() && !HttpUtils.is2xxSuccessful(response.statusCode())) - .withMaxRetries(httpRetryConfig.getMaxRetries()) - .withDelay(Duration.ofMillis(httpRetryConfig.getInterval())); - } - - - /** - * Initializes the WebClient for making HTTP requests based on the provided SinkConnectorConfig. - */ - @Override - public void start() { - sinkHandler.start(); - } - - - /** - * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. - * - * @param record the ConnectRecord to process - */ - @Override - public void handle(ConnectRecord record) { - for (URI url : this.urls) { - // convert ConnectRecord to HttpConnectRecord - String type = String.format("%s.%s.%s", - this.connectorConfig.getConnectorName(), url.getScheme(), - this.connectorConfig.getWebhookConfig().isActivate() ? 
"webhook" : "common"); - HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - // handle the HttpConnectRecord - deliver(url, httpConnectRecord); - } - } - - - /** - * Processes HttpConnectRecord on specified URL while returning its own processing logic This method provides the retry power to process the - * HttpConnectRecord - * - * @param url URI to which the HttpConnectRecord should be sent - * @param httpConnectRecord HttpConnectRecord to process - * @return processing chain - */ - @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord) { - // Only webhook mode needs to use the UUID to identify the request - String id = httpConnectRecord.getUuid(); - - // Build the retry policy - RetryPolicy> retryPolicy = retryPolicyBuilder - .onSuccess(event -> { - if (connectorConfig.getWebhookConfig().isActivate()) { - // convert the result to an HttpExportRecord - HttpExportRecord exportRecord = covertToExportRecord(httpConnectRecord, event, event.getResult(), event.getException(), url, id); - // add the data to the queue - ((WebhookHttpSinkHandler) sinkHandler).addDataToQueue(exportRecord); - } - }) - .onRetry(event -> { - if (log.isDebugEnabled()) { - log.warn("Retrying the request to {} for the {} time. HttpConnectRecord= {}", url, event.getAttemptCount(), httpConnectRecord); - } else { - log.warn("Retrying the request to {} for the {} time.", url, event.getAttemptCount()); - } - if (connectorConfig.getWebhookConfig().isActivate()) { - HttpExportRecord exportRecord = - covertToExportRecord(httpConnectRecord, event, event.getLastResult(), event.getLastException(), url, id); - ((WebhookHttpSinkHandler) sinkHandler).addDataToQueue(exportRecord); - } - // update the HttpConnectRecord - httpConnectRecord.setTime(LocalDateTime.now().toString()); - httpConnectRecord.setUuid(UUID.randomUUID().toString()); - }) - .onFailure(event -> { - if (log.isDebugEnabled()) { - log.error("Failed to send the request to {} after {} attempts. HttpConnectRecord= {}", url, event.getAttemptCount(), - httpConnectRecord, event.getException()); - } else { - log.error("Failed to send the request to {} after {} attempts.", url, event.getAttemptCount(), event.getException()); - } - if (connectorConfig.getWebhookConfig().isActivate()) { - HttpExportRecord exportRecord = covertToExportRecord(httpConnectRecord, event, event.getResult(), event.getException(), url, id); - ((WebhookHttpSinkHandler) sinkHandler).addDataToQueue(exportRecord); - } - }).build(); - - // Handle the HttpConnectRecord with retry - Failsafe.with(retryPolicy) - .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord).toCompletionStage()); - - return null; - } - - /** - * Converts the ExecutionCompletedEvent to an HttpExportRecord. - * - * @param httpConnectRecord HttpConnectRecord - * @param event ExecutionEvent - * @param response the response of the request, may be null - * @param e the exception thrown during the request, may be null - * @param url the URL the request was sent to - * @param id UUID - * @return the converted HttpExportRecord - */ - private HttpExportRecord covertToExportRecord(HttpConnectRecord httpConnectRecord, ExecutionEvent event, HttpResponse response, - Throwable e, URI url, String id) { - - HttpExportMetadata httpExportMetadata = HttpExportMetadata.builder() - .url(url.toString()) - .code(response != null ? response.statusCode() : -1) - .message(response != null ? 
response.statusMessage() : e.getMessage()) - .receivedTime(LocalDateTime.now()) - .uuid(httpConnectRecord.getUuid()) - .retriedBy(event.getAttemptCount() > 1 ? id : null) - .retryNum(event.getAttemptCount() - 1).build(); - - return new HttpExportRecord(httpExportMetadata, response == null ? null : response.bodyAsString()); - } - - /** - * Cleans up and releases resources used by the HTTP/HTTPS handler. - */ - @Override - public void stop() { - sinkHandler.stop(); - } -} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java new file mode 100644 index 0000000000..28ba791127 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/AbstractHttpSinkHandler.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.sink.handler; + +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.sink.data.HttpAttemptEvent; +import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; +import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.net.URI; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +/** + * AbstractHttpSinkHandler is an abstract class that provides a base implementation for HttpSinkHandler. + */ +public abstract class AbstractHttpSinkHandler implements HttpSinkHandler { + + private final SinkConnectorConfig sinkConnectorConfig; + + private final List urls; + + protected AbstractHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { + this.sinkConnectorConfig = sinkConnectorConfig; + // Initialize URLs + String[] urlStrings = sinkConnectorConfig.getUrls(); + this.urls = Arrays.stream(urlStrings) + .map(URI::create) + .collect(Collectors.toList()); + } + + public SinkConnectorConfig getSinkConnectorConfig() { + return sinkConnectorConfig; + } + + public List getUrls() { + return urls; + } + + /** + * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. 
+ * + * @param record the ConnectRecord to process + */ + @Override + public void handle(ConnectRecord record) { + // build attributes + Map attributes = new ConcurrentHashMap<>(); + attributes.put(MultiHttpRequestContext.NAME, new MultiHttpRequestContext(urls.size())); + + // send the record to all URLs + for (URI url : urls) { + // convert ConnectRecord to HttpConnectRecord + String type = String.format("%s.%s.%s", + this.sinkConnectorConfig.getConnectorName(), url.getScheme(), + this.sinkConnectorConfig.getWebhookConfig().isActivate() ? "webhook" : "common"); + HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); + + // add AttemptEvent to the attributes + HttpAttemptEvent attemptEvent = new HttpAttemptEvent(this.sinkConnectorConfig.getRetryConfig().getMaxRetries() + 1); + attributes.put(HttpAttemptEvent.PREFIX + httpConnectRecord.getHttpRecordId(), attemptEvent); + + // deliver the record + deliver(url, httpConnectRecord, attributes, record); + } + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/HttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java similarity index 82% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/HttpSinkHandler.java rename to eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java index 09fd66a762..d5a27940e5 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/HttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/HttpSinkHandler.java @@ -15,12 +15,13 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.sink.handle; +package org.apache.eventmesh.connector.http.sink.handler; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.net.URI; +import java.util.Map; import io.vertx.core.Future; import io.vertx.core.buffer.Buffer; @@ -32,14 +33,14 @@ * *

Any class that needs to process ConnectRecords via HTTP or HTTPS should implement this interface.
 * Implementing classes must provide implementations for the {@link #start()}, {@link #handle(ConnectRecord)},
- * {@link #deliver(URI, HttpConnectRecord)}, and {@link #stop()} methods.
+ * {@link #deliver(URI, HttpConnectRecord, Map, ConnectRecord)}, and {@link #stop()} methods.
 *
 * Implementing classes should ensure thread safety and handle HTTP/HTTPS communication efficiently.
 * The {@link #start()} method initializes any necessary resources for HTTP/HTTPS communication. The {@link #handle(ConnectRecord)} method processes a
- * ConnectRecord by sending it over HTTP or HTTPS. The {@link #deliver(URI, HttpConnectRecord)} method processes HttpConnectRecord on specified URL
- * while returning its own processing logic {@link #stop()} method releases any resources used for HTTP/HTTPS communication.
+ * ConnectRecord by sending it over HTTP or HTTPS. The {@link #deliver(URI, HttpConnectRecord, Map, ConnectRecord)} method processes HttpConnectRecord
+ * on the specified URL while returning its own processing logic. The {@link #stop()} method releases any resources used for HTTP/HTTPS communication.
 *
- * It's recommended to handle exceptions gracefully within the {@link #deliver(URI, HttpConnectRecord)} method
+ * It's recommended to handle exceptions gracefully within the {@link #deliver(URI, HttpConnectRecord, Map, ConnectRecord)} method
 * to prevent message loss or processing interruptions.

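// Illustrative sketch, not part of the patch: a minimal HttpSinkHandler implementation that follows
// the lifecycle described above. The class name and the no-op bodies are hypothetical, and the
// generic types stripped by the diff rendering are restored here as Future<HttpResponse<Buffer>>
// and Map<String, Object>. Real handlers such as CommonHttpSinkHandler perform the actual HTTP call.

import java.net.URI;
import java.util.Map;

import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord;
import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler;
import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord;

import io.vertx.core.Future;
import io.vertx.core.buffer.Buffer;
import io.vertx.ext.web.client.HttpResponse;

public class NoOpHttpSinkHandler implements HttpSinkHandler {

    @Override
    public void start() {
        // acquire resources (e.g. a WebClient) before the first delivery
    }

    @Override
    public void handle(ConnectRecord record) {
        // convert the record to an HttpConnectRecord and call deliver(...) once per configured URL
    }

    @Override
    public Future<HttpResponse<Buffer>> deliver(URI url, HttpConnectRecord httpConnectRecord,
        Map<String, Object> attributes, ConnectRecord connectRecord) {
        // a real implementation sends the record over HTTP/HTTPS and returns the response future
        return Future.succeededFuture();
    }

    @Override
    public void stop() {
        // release whatever start() acquired
    }
}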
*/ public interface HttpSinkHandler { @@ -62,9 +63,10 @@ public interface HttpSinkHandler { * * @param url URI to which the HttpConnectRecord should be sent * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to be used in processing * @return processing chain */ - Future> deliver(URI url, HttpConnectRecord httpConnectRecord); + Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, ConnectRecord connectRecord); /** * Cleans up and releases resources used by the HTTP/HTTPS handler. This method should be called when the handler is no longer needed. diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java new file mode 100644 index 0000000000..61bdc9f310 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/CommonHttpSinkHandler.java @@ -0,0 +1,266 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.sink.handler.impl; + +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.http.sink.data.HttpAttemptEvent; +import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; +import org.apache.eventmesh.connector.http.sink.data.MultiHttpRequestContext; +import org.apache.eventmesh.connector.http.sink.handler.AbstractHttpSinkHandler; +import org.apache.eventmesh.connector.http.util.HttpUtils; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.net.URI; +import java.time.ZoneId; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import io.netty.handler.codec.http.HttpHeaderNames; +import io.vertx.core.Future; +import io.vertx.core.MultiMap; +import io.vertx.core.Vertx; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.http.HttpHeaders; +import io.vertx.ext.web.client.HttpResponse; +import io.vertx.ext.web.client.WebClient; +import io.vertx.ext.web.client.WebClientOptions; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +/** + * Common HTTP/HTTPS Sink Handler implementation to handle ConnectRecords by sending them over HTTP or HTTPS to configured URLs. + * + *

This handler initializes a WebClient for making HTTP requests based on the provided SinkConnectorConfig.
+ * It handles processing ConnectRecords by converting them to HttpConnectRecord and sending them asynchronously to each configured URL using the
+ * WebClient.
+ *
+ * The handler uses Vert.x's WebClient to perform HTTP/HTTPS requests. It initializes the WebClient in the {@link #start()}
+ * method and closes it in the {@link #stop()} method to manage resources efficiently.
+ *
+ * Each ConnectRecord is processed and sent to all configured URLs concurrently using asynchronous HTTP requests.

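// Simplified, standalone sketch (not part of the patch) of the fan-out described above: one payload
// is posted to every configured URL and a single completion action runs once all requests have
// finished, mirroring the remaining-request counter kept by MultiHttpRequestContext. The target
// URLs and the payload are hypothetical.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.client.WebClient;

public class FanOutSketch {

    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();
        WebClient client = WebClient.create(vertx);

        List<String> urls = Arrays.asList("http://localhost:8081/sink", "http://localhost:8082/sink");
        JsonObject payload = new JsonObject().put("data", "hello eventmesh");

        // counterpart of MultiHttpRequestContext: one pending request per URL
        AtomicInteger remaining = new AtomicInteger(urls.size());

        for (String url : urls) {
            client.postAbs(url)
                .sendJsonObject(payload)
                .onComplete(ar -> {
                    System.out.println(url + " done, succeeded=" + ar.succeeded());
                    if (remaining.decrementAndGet() == 0) {
                        // all requests processed: the connector would invoke the record callback here
                        System.out.println("all deliveries processed");
                        vertx.close();
                    }
                });
        }
    }
}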
+ */ +@Slf4j +@Getter +public class CommonHttpSinkHandler extends AbstractHttpSinkHandler { + + private WebClient webClient; + + + public CommonHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { + super(sinkConnectorConfig); + } + + /** + * Initializes the WebClient for making HTTP requests based on the provided SinkConnectorConfig. + */ + @Override + public void start() { + // Create WebClient + doInitWebClient(); + } + + /** + * Initializes the WebClient with the provided configuration options. + */ + private void doInitWebClient() { + SinkConnectorConfig sinkConnectorConfig = getSinkConnectorConfig(); + final Vertx vertx = Vertx.vertx(); + WebClientOptions options = new WebClientOptions() + .setKeepAlive(sinkConnectorConfig.isKeepAlive()) + .setKeepAliveTimeout(sinkConnectorConfig.getKeepAliveTimeout() / 1000) + .setIdleTimeout(sinkConnectorConfig.getIdleTimeout()) + .setIdleTimeoutUnit(TimeUnit.MILLISECONDS) + .setConnectTimeout(sinkConnectorConfig.getConnectionTimeout()) + .setMaxPoolSize(sinkConnectorConfig.getMaxConnectionPoolSize()); + this.webClient = WebClient.create(vertx, options); + } + + /** + * Processes HttpConnectRecord on specified URL while returning its own processing logic. This method sends the HttpConnectRecord to the specified + * URL using the WebClient. + * + * @param url URI to which the HttpConnectRecord should be sent + * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to be used in processing + * @return processing chain + */ + @Override + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, + ConnectRecord connectRecord) { + // create headers + Map extensionMap = new HashMap<>(); + Set extensionKeySet = httpConnectRecord.getExtensions().keySet(); + for (String extensionKey : extensionKeySet) { + Object v = httpConnectRecord.getExtensions().getObject(extensionKey); + extensionMap.put(extensionKey, v); + } + + MultiMap headers = HttpHeaders.headers() + .set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=utf-8") + .set(HttpHeaderNames.ACCEPT, "application/json; charset=utf-8") + .set("extension", JsonUtils.toJSONString(extensionMap)); + // get timestamp and offset + Long timestamp = httpConnectRecord.getCreateTime() + .atZone(ZoneId.systemDefault()) + .toInstant() + .toEpochMilli(); + + // send the request + return this.webClient.post(url.getPath()) + .host(url.getHost()) + .port(url.getPort() == -1 ? (Objects.equals(url.getScheme(), "https") ? 443 : 80) : url.getPort()) + .putHeaders(headers) + .ssl(Objects.equals(url.getScheme(), "https")) + .sendJson(httpConnectRecord.getData()) + .onSuccess(res -> { + log.info("Request sent successfully. Record: timestamp={}", timestamp); + + Exception e = null; + + // log the response + if (HttpUtils.is2xxSuccessful(res.statusCode())) { + if (log.isDebugEnabled()) { + log.debug("Received successful response: statusCode={}. Record: timestamp={}, responseBody={}", + res.statusCode(), timestamp, res.bodyAsString()); + } else { + log.info("Received successful response: statusCode={}. Record: timestamp={}", res.statusCode(), timestamp); + } + } else { + if (log.isDebugEnabled()) { + log.warn("Received non-2xx response: statusCode={}. Record: timestamp={}, responseBody={}", + res.statusCode(), timestamp, res.bodyAsString()); + } else { + log.warn("Received non-2xx response: statusCode={}. 
Record: timestamp={}", res.statusCode(), timestamp); + } + + e = new RuntimeException("Unexpected HTTP response code: " + res.statusCode()); + } + + // try callback + tryCallback(httpConnectRecord, e, attributes, connectRecord); + }).onFailure(err -> { + log.error("Request failed to send. Record: timestamp={}", timestamp, err); + + // try callback + tryCallback(httpConnectRecord, err, attributes, connectRecord); + }); + } + + /** + * Tries to call the callback based on the result of the request. + * + * @param httpConnectRecord the HttpConnectRecord to use + * @param e the exception thrown during the request, may be null + * @param attributes additional attributes to be used in processing + */ + private void tryCallback(HttpConnectRecord httpConnectRecord, Throwable e, Map attributes, ConnectRecord record) { + // get and update the attempt event + HttpAttemptEvent attemptEvent = (HttpAttemptEvent) attributes.get(HttpAttemptEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + attemptEvent.updateEvent(e); + + // get and update the multiHttpRequestContext + MultiHttpRequestContext multiHttpRequestContext = getAndUpdateMultiHttpRequestContext(attributes, attemptEvent); + + if (multiHttpRequestContext.isAllRequestsProcessed()) { + // do callback + if (record.getCallback() == null) { + if (log.isDebugEnabled()) { + log.warn("ConnectRecord callback is null. Ignoring callback. {}", record); + } else { + log.warn("ConnectRecord callback is null. Ignoring callback."); + } + return; + } + + // get the last failed event + HttpAttemptEvent lastFailedEvent = multiHttpRequestContext.getLastFailedEvent(); + if (lastFailedEvent == null) { + // success + record.getCallback().onSuccess(convertToSendResult(record)); + } else { + // failure + record.getCallback().onException(buildSendExceptionContext(record, lastFailedEvent.getLastException())); + } + } + } + + + /** + * Gets and updates the multi http request context based on the provided attributes and HttpConnectRecord. 
+ * + * @param attributes the attributes to use + * @param attemptEvent the HttpAttemptEvent to use + * @return the updated multi http request context + */ + private MultiHttpRequestContext getAndUpdateMultiHttpRequestContext(Map attributes, HttpAttemptEvent attemptEvent) { + // get the multi http request context + MultiHttpRequestContext multiHttpRequestContext = (MultiHttpRequestContext) attributes.get(MultiHttpRequestContext.NAME); + + // Check if the current attempted event has completed + if (attemptEvent.isComplete()) { + // decrement the counter + multiHttpRequestContext.decrementRemainingRequests(); + + if (attemptEvent.getLastException() != null) { + // if all attempts are exhausted, set the last failed event + multiHttpRequestContext.setLastFailedEvent(attemptEvent); + } + } + + return multiHttpRequestContext; + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + result.setMessageId(record.getRecordId()); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + + private SendExceptionContext buildSendExceptionContext(ConnectRecord record, Throwable e) { + SendExceptionContext sendExceptionContext = new SendExceptionContext(); + sendExceptionContext.setMessageId(record.getRecordId()); + sendExceptionContext.setCause(e); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(record.getExtension("topic"))) { + sendExceptionContext.setTopic(record.getExtension("topic")); + } + return sendExceptionContext; + } + + + /** + * Cleans up and releases resources used by the HTTP/HTTPS handler. + */ + @Override + public void stop() { + if (this.webClient != null) { + this.webClient.close(); + } else { + log.warn("WebClient is null, ignore."); + } + } +} \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java new file mode 100644 index 0000000000..050839451a --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/HttpSinkHandlerRetryWrapper.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.http.sink.handler.impl; + +import org.apache.eventmesh.common.config.connector.http.HttpRetryConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; +import org.apache.eventmesh.connector.http.sink.handler.AbstractHttpSinkHandler; +import org.apache.eventmesh.connector.http.sink.handler.HttpSinkHandler; +import org.apache.eventmesh.connector.http.util.HttpUtils; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.net.ConnectException; +import java.net.URI; +import java.time.Duration; +import java.util.Map; + +import io.vertx.core.Future; +import io.vertx.core.buffer.Buffer; +import io.vertx.ext.web.client.HttpResponse; + +import lombok.extern.slf4j.Slf4j; + +import dev.failsafe.Failsafe; +import dev.failsafe.RetryPolicy; + + +/** + * HttpSinkHandlerRetryWrapper is a wrapper class for the HttpSinkHandler that provides retry functionality for failed HTTP requests. + */ +@Slf4j +public class HttpSinkHandlerRetryWrapper extends AbstractHttpSinkHandler { + + private final HttpRetryConfig httpRetryConfig; + + private final HttpSinkHandler sinkHandler; + + private final RetryPolicy> retryPolicy; + + public HttpSinkHandlerRetryWrapper(SinkConnectorConfig sinkConnectorConfig, HttpSinkHandler sinkHandler) { + super(sinkConnectorConfig); + this.sinkHandler = sinkHandler; + this.httpRetryConfig = getSinkConnectorConfig().getRetryConfig(); + this.retryPolicy = buildRetryPolicy(); + } + + private RetryPolicy> buildRetryPolicy() { + return RetryPolicy.>builder() + .handleIf(e -> e instanceof ConnectException) + .handleResultIf(response -> httpRetryConfig.isRetryOnNonSuccess() && !HttpUtils.is2xxSuccessful(response.statusCode())) + .withMaxRetries(httpRetryConfig.getMaxRetries()) + .withDelay(Duration.ofMillis(httpRetryConfig.getInterval())) + .onRetry(event -> { + if (log.isDebugEnabled()) { + log.warn("Failed to deliver message after {} attempts. Retrying in {} ms. Error: {}", + event.getAttemptCount(), httpRetryConfig.getInterval(), event.getLastException()); + } else { + log.warn("Failed to deliver message after {} attempts. Retrying in {} ms.", + event.getAttemptCount(), httpRetryConfig.getInterval()); + } + }).onFailure(event -> { + if (log.isDebugEnabled()) { + log.error("Failed to deliver message after {} attempts. Error: {}", + event.getAttemptCount(), event.getException()); + } else { + log.error("Failed to deliver message after {} attempts.", + event.getAttemptCount()); + } + }).build(); + } + + /** + * Initializes the WebClient for making HTTP requests based on the provided SinkConnectorConfig. 
+ */ + @Override + public void start() { + sinkHandler.start(); + } + + + /** + * Processes HttpConnectRecord on specified URL while returning its own processing logic This method provides the retry power to process the + * HttpConnectRecord + * + * @param url URI to which the HttpConnectRecord should be sent + * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to pass to the processing chain + * @return processing chain + */ + @Override + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, + ConnectRecord connectRecord) { + Failsafe.with(retryPolicy) + .getStageAsync(() -> sinkHandler.deliver(url, httpConnectRecord, attributes, connectRecord).toCompletionStage()); + return null; + } + + + /** + * Cleans up and releases resources used by the HTTP/HTTPS handler. + */ + @Override + public void stop() { + sinkHandler.stop(); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/WebhookHttpSinkHandler.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java similarity index 65% rename from eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/WebhookHttpSinkHandler.java rename to eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java index e07683fcfa..0751918ee7 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handle/WebhookHttpSinkHandler.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/sink/handler/impl/WebhookHttpSinkHandler.java @@ -15,11 +15,13 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.connector.http.sink.handle; +package org.apache.eventmesh.connector.http.sink.handler.impl; +import org.apache.eventmesh.common.config.connector.http.HttpWebhookConfig; +import org.apache.eventmesh.common.config.connector.http.SinkConnectorConfig; import org.apache.eventmesh.common.exception.EventMeshException; -import org.apache.eventmesh.connector.http.sink.config.HttpWebhookConfig; -import org.apache.eventmesh.connector.http.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.connector.http.common.SynchronizedCircularFifoQueue; +import org.apache.eventmesh.connector.http.sink.data.HttpAttemptEvent; import org.apache.eventmesh.connector.http.sink.data.HttpConnectRecord; import org.apache.eventmesh.connector.http.sink.data.HttpExportMetadata; import org.apache.eventmesh.connector.http.sink.data.HttpExportRecord; @@ -30,13 +32,10 @@ import java.net.URI; import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.Objects; -import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; import io.netty.handler.codec.http.HttpResponseStatus; import io.vertx.core.Future; @@ -64,8 +63,6 @@ @Slf4j public class WebhookHttpSinkHandler extends CommonHttpSinkHandler { - private final SinkConnectorConfig sinkConnectorConfig; - // the configuration for webhook private final HttpWebhookConfig webhookConfig; @@ -73,25 +70,31 @@ public class WebhookHttpSinkHandler extends CommonHttpSinkHandler { private HttpServer exportServer; // store the received data, when webhook is enabled - private final ConcurrentLinkedQueue receivedDataQueue; + private final SynchronizedCircularFifoQueue receivedDataQueue; + + private volatile boolean exportStarted = false; - // the maximum queue size - private final int maxQueueSize; + private volatile boolean exportDestroyed = false; - // the current queue size - private final AtomicInteger currentQueueSize; + public boolean isExportStarted() { + return exportStarted; + } + + public boolean isExportDestroyed() { + return exportDestroyed; + } public WebhookHttpSinkHandler(SinkConnectorConfig sinkConnectorConfig) { super(sinkConnectorConfig); - this.sinkConnectorConfig = sinkConnectorConfig; + this.webhookConfig = sinkConnectorConfig.getWebhookConfig(); - this.maxQueueSize = this.webhookConfig.getMaxStorageSize(); - this.currentQueueSize = new AtomicInteger(0); - this.receivedDataQueue = new ConcurrentLinkedQueue<>(); + int maxQueueSize = this.webhookConfig.getMaxStorageSize(); + this.receivedDataQueue = new SynchronizedCircularFifoQueue<>(maxQueueSize); // init the export server doInitExportServer(); } + /** * Initialize the server for exporting the received data */ @@ -135,7 +138,7 @@ private void doInitExportServer() { int pageNum = StringUtils.isBlank(pageNumStr) ? 
1 : Integer.parseInt(pageNumStr); int pageSize = Integer.parseInt(pageSizeStr); - if (currentQueueSize.get() == 0) { + if (receivedDataQueue.isEmpty()) { ctx.response() .putHeader(HttpHeaders.CONTENT_TYPE, "application/json; charset=utf-8") .setStatusCode(HttpResponseStatus.NO_CONTENT.code()) @@ -148,12 +151,12 @@ private void doInitExportServer() { List exportRecords; if (Objects.equals(type, TypeEnum.POLL.getValue())) { // If the type is poll, only the first page of data is exported and removed - exportRecords = getDataFromQueue(0, pageSize, true); + exportRecords = receivedDataQueue.fetchRange(0, pageSize, true); } else { // If the type is peek, the specified page of data is exported without removing int startIndex = (pageNum - 1) * pageSize; int endIndex = startIndex + pageSize; - exportRecords = getDataFromQueue(startIndex, endIndex, false); + exportRecords = receivedDataQueue.fetchRange(startIndex, endIndex, false); } // Create HttpExportRecordPage @@ -185,26 +188,15 @@ public void start() { // start the webclient super.start(); // start the export server - Throwable t = this.exportServer.listen().cause(); - if (t != null) { - throw new EventMeshException("Failed to start Vertx server. ", t); - } - } - - /** - * Processes a ConnectRecord by sending it over HTTP or HTTPS. This method should be called for each ConnectRecord that needs to be processed. - * - * @param record the ConnectRecord to process - */ - @Override - public void handle(ConnectRecord record) { - for (URI url : super.getUrls()) { - // convert ConnectRecord to HttpConnectRecord - String type = String.format("%s.%s.%s", this.getConnectorConfig().getConnectorName(), url.getScheme(), "webhook"); - HttpConnectRecord httpConnectRecord = HttpConnectRecord.convertConnectRecord(record, type); - // handle the HttpConnectRecord - deliver(url, httpConnectRecord); - } + this.exportServer.listen(res -> { + if (res.succeeded()) { + this.exportStarted = true; + log.info("WebhookHttpExportServer started on port: {}", this.webhookConfig.getPort()); + } else { + log.error("WebhookHttpExportServer failed to start on port: {}", this.webhookConfig.getPort()); + throw new EventMeshException("Failed to start Vertx server. ", res.cause()); + } + }); } @@ -214,91 +206,63 @@ public void handle(ConnectRecord record) { * * @param url URI to which the HttpConnectRecord should be sent * @param httpConnectRecord HttpConnectRecord to process + * @param attributes additional attributes to be used in processing * @return processing chain */ @Override - public Future> deliver(URI url, HttpConnectRecord httpConnectRecord) { + public Future> deliver(URI url, HttpConnectRecord httpConnectRecord, Map attributes, + ConnectRecord connectRecord) { // send the request - Future> responseFuture = super.deliver(url, httpConnectRecord); + Future> responseFuture = super.deliver(url, httpConnectRecord, attributes, connectRecord); // store the received data return responseFuture.onComplete(arr -> { - // If open retry, return directly and handled by RetryHttpSinkHandler - if (sinkConnectorConfig.getRetryConfig().getMaxRetries() > 0) { - return; - } - // create ExportMetadataBuilder + // get HttpAttemptEvent + HttpAttemptEvent attemptEvent = (HttpAttemptEvent) attributes.get(HttpAttemptEvent.PREFIX + httpConnectRecord.getHttpRecordId()); + + // get the response HttpResponse response = arr.succeeded() ? arr.result() : null; - HttpExportMetadata httpExportMetadata = HttpExportMetadata.builder() - .url(url.toString()) - .code(response != null ? 
response.statusCode() : -1) - .message(response != null ? response.statusMessage() : arr.cause().getMessage()) - .receivedTime(LocalDateTime.now()) - .retriedBy(null) - .uuid(httpConnectRecord.getUuid()) - .retryNum(0) - .build(); + // create ExportMetadata + HttpExportMetadata httpExportMetadata = buildHttpExportMetadata(url, response, httpConnectRecord, attemptEvent); // create ExportRecord HttpExportRecord exportRecord = new HttpExportRecord(httpExportMetadata, arr.succeeded() ? arr.result().bodyAsString() : null); // add the data to the queue - addDataToQueue(exportRecord); + receivedDataQueue.offer(exportRecord); }); } - /** - * Adds the received data to the queue. + * Builds the HttpExportMetadata object based on the response, HttpConnectRecord, and HttpRetryEvent. * - * @param exportRecord the received data to add to the queue + * @param url the URI to which the HttpConnectRecord was sent + * @param response the response received from the URI + * @param httpConnectRecord the HttpConnectRecord that was sent + * @param attemptEvent the HttpAttemptEvent that was used to send the HttpConnectRecord + * @return the HttpExportMetadata object */ - public void addDataToQueue(HttpExportRecord exportRecord) { - // If the current queue size is greater than or equal to the maximum queue size, remove the oldest element - if (currentQueueSize.get() >= maxQueueSize) { - Object removedData = receivedDataQueue.poll(); - if (log.isDebugEnabled()) { - log.debug("The queue is full, remove the oldest element: {}", removedData); - } else { - log.info("The queue is full, remove the oldest element"); - } - currentQueueSize.decrementAndGet(); + private HttpExportMetadata buildHttpExportMetadata(URI url, HttpResponse response, HttpConnectRecord httpConnectRecord, + HttpAttemptEvent attemptEvent) { + + String msg = null; + // order of precedence: lastException > response > null + if (attemptEvent.getLastException() != null) { + msg = attemptEvent.getLimitedExceptionMessage(); + } else if (response != null) { + msg = response.statusMessage(); } - // Try to put the received data into the queue - if (receivedDataQueue.offer(exportRecord)) { - currentQueueSize.incrementAndGet(); - log.debug("Successfully put the received data into the queue: {}", exportRecord); - } else { - log.error("Failed to put the received data into the queue: {}", exportRecord); - } - } - /** - * Gets the received data from the queue. - * - * @param startIndex the start index of the data to get - * @param endIndex the end index of the data to get - * @param removed whether to remove the data from the queue - * @return the received data - */ - private List getDataFromQueue(int startIndex, int endIndex, boolean removed) { - Iterator iterator = receivedDataQueue.iterator(); - - List pageItems = new ArrayList<>(endIndex - startIndex); - int count = 0; - while (iterator.hasNext() && count < endIndex) { - HttpExportRecord item = iterator.next(); - if (count >= startIndex) { - pageItems.add(item); - if (removed) { - iterator.remove(); - currentQueueSize.decrementAndGet(); - } - } - count++; - } - return pageItems; + return HttpExportMetadata.builder() + .url(url.toString()) + .code(response != null ? response.statusCode() : -1) + .message(msg) + .receivedTime(LocalDateTime.now()) + .recordId(httpConnectRecord.getHttpRecordId()) + .retryNum(attemptEvent.getAttempts() - 1) + .build(); } + /** * Cleans up and releases resources used by the HTTP/HTTPS handler. 
*/ @@ -308,10 +272,15 @@ public void stop() { super.stop(); // stop the export server if (this.exportServer != null) { - Throwable t = this.exportServer.close().cause(); - if (t != null) { - throw new EventMeshException("Failed to stop Vertx server. ", t); - } + this.exportServer.close(res -> { + if (res.succeeded()) { + this.exportDestroyed = true; + log.info("WebhookHttpExportServer stopped on port: {}", this.webhookConfig.getPort()); + } else { + log.error("WebhookHttpExportServer failed to stop on port: {}", this.webhookConfig.getPort()); + throw new EventMeshException("Failed to stop Vertx server. ", res.cause()); + } + }); } else { log.warn("Callback server is null, ignore."); } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java new file mode 100644 index 0000000000..6c78badaf4 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/HttpSourceConnector.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.http.source; + +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.http.HttpSourceConfig; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.connector.http.source.protocol.Protocol; +import org.apache.eventmesh.connector.http.source.protocol.ProtocolFactory; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import io.netty.handler.codec.http.HttpResponseStatus; +import io.vertx.core.Vertx; +import io.vertx.core.http.HttpServer; +import io.vertx.core.http.HttpServerOptions; +import io.vertx.ext.web.Route; +import io.vertx.ext.web.Router; +import io.vertx.ext.web.handler.LoggerHandler; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class HttpSourceConnector implements Source, ConnectorCreateService { + + private HttpSourceConfig sourceConfig; + + private BlockingQueue queue; + + private int maxBatchSize; + + private long maxPollWaitTime; + + private Route route; + + private Protocol protocol; + + private HttpServer server; + + @Getter + private volatile boolean started = false; + + @Getter + private volatile boolean destroyed = false; + + + @Override + public Class configClass() { + return HttpSourceConfig.class; + } + + @Override + public Source create() { + return new HttpSourceConnector(); + } + + @Override + public void init(Config config) { + this.sourceConfig = (HttpSourceConfig) config; + doInit(); + } + + @Override + public void init(ConnectorContext connectorContext) { + SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; + this.sourceConfig = (HttpSourceConfig) sourceConnectorContext.getSourceConfig(); + doInit(); + } + + private void doInit() { + // init queue + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + + // init poll batch size and timeout + this.maxBatchSize = this.sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = this.sourceConfig.getPollConfig().getMaxWaitTime(); + + // init protocol + String protocolName = this.sourceConfig.getConnectorConfig().getProtocol(); + this.protocol = ProtocolFactory.getInstance(this.sourceConfig.connectorConfig, protocolName); + + final Vertx vertx = Vertx.vertx(); + final Router router = Router.router(vertx); + route = router.route() + .path(this.sourceConfig.connectorConfig.getPath()) + .handler(LoggerHandler.create()); + + // set protocol handler + this.protocol.setHandler(route, queue); + + // create server + this.server = vertx.createHttpServer(new HttpServerOptions() + .setPort(this.sourceConfig.connectorConfig.getPort()) + .setMaxFormAttributeSize(this.sourceConfig.connectorConfig.getMaxFormAttributeSize()) + .setIdleTimeout(this.sourceConfig.connectorConfig.getIdleTimeout()) + .setIdleTimeoutUnit(TimeUnit.MILLISECONDS)).requestHandler(router); + } + + @Override + public void start() { + this.server.listen(res -> { + if (res.succeeded()) { + this.started = true; + 
log.info("HttpSourceConnector started on port: {}", this.sourceConfig.getConnectorConfig().getPort()); + } else { + log.error("HttpSourceConnector failed to start on port: {}", this.sourceConfig.getConnectorConfig().getPort()); + throw new EventMeshException("failed to start Vertx server", res.cause()); + } + }); + } + + @Override + public void commit(ConnectRecord record) { + if (this.route != null && sourceConfig.getConnectorConfig().isDataConsistencyEnabled()) { + this.route.handler(ctx -> { + // Return 200 OK + ctx.response() + .putHeader("content-type", "application/json") + .setStatusCode(HttpResponseStatus.OK.code()) + .end("{\"status\":\"success\",\"recordId\":\"" + record.getRecordId() + "\"}"); + }); + } + } + + @Override + public String name() { + return this.sourceConfig.getConnectorConfig().getConnectorName(); + } + + @Override + public void onException(ConnectRecord record) { + if (this.route != null) { + this.route.failureHandler(ctx -> { + log.error("Failed to handle the request, recordId {}. ", record.getRecordId(), ctx.failure()); + // Return Bad Response + ctx.response() + .setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()) + .end("{\"status\":\"failed\",\"recordId\":\"" + record.getRecordId() + "\"}"); + }); + } + } + + @Override + public void stop() { + if (this.server != null) { + this.server.close(res -> { + if (res.succeeded()) { + this.destroyed = true; + log.info("HttpSourceConnector stopped on port: {}", this.sourceConfig.getConnectorConfig().getPort()); + } else { + log.error("HttpSourceConnector failed to stop on port: {}", this.sourceConfig.getConnectorConfig().getPort()); + throw new EventMeshException("failed to stop Vertx server", res.cause()); + } + } + ); + } else { + log.warn("HttpSourceConnector server is null, ignore."); + } + } + + @Override + public List poll() { + // record current time + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + // poll from queue + List connectRecords = new ArrayList<>(maxBatchSize); + for (int i = 0; i < maxBatchSize; i++) { + try { + Object obj = queue.poll(remainingTime, TimeUnit.MILLISECONDS); + if (obj == null) { + break; + } + // convert to ConnectRecord + ConnectRecord connectRecord = protocol.convertToConnectRecord(obj); + connectRecords.add(connectRecord); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; + } catch (Exception e) { + log.error("Failed to poll from queue.", e); + break; + } + } + return connectRecords; + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/connector/HttpSourceConnector.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/connector/HttpSourceConnector.java deleted file mode 100644 index f9a6c568e0..0000000000 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/connector/HttpSourceConnector.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.eventmesh.connector.http.source.connector; - -import org.apache.eventmesh.common.exception.EventMeshException; -import org.apache.eventmesh.connector.http.source.config.HttpSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; -import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; -import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; -import org.apache.eventmesh.openconnect.api.source.Source; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.util.CloudEventUtil; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; - -import io.cloudevents.CloudEvent; -import io.cloudevents.http.vertx.VertxMessageFactory; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.vertx.core.Vertx; -import io.vertx.core.http.HttpMethod; -import io.vertx.core.http.HttpServer; -import io.vertx.core.http.HttpServerOptions; -import io.vertx.ext.web.Router; -import io.vertx.ext.web.handler.LoggerHandler; - -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class HttpSourceConnector implements Source { - - private static final int DEFAULT_BATCH_SIZE = 10; - - private HttpSourceConfig sourceConfig; - private BlockingQueue queue; - private HttpServer server; - - @Override - public Class configClass() { - return HttpSourceConfig.class; - } - - @Override - public void init(Config config) { - this.sourceConfig = (HttpSourceConfig) config; - doInit(); - } - - @Override - public void init(ConnectorContext connectorContext) { - SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; - this.sourceConfig = (HttpSourceConfig) sourceConnectorContext.getSourceConfig(); - doInit(); - } - - private void doInit() { - this.queue = new LinkedBlockingQueue<>(1000); - - final Vertx vertx = Vertx.vertx(); - final Router router = Router.router(vertx); - router.route() - .path(this.sourceConfig.connectorConfig.getPath()) - .method(HttpMethod.POST) - .handler(LoggerHandler.create()) - .handler(ctx -> { - VertxMessageFactory.createReader(ctx.request()) - .map(reader -> { - CloudEvent event = reader.toEvent(); - if (event.getSubject() == null) { - throw new IllegalStateException("attribute 'subject' cannot be null"); - } - if (event.getDataContentType() == null) { - throw new IllegalStateException("attribute 'datacontenttype' cannot be null"); - } - if (event.getData() == null) { - throw new IllegalStateException("attribute 'data' cannot be null"); - } - return event; - }) - .onSuccess(event -> { - queue.add(event); - log.info("[HttpSourceConnector] Succeed to convert payload into CloudEvent. 
StatusCode={}", HttpResponseStatus.OK.code()); - ctx.response().setStatusCode(HttpResponseStatus.OK.code()).end(); - }) - .onFailure(t -> { - log.error("[HttpSourceConnector] Malformed request. StatusCode={}", HttpResponseStatus.BAD_REQUEST.code(), t); - ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end(); - }); - }); - this.server = vertx.createHttpServer(new HttpServerOptions() - .setPort(this.sourceConfig.connectorConfig.getPort()) - .setIdleTimeout(this.sourceConfig.connectorConfig.getIdleTimeout())).requestHandler(router); - } - - @Override - public void start() { - Throwable t = this.server.listen().cause(); - if (t != null) { - throw new EventMeshException("failed to start Vertx server", t); - } - } - - @Override - public void commit(ConnectRecord record) { - - } - - @Override - public String name() { - return this.sourceConfig.getConnectorConfig().getConnectorName(); - } - - @Override - public void stop() { - Throwable t = this.server.close().cause(); - if (t != null) { - throw new EventMeshException("failed to stop Vertx server", t); - } - } - - @Override - public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int i = 0; i < DEFAULT_BATCH_SIZE; i++) { - try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); - if (event == null) { - break; - } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); - } catch (InterruptedException e) { - break; - } - } - return connectRecords; - } - -} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/CommonResponse.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/CommonResponse.java new file mode 100644 index 0000000000..870f2afbe5 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/CommonResponse.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.source.data; + +import java.io.Serializable; +import java.time.LocalDateTime; + +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * Webhook response. + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class CommonResponse implements Serializable { + + private static final long serialVersionUID = 8616938575207104455L; + + private String msg; + + private LocalDateTime handleTime; + + /** + * Convert to json string. 
+ * + * @return json string + */ + public String toJsonStr() { + return JSON.toJSONString(this, Feature.WriteMapNullValue); + } + + + /** + * Create a success response. + * + * @return response + */ + public static CommonResponse success() { + return base("success"); + } + + + /** + * Create a base response. + * + * @param msg message + * @return response + */ + public static CommonResponse base(String msg) { + return new CommonResponse(msg, LocalDateTime.now()); + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java new file mode 100644 index 0000000000..9e1dcb7b4c --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/data/WebhookRequest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.source.data; + +import java.io.Serializable; +import java.util.Map; + +import io.vertx.ext.web.RoutingContext; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * Webhook Protocol Request. + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class WebhookRequest implements Serializable { + + private static final long serialVersionUID = -483500600756490500L; + + private String protocolName; + + private String url; + + private Map headers; + + private Object payload; + + private RoutingContext routingContext; + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java new file mode 100644 index 0000000000..c5a22139e0 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/Protocol.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.source.protocol; + +import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.concurrent.BlockingQueue; + +import io.vertx.ext.web.Route; + + +/** + * Protocol Interface. + * All protocols should implement this interface. + */ +public interface Protocol { + + + /** + * Initialize the protocol. + * + * @param sourceConnectorConfig source connector config + */ + void initialize(SourceConnectorConfig sourceConnectorConfig); + + + /** + * Handle the protocol message. + * + * @param route route + * @param queue queue info + */ + void setHandler(Route route, BlockingQueue queue); + + + /** + * Convert the message to ConnectRecord. + * + * @param message message + * @return ConnectRecord + */ + ConnectRecord convertToConnectRecord(Object message); +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/ProtocolFactory.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/ProtocolFactory.java new file mode 100644 index 0000000000..6e6100e88b --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/ProtocolFactory.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.source.protocol; + +import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import org.apache.eventmesh.connector.http.source.protocol.impl.CloudEventProtocol; +import org.apache.eventmesh.connector.http.source.protocol.impl.CommonProtocol; +import org.apache.eventmesh.connector.http.source.protocol.impl.GitHubProtocol; + +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Protocol factory. This class is responsible for storing and creating instances of {@link Protocol} classes. 
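// Illustrative usage sketch, not part of the patch: resolving a Protocol instance by name.
// Lookups are case-insensitive and the factory initializes the new instance with the given
// SourceConnectorConfig. Constructing SourceConnectorConfig with a no-arg constructor is an
// assumption made only for this example.

import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig;
import org.apache.eventmesh.connector.http.source.protocol.Protocol;
import org.apache.eventmesh.connector.http.source.protocol.ProtocolFactory;
import org.apache.eventmesh.connector.http.source.protocol.impl.CloudEventProtocol;

public class ProtocolFactoryUsage {

    public static void main(String[] args) {
        SourceConnectorConfig config = new SourceConnectorConfig();

        // returns a fresh CloudEventProtocol, already initialized with the given config
        Protocol protocol = ProtocolFactory.getInstance(config, CloudEventProtocol.PROTOCOL_NAME);
        System.out.println("resolved protocol: " + protocol.getClass().getSimpleName());
    }
}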
+ */ +public class ProtocolFactory { + + // protocol name -> protocol class + private static final ConcurrentHashMap> protocols = new ConcurrentHashMap<>(); + + static { + // register all protocols + registerProtocol(CloudEventProtocol.PROTOCOL_NAME, CloudEventProtocol.class); + registerProtocol(GitHubProtocol.PROTOCOL_NAME, GitHubProtocol.class); + registerProtocol(CommonProtocol.PROTOCOL_NAME, CommonProtocol.class); + } + + + /** + * Register a protocol + * + * @param name name of the protocol + * @param clazz class of the protocol + */ + public static void registerProtocol(String name, Class clazz) { + if (Protocol.class.isAssignableFrom(clazz)) { + // put the class into the map(case insensitive) + protocols.put(name.toLowerCase(), clazz); + } else { + throw new IllegalArgumentException("Class " + clazz.getName() + " does not implement Protocol interface"); + } + } + + /** + * Get an instance of a protocol, if it is not already created, create a new instance + * + * @param name name of the protocol + * @return instance of the protocol + */ + public static Protocol getInstance(SourceConnectorConfig sourceConnectorConfig, String name) { + // get the class by name(case insensitive) + Class clazz = Optional.ofNullable(protocols.get(name.toLowerCase())) + .orElseThrow(() -> new IllegalArgumentException("Protocol " + name + " is not registered")); + try { + // create a new instance + Protocol protocol = (Protocol) clazz.newInstance(); + // initialize the protocol + protocol.initialize(sourceConnectorConfig); + return protocol; + } catch (InstantiationException | IllegalAccessException e) { + throw new IllegalArgumentException("Failed to instantiate protocol " + name, e); + } + } + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/WebhookConstants.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/WebhookConstants.java new file mode 100644 index 0000000000..b31637427b --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/WebhookConstants.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.source.protocol; + +public class WebhookConstants { + + /** + * -------------------------------------- About GitHub -------------------------------------- + */ + + // A globally unique identifier (GUID) to identify the delivery. + public static final String GITHUB_DELIVERY = "X-GitHub-Delivery"; + + // This header is sent if the webhook is configured with a secret. 
+ // We recommend that you use the more secure X-Hub-Signature-256 instead + public static final String GITHUB_SIGNATURE = "X-Hub-Signature"; + + // This header is sent if the webhook is configured with a secret + public static final String GITHUB_SIGNATURE_256 = "X-Hub-Signature-256"; + + public static final String GITHUB_HASH_265_PREFIX = "sha256="; + + // The name of the event that triggered the delivery. + public static final String GITHUB_EVENT = "X-GitHub-Event"; + + // The unique identifier of the webhook. + public static final String GITHUB_HOOK_ID = "X-GitHub-Hook-ID"; + + // The unique identifier of the resource where the webhook was created. + public static final String GITHUB_HOOK_INSTALLATION_TARGET_ID = "X-GitHub-Hook-Installation-Target-ID"; + + // The type of resource where the webhook was created. + public static final String GITHUB_HOOK_INSTALLATION_TARGET_TYPE = "X-GitHub-Hook-Installation-Target-Type"; + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java new file mode 100644 index 0000000000..a44ed0e90c --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CloudEventProtocol.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.http.source.protocol.impl; + +import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import org.apache.eventmesh.connector.http.source.data.CommonResponse; +import org.apache.eventmesh.connector.http.source.protocol.Protocol; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.CloudEventUtil; + +import java.util.concurrent.BlockingQueue; + +import io.cloudevents.CloudEvent; +import io.cloudevents.http.vertx.VertxMessageFactory; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.vertx.core.http.HttpMethod; +import io.vertx.ext.web.Route; + +import lombok.extern.slf4j.Slf4j; + +/** + * CloudEvent Protocol. + */ +@Slf4j +public class CloudEventProtocol implements Protocol { + + // Protocol name + public static final String PROTOCOL_NAME = "CloudEvent"; + + + /** + * Initialize the protocol. + * + * @param sourceConnectorConfig source connector config + */ + @Override + public void initialize(SourceConnectorConfig sourceConnectorConfig) { + + } + + + /** + * Handle the protocol message for CloudEvent. 
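The CloudEvent handler that follows rejects events whose subject, datacontenttype or data attributes are missing. A client-side sketch of building a compliant event with the CloudEvents Java SDK builder, assuming cloudevents-core is on the classpath; the attribute values are the same examples used by the new HttpSourceConnectorTest further down:

```java
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.UUID;

import io.cloudevents.CloudEvent;
import io.cloudevents.core.builder.CloudEventBuilder;

public class CloudEventSample {

    public static CloudEvent build() {
        return CloudEventBuilder.v1()
            .withId(UUID.randomUUID().toString())
            .withSource(URI.create("/mycontext"))
            .withType("com.example.someevent")
            // the three attributes the CloudEventProtocol handler insists on:
            .withSubject("test")
            .withDataContentType("text/plain")
            .withData("testHttpMessage".getBytes(StandardCharsets.UTF_8))
            .build();
    }
}
```

In binary content mode the same attributes travel as ce-* request headers, which is exactly what the new source connector test does.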
+ * + * @param route route + * @param queue queue info + */ + @Override + public void setHandler(Route route, BlockingQueue queue) { + route.method(HttpMethod.POST) + .handler(ctx -> VertxMessageFactory.createReader(ctx.request()) + .map(reader -> { + CloudEvent event = reader.toEvent(); + if (event.getSubject() == null) { + throw new IllegalStateException("attribute 'subject' cannot be null"); + } + if (event.getDataContentType() == null) { + throw new IllegalStateException("attribute 'datacontenttype' cannot be null"); + } + if (event.getData() == null) { + throw new IllegalStateException("attribute 'data' cannot be null"); + } + return event; + }) + .onSuccess(event -> { + // Add the event to the queue, thread-safe + if (!queue.offer(event)) { + throw new IllegalStateException("Failed to store the request."); + } + log.info("[HttpSourceConnector] Succeed to convert payload into CloudEvent. StatusCode={}", HttpResponseStatus.OK.code()); + ctx.response() + .setStatusCode(HttpResponseStatus.OK.code()) + .end(CommonResponse.success().toJsonStr()); + }) + .onFailure(t -> { + log.error("[HttpSourceConnector] Malformed request. StatusCode={}", HttpResponseStatus.BAD_REQUEST.code(), t); + ctx.response() + .setStatusCode(HttpResponseStatus.BAD_REQUEST.code()) + .end(CommonResponse.base(t.getMessage()).toJsonStr()); + })); + } + + /** + * Convert the message to ConnectRecord. + * + * @param message message + * @return ConnectRecord + */ + @Override + public ConnectRecord convertToConnectRecord(Object message) { + return CloudEventUtil.convertEventToRecord((CloudEvent) message); + } +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java new file mode 100644 index 0000000000..e831dc9723 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/CommonProtocol.java @@ -0,0 +1,139 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
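The protocol handlers only buffer raw messages; conversion to ConnectRecord is deferred to convertToConnectRecord. The connector's poll() itself is not part of this diff, so the following is only an illustrative sketch of how the two halves could fit together (class and method names are hypothetical):

```java
import org.apache.eventmesh.connector.http.source.protocol.Protocol;
import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;

public class QueueDrainSketch {

    /** Drain up to maxBatchSize buffered messages and convert them to records. */
    public static List<ConnectRecord> drain(BlockingQueue<Object> queue, Protocol protocol, int maxBatchSize) {
        List<Object> messages = new ArrayList<>(maxBatchSize);
        queue.drainTo(messages, maxBatchSize);

        List<ConnectRecord> records = new ArrayList<>(messages.size());
        for (Object message : messages) {
            records.add(protocol.convertToConnectRecord(message));
        }
        return records;
    }
}
```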
+ */ + +package org.apache.eventmesh.connector.http.source.protocol.impl; + +import org.apache.eventmesh.common.Constants; +import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.connector.http.source.data.CommonResponse; +import org.apache.eventmesh.connector.http.source.data.WebhookRequest; +import org.apache.eventmesh.connector.http.source.protocol.Protocol; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import java.util.Base64; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.stream.Collectors; + +import io.netty.handler.codec.http.HttpResponseStatus; +import io.vertx.core.http.HttpMethod; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.web.Route; +import io.vertx.ext.web.handler.BodyHandler; + +import lombok.extern.slf4j.Slf4j; + +/** + * Common Protocol. This class represents the common webhook protocol. The processing method of this class does not perform any other operations + * except storing the request and returning a general response. + */ +@Slf4j +public class CommonProtocol implements Protocol { + + public static final String PROTOCOL_NAME = "Common"; + + private SourceConnectorConfig sourceConnectorConfig; + + /** + * Initialize the protocol + * + * @param sourceConnectorConfig source connector config + */ + @Override + public void initialize(SourceConnectorConfig sourceConnectorConfig) { + this.sourceConnectorConfig = sourceConnectorConfig; + } + + /** + * Set the handler for the route + * + * @param route route + * @param queue queue info + */ + @Override + public void setHandler(Route route, BlockingQueue queue) { + route.method(HttpMethod.POST) + .handler(BodyHandler.create()) + .handler(ctx -> { + // Get the payload + Object payload = ctx.body().asString(Constants.DEFAULT_CHARSET.toString()); + payload = JsonUtils.parseObject(payload.toString(), String.class); + + // Create and store the webhook request + Map headerMap = ctx.request().headers().entries().stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + WebhookRequest webhookRequest = new WebhookRequest(PROTOCOL_NAME, ctx.request().absoluteURI(), headerMap, payload, ctx); + if (!queue.offer(webhookRequest)) { + throw new IllegalStateException("Failed to store the request."); + } + + if (!sourceConnectorConfig.isDataConsistencyEnabled()) { + // Return 200 OK + ctx.response() + .setStatusCode(HttpResponseStatus.OK.code()) + .end(CommonResponse.success().toJsonStr()); + } + + }) + .failureHandler(ctx -> { + log.error("Failed to handle the request. 
", ctx.failure()); + + // Return Bad Response + ctx.response() + .setStatusCode(ctx.statusCode()) + .end(CommonResponse.base(ctx.failure().getMessage()).toJsonStr()); + }); + + } + + /** + * Convert the message to a connect record + * + * @param message message + * @return connect record + */ + @Override + public ConnectRecord convertToConnectRecord(Object message) { + WebhookRequest request = (WebhookRequest) message; + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), request.getPayload()); + connectRecord.addExtension("source", request.getProtocolName()); + connectRecord.addExtension("url", request.getUrl()); + request.getHeaders().forEach((k, v) -> { + if (k.equalsIgnoreCase("extension")) { + JsonObject extension = new JsonObject(v); + extension.forEach(e -> connectRecord.addExtension(e.getKey(), e.getValue())); + } + }); + // check recordUniqueId + if (!connectRecord.getExtensions().containsKey("recordUniqueId")) { + connectRecord.addExtension("recordUniqueId", connectRecord.getRecordId()); + } + + // check data + if (connectRecord.getExtensionObj("isBase64") != null) { + if (Boolean.parseBoolean(connectRecord.getExtensionObj("isBase64").toString())) { + byte[] data = Base64.getDecoder().decode(connectRecord.getData().toString()); + connectRecord.setData(data); + } + } + if (request.getRoutingContext() != null) { + connectRecord.addExtension("routingContext", request.getRoutingContext()); + } + return connectRecord; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java new file mode 100644 index 0000000000..e1edbd0faf --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/java/org/apache/eventmesh/connector/http/source/protocol/impl/GitHubProtocol.java @@ -0,0 +1,227 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.http.source.protocol.impl; + +import org.apache.eventmesh.common.Constants; +import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.connector.http.source.data.CommonResponse; +import org.apache.eventmesh.connector.http.source.data.WebhookRequest; +import org.apache.eventmesh.connector.http.source.protocol.Protocol; +import org.apache.eventmesh.connector.http.source.protocol.WebhookConstants; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; + +import org.apache.commons.lang3.BooleanUtils; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.stream.Collectors; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; + +import io.netty.handler.codec.http.HttpResponseStatus; +import io.vertx.core.MultiMap; +import io.vertx.core.http.HttpMethod; +import io.vertx.ext.web.Route; +import io.vertx.ext.web.handler.BodyHandler; + +import com.alibaba.fastjson2.JSONObject; + +import lombok.extern.slf4j.Slf4j; + + +/** + * GitHub Protocol. This class represents the GitHub webhook protocol. + */ +@Slf4j +public class GitHubProtocol implements Protocol { + + // Protocol name + public static final String PROTOCOL_NAME = "GitHub"; + + private static final String H_MAC_SHA_265 = "HmacSHA256"; + + private static final String SECRET_KEY = "secret"; + + private String contentType = "application/json"; + + private String secret; + + + /** + * Initialize the protocol. + * + * @param sourceConnectorConfig source connector config + */ + @Override + public void initialize(SourceConnectorConfig sourceConnectorConfig) { + // Initialize the protocol + Map extraConfig = sourceConnectorConfig.getExtraConfig(); + // set the secret, if it is not set, throw an exception + this.secret = extraConfig.get(SECRET_KEY); + if (StringUtils.isBlank(this.secret)) { + throw new EventMeshException("The secret is required for GitHub protocol."); + } + // if the content-type is not set, use the default value + this.contentType = extraConfig.getOrDefault("contentType", contentType); + } + + /** + * Handle the protocol message for GitHub. + * + * @param route route + * @param queue queue info + */ + @Override + public void setHandler(Route route, BlockingQueue queue) { + route.method(HttpMethod.POST) + .handler(BodyHandler.create()) + .handler(ctx -> { + // Get the payload and headers + String payloadStr = ctx.body().asString(Constants.DEFAULT_CHARSET.toString()); + MultiMap headers = ctx.request().headers(); + + // validate the content type + if (!StringUtils.contains(headers.get("Content-Type"), contentType)) { + String errorMsg = String.format("content-type is invalid, please check the content-type. received content-type: %s", + headers.get("Content-Type")); + // Return Bad Request + ctx.fail(HttpResponseStatus.BAD_REQUEST.code(), new EventMeshException(errorMsg)); + return; + } + + // validate the signature + String signature = headers.get(WebhookConstants.GITHUB_SIGNATURE_256); + if (BooleanUtils.isFalse(validateSignature(signature, payloadStr, secret))) { + String errorMsg = String.format("signature is invalid, please check the secret. 
received signature: %s", signature); + // Return Bad Request + ctx.fail(HttpResponseStatus.BAD_REQUEST.code(), new EventMeshException(errorMsg)); + return; + } + + // if the content type is form data, convert it to json string + if (StringUtils.contains(contentType, "application/x-www-form-urlencoded") + || StringUtils.contains(contentType, "multipart/form-data")) { + /* + Convert form data to json string. There are the following benefits: + 1. Raw form data is not decoded, so it is not easy to process directly. + 2. Converted to reduce storage space by more than 20 percent. Experimental result: 10329 bytes -> 7893 bytes. + */ + JSONObject payloadObj = new JSONObject(); + ctx.request().formAttributes().forEach(entry -> payloadObj.put(entry.getKey(), entry.getValue())); + payloadStr = payloadObj.toJSONString(); + } + + // Create and store the webhook request + Map headerMap = headers.entries().stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + WebhookRequest webhookRequest = new WebhookRequest(PROTOCOL_NAME, ctx.request().absoluteURI(), headerMap, payloadStr, ctx); + + if (!queue.offer(webhookRequest)) { + throw new IllegalStateException("Failed to store the request."); + } + + // Return 200 OK + ctx.response() + .setStatusCode(HttpResponseStatus.OK.code()) + .end(CommonResponse.success().toJsonStr()); + }) + .failureHandler(ctx -> { + log.error("Failed to handle the request from github. ", ctx.failure()); + + // Return Bad Response + ctx.response() + .setStatusCode(ctx.statusCode()) + .end(CommonResponse.base(ctx.failure().getMessage()).toJsonStr()); + }); + } + + + /** + * Validate the signature. + * + * @param signature signature + * @param payload payload + * @param secret secret + * @return boolean + */ + public boolean validateSignature(String signature, String payload, String secret) { + String hash = WebhookConstants.GITHUB_HASH_265_PREFIX; + try { + Mac sha = Mac.getInstance(H_MAC_SHA_265); + SecretKeySpec secretKey = new SecretKeySpec(secret.getBytes(Constants.DEFAULT_CHARSET), H_MAC_SHA_265); + sha.init(secretKey); + byte[] bytes = sha.doFinal(payload.getBytes(Constants.DEFAULT_CHARSET)); + hash += byteArrayToHexString(bytes); + } catch (Exception e) { + throw new EventMeshException("Error occurred while validating the signature.", e); + } + + return hash.equals(signature); + } + + + /** + * Convert the byte array to hex string. + * + * @param bytes bytes + * @return String + */ + private String byteArrayToHexString(byte[] bytes) { + if (bytes == null) { + return ""; + } + + StringBuilder sb = new StringBuilder(); + for (byte b : bytes) { + String hex = Integer.toHexString(0xFF & b); + if (hex.length() == 1) { + // If the length is 1, append 0 + sb.append('0'); + } + sb.append(hex); + } + + return sb.toString(); + } + + + /** + * Convert the message to ConnectRecord. 
+ * + * @param message message + * @return ConnectRecord + */ + @Override + public ConnectRecord convertToConnectRecord(Object message) { + WebhookRequest request = (WebhookRequest) message; + Map headers = request.getHeaders(); + + // Create the ConnectRecord + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), request.getPayload()); + connectRecord.addExtension("id", headers.get(WebhookConstants.GITHUB_DELIVERY)); + connectRecord.addExtension("topic", headers.get(WebhookConstants.GITHUB_EVENT)); + connectRecord.addExtension("source", headers.get(request.getProtocolName())); + connectRecord.addExtension("type", headers.get(WebhookConstants.GITHUB_HOOK_INSTALLATION_TARGET_TYPE)); + return connectRecord; + } + + +} diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService new file mode 100644 index 0000000000..d62ff11992 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.api.ConnectorCreateService @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +HTTP-Source=org.apache.eventmesh.connector.http.source.HttpSourceConnector +HTTP-Sink=org.apache.eventmesh.connector.http.sink.HttpSinkConnector diff --git a/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml index 9fcc471d3b..0a73e627b0 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml +++ b/eventmesh-connectors/eventmesh-connector-http/src/main/resources/source-config.yml @@ -28,4 +28,9 @@ connectorConfig: connectorName: httpSource path: /test port: 3755 - idleTimeout: 5 \ No newline at end of file + idleTimeout: 5000 # timeunit: ms + maxFormAttributeSize: 1048576 # timeunit: byte, default: 1048576(1MB). This applies only when handling form data submissions. + protocol: CloudEvent # Case insensitive, default: CloudEvent, options: CloudEvent, GitHub, Common + extraConfig: # extra config for different protocol, e.g. 
GitHub secret + secret: xxxxxxx # GitHub secret + contentType: application/json # GitHub content type \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/connector/HttpSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java similarity index 51% rename from eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/connector/HttpSinkConnectorTest.java rename to eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java index 738df6430b..be2b52e737 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/connector/HttpSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/sink/HttpSinkConnectorTest.java @@ -15,40 +15,40 @@ * limitations under the License. */ -package org.apache.eventmesh.connector.http.source.connector; +package org.apache.eventmesh.connector.http.sink; + import static org.mockserver.model.HttpRequest.request; -import org.apache.eventmesh.connector.http.sink.HttpSinkConnector; -import org.apache.eventmesh.connector.http.sink.config.HttpSinkConfig; -import org.apache.eventmesh.connector.http.sink.config.HttpWebhookConfig; +import org.apache.eventmesh.common.config.connector.http.HttpSinkConfig; +import org.apache.eventmesh.common.config.connector.http.HttpWebhookConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; +import org.apache.hc.client5.http.fluent.Request; +import org.apache.hc.core5.http.HttpStatus; +import org.apache.hc.core5.net.URIBuilder; + import java.net.URI; +import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockserver.integration.ClientAndServer; -import org.mockserver.model.HttpRequest; import org.mockserver.model.HttpResponse; import org.mockserver.model.MediaType; + import com.alibaba.fastjson2.JSON; import com.alibaba.fastjson2.JSONArray; import com.alibaba.fastjson2.JSONObject; -import okhttp3.HttpUrl; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; -import okhttp3.ResponseBody; public class HttpSinkConnectorTest { @@ -56,42 +56,44 @@ public class HttpSinkConnectorTest { private HttpSinkConfig sinkConfig; - private URI severUri; + private URL url; private ClientAndServer mockServer; + private static final AtomicInteger counter = new AtomicInteger(0); @BeforeEach void before() throws Exception { // init sinkConnector - this.sinkConnector = new HttpSinkConnector(); - this.sinkConfig = (HttpSinkConfig) ConfigUtil.parse(sinkConnector.configClass()); - this.sinkConnector.init(this.sinkConfig); - this.sinkConnector.start(); + sinkConnector = new HttpSinkConnector(); + sinkConfig = (HttpSinkConfig) ConfigUtil.parse(sinkConnector.configClass()); + sinkConnector.init(this.sinkConfig); + sinkConnector.start(); - 
this.severUri = URI.create(sinkConfig.connectorConfig.getUrls()[0]); + url = new URL(sinkConfig.connectorConfig.getUrls()[0]); // start mockServer - mockServer = ClientAndServer.startClientAndServer(severUri.getPort()); + mockServer = ClientAndServer.startClientAndServer(url.getPort()); mockServer.reset() .when( request() .withMethod("POST") - .withPath(severUri.getPath()) + .withPath(url.getPath()) ) .respond( httpRequest -> { - JSONObject requestBody = JSON.parseObject(httpRequest.getBodyAsString()); + // Increase the number of requests received + counter.incrementAndGet(); return HttpResponse.response() .withContentType(MediaType.APPLICATION_JSON) - .withStatusCode(200) + .withStatusCode(HttpStatus.SC_OK) .withBody(new JSONObject() .fluentPut("code", 0) .fluentPut("message", "success") - .fluentPut("data", requestBody.getJSONObject("data").get("data")) .toJSONString() ); // .withDelay(TimeUnit.SECONDS, 10); } ); + } @AfterEach @@ -103,68 +105,61 @@ void after() throws Exception { @Test void testPut() throws Exception { // Create a list of ConnectRecord - final int times = 10; + final int size = 10; List connectRecords = new ArrayList<>(); - for (int i = 0; i < times; i++) { + for (int i = 0; i < size; i++) { ConnectRecord record = createConnectRecord(); connectRecords.add(record); } // Put ConnectRecord sinkConnector.put(connectRecords); - // sleep 5s - Thread.sleep(5000); - - // verify request - HttpRequest[] recordedRequests = mockServer.retrieveRecordedRequests(null); - assert recordedRequests.length == times; + // wait for receiving request + final int times = 5000; // 5 seconds + long start = System.currentTimeMillis(); + while (counter.get() < size) { + if (System.currentTimeMillis() - start > times) { + // timeout + Assertions.fail("The number of requests received=" + counter.get() + " is less than the number of ConnectRecord=" + size); + } else { + Thread.sleep(100); + } + } // verify response HttpWebhookConfig webhookConfig = sinkConfig.connectorConfig.getWebhookConfig(); - String url = new HttpUrl.Builder() - .scheme("http") - .host(severUri.getHost()) - .port(webhookConfig.getPort()) - .addPathSegments(webhookConfig.getExportPath()) - .addQueryParameter("pageNum", "1") - .addQueryParameter("pageSize", "10") - .addQueryParameter("type", "poll") - .build().toString(); - - // build request - Request request = new Request.Builder() - .url(url) - .addHeader("Content-Type", "application/json") + + URI exportUrl = new URIBuilder() + .setScheme("http") + .setHost(url.getHost()) + .setPort(webhookConfig.getPort()) + .setPath(webhookConfig.getExportPath()) + .addParameter("pageNum", "1") + .addParameter("pageSize", "10") + .addParameter("type", "poll") .build(); - OkHttpClient client = new OkHttpClient(); - try (Response response = client.newCall(request).execute()) { - // check response code - if (!response.isSuccessful()) { - throw new RuntimeException("Unexpected response code: " + response); - } - // check response body - ResponseBody responseBody = response.body(); - if (responseBody != null) { - JSONObject jsonObject = JSON.parseObject(responseBody.string()); + Request.get(exportUrl) + .execute() + .handleResponse(response -> { + // check response code + Assertions.assertEquals(HttpStatus.SC_OK, response.getCode()); + // check response body + JSONObject jsonObject = JSON.parseObject(response.getEntity().getContent()); JSONArray pageItems = jsonObject.getJSONArray("pageItems"); - assert pageItems != null && pageItems.size() == times; - - for (int i = 0; i < times; i++) { + 
Assertions.assertNotNull(pageItems); + Assertions.assertEquals(size, pageItems.size()); + for (int i = 0; i < size; i++) { JSONObject pageItem = pageItems.getJSONObject(i); - assert pageItem != null; - assert pageItem.getJSONObject("data") != null; - assert pageItem.getJSONObject("metadata") != null; + Assertions.assertNotNull(pageItem); } - } - } + return null; + }); } private ConnectRecord createConnectRecord() { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); long timestamp = System.currentTimeMillis(); - return new ConnectRecord(partition, offset, timestamp, UUID.randomUUID().toString()); + return new ConnectRecord(null, null, timestamp, UUID.randomUUID().toString()); } } diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/HttpSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/HttpSourceConnectorTest.java new file mode 100644 index 0000000000..0dbac47653 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/HttpSourceConnectorTest.java @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
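The rewritten sink test above waits on a request counter with a deadline instead of sleeping a fixed five seconds. The same pattern can be factored into a small reusable helper; this is a sketch, not something added by the patch:

```java
import java.util.function.BooleanSupplier;

public final class WaitUtil {

    private WaitUtil() {
    }

    /**
     * Poll the condition every 100 ms until it holds or the timeout elapses.
     *
     * @return true if the condition became true within the timeout
     */
    public static boolean waitUntil(BooleanSupplier condition, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (!condition.getAsBoolean()) {
            if (System.currentTimeMillis() >= deadline) {
                return false;
            }
            Thread.sleep(100);
        }
        return true;
    }
}
```

With such a helper the wait in testPut could read Assertions.assertTrue(WaitUtil.waitUntil(() -> counter.get() >= size, 5000)); the test itself keeps its inline loop.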
+ */ + +package org.apache.eventmesh.connector.http.source; + + +import org.apache.eventmesh.common.config.connector.http.HttpSourceConfig; +import org.apache.eventmesh.common.config.connector.http.SourceConnectorConfig; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; + +import org.apache.hc.client5.http.fluent.Request; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpStatus; +import org.apache.hc.core5.http.io.entity.StringEntity; + +import java.io.IOException; +import java.net.URL; +import java.util.List; +import java.util.Objects; +import java.util.UUID; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + + +class HttpSourceConnectorTest { + + private static HttpSourceConnector connector; + private static String url; + private static final String expectedMessage = "testHttpMessage"; + private static final int batchSize = 10; + + + @BeforeAll + static void setUpAll() throws Exception { + connector = new HttpSourceConnector(); + final HttpSourceConfig sourceConfig = (HttpSourceConfig) ConfigUtil.parse(connector.configClass()); + final SourceConnectorConfig config = sourceConfig.getConnectorConfig(); + // initialize and start the connector + connector.init(sourceConfig); + connector.start(); + + // wait for the connector to start + long timeout = 5000; // 5 seconds + long start = System.currentTimeMillis(); + while (!connector.isStarted()) { + if (System.currentTimeMillis() - start > timeout) { + // timeout + Assertions.fail("Failed to start the connector"); + } else { + Thread.sleep(100); + } + } + + url = new URL("http", "127.0.0.1", config.getPort(), config.getPath()).toString(); + } + + @AfterAll + static void tearDownAll() throws IOException { + connector.stop(); + } + + + @Test + void testPollForBinaryRequest() { + for (int i = 0; i < batchSize; i++) { + try { + // Set the request body + StringEntity entity = new StringEntity(expectedMessage, ContentType.TEXT_PLAIN); + + Request.post(url) + .addHeader("Content-Type", "text/plain") + .addHeader("ce-id", String.valueOf(UUID.randomUUID())) + .addHeader("ce-specversion", "1.0") + .addHeader("ce-type", "com.example.someevent") + .addHeader("ce-source", "/mycontext") + .addHeader("ce-subject", "test") + .body(entity) + .execute() + .handleResponse(res -> { + Assertions.assertEquals(HttpStatus.SC_OK, res.getCode()); + return null; + }); + } catch (IOException e) { + Assertions.fail("Failed to send request", e); + } + } + List res = connector.poll(); + Assertions.assertEquals(batchSize, res.size()); + for (ConnectRecord r : res) { + Assertions.assertEquals(expectedMessage, new String((byte[]) r.getData())); + } + } + + @Test + void testPollForStructuredRequest() { + for (int i = 0; i < batchSize; i++) { + try { + // Create a CloudEvent + TestEvent event = new TestEvent(); + event.id = String.valueOf(UUID.randomUUID()); + event.specversion = "1.0"; + event.type = "com.example.someevent"; + event.source = "/mycontext"; + event.subject = "test"; + event.datacontenttype = "text/plain"; + event.data = expectedMessage; + + // Set the request body + StringEntity entity = new StringEntity(Objects.requireNonNull(JsonUtils.toJSONString(event)), ContentType.APPLICATION_JSON); + + // Send the request and return the response + Request.post(url) + .addHeader("Content-Type", 
"application/cloudevents+json") + .body(entity) + .execute() + .handleResponse(res -> { + Assertions.assertEquals(HttpStatus.SC_OK, res.getCode()); + return null; + }); + } catch (IOException e) { + Assertions.fail("Failed to send request", e); + } + } + List res = connector.poll(); + Assertions.assertEquals(batchSize, res.size()); + for (ConnectRecord r : res) { + Assertions.assertEquals(expectedMessage, new String((byte[]) r.getData())); + } + } + + + @Test + void testPollForInvalidRequest() { + // Send a bad request. + try { + Request.post(url) + .addHeader("Content-Type", "text/plain") + .execute() + .handleResponse(res -> { + // Check the response code + Assertions.assertEquals(HttpStatus.SC_BAD_REQUEST, res.getCode()); + return null; + }); + } catch (IOException e) { + Assertions.fail("Failed to send request", e); + } + } + + class TestEvent { + + public String specversion; + public String type; + public String source; + public String subject; + public String datacontenttype; + public String id; + + public String data; + } +} \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/connector/HttpSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/connector/HttpSourceConnectorTest.java deleted file mode 100644 index 35d58b75c1..0000000000 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/java/org/apache/eventmesh/connector/http/source/connector/HttpSourceConnectorTest.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.eventmesh.connector.http.source.connector; - -import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.connector.http.source.config.HttpSourceConfig; -import org.apache.eventmesh.connector.http.source.config.SourceConnectorConfig; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.util.ConfigUtil; - -import org.apache.http.HttpHeaders; -import org.apache.http.HttpResponse; -import org.apache.http.HttpStatus; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; - -import java.util.List; -import java.util.UUID; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class HttpSourceConnectorTest { - - private HttpSourceConnector connector; - private SourceConnectorConfig config; - private CloseableHttpClient httpClient; - private String uri; - private final String expectedMessage = "testHttpMessage"; - - @BeforeEach - void setUp() throws Exception { - connector = new HttpSourceConnector(); - HttpSourceConfig sourceConfig = (HttpSourceConfig) ConfigUtil.parse(connector.configClass()); - config = sourceConfig.getConnectorConfig(); - connector.init(sourceConfig); - connector.start(); - - uri = new URIBuilder().setScheme("http").setHost("127.0.0.1").setPort(config.getPort()).setPath(config.getPath()).build().toString(); - - httpClient = HttpClients.createDefault(); - } - - @Test - void testPoll() throws Exception { - final int batchSize = 10; - // test binary content mode - for (int i = 0; i < batchSize; i++) { - HttpResponse resp = mockBinaryRequest(); - Assertions.assertEquals(resp.getStatusLine().getStatusCode(), HttpStatus.SC_OK); - - } - List res = connector.poll(); - Assertions.assertEquals(batchSize, res.size()); - for (ConnectRecord r : res) { - Assertions.assertEquals(expectedMessage, new String((byte[]) r.getData())); - } - - // test structured content mode - for (int i = 0; i < batchSize; i++) { - HttpResponse resp = mockStructuredRequest(); - Assertions.assertEquals(resp.getStatusLine().getStatusCode(), HttpStatus.SC_OK); - } - res = connector.poll(); - Assertions.assertEquals(batchSize, res.size()); - for (ConnectRecord r : res) { - Assertions.assertEquals(expectedMessage, new String((byte[]) r.getData())); - } - - // test invalid requests - HttpPost invalidPost = new HttpPost(uri); - invalidPost.setHeader(HttpHeaders.CONTENT_TYPE, "text/plain"); - invalidPost.setHeader("ce-id", String.valueOf(UUID.randomUUID())); - HttpResponse resp = httpClient.execute(invalidPost); - Assertions.assertEquals(HttpStatus.SC_BAD_REQUEST, resp.getStatusLine().getStatusCode()); - } - - HttpResponse mockBinaryRequest() throws Exception { - HttpPost httpPost = new HttpPost(uri); - httpPost.setHeader(HttpHeaders.CONTENT_TYPE, "text/plain"); - httpPost.setHeader("ce-id", String.valueOf(UUID.randomUUID())); - httpPost.setHeader("ce-specversion", "1.0"); - httpPost.setHeader("ce-type", "com.example.someevent"); - httpPost.setHeader("ce-source", "/mycontext"); - httpPost.setHeader("ce-subject", "test"); - httpPost.setEntity(new StringEntity(expectedMessage)); - - return httpClient.execute(httpPost); - } - - HttpResponse mockStructuredRequest() throws Exception { - HttpPost httpPost = new 
HttpPost(uri); - // according to the CloudEvent specification, a json format event MUST use the media type `application/cloudevents+json` - httpPost.setHeader(HttpHeaders.CONTENT_TYPE, "application/cloudevents+json"); - TestEvent event = new TestEvent(); - event.id = String.valueOf(UUID.randomUUID()); - event.specversion = "1.0"; - event.type = "com.example.someevent"; - event.source = "/mycontext"; - event.subject = "test"; - event.datacontenttype = "text/plain"; - event.data = expectedMessage; - httpPost.setEntity(new StringEntity(JsonUtils.toJSONString(event))); - - return httpClient.execute(httpPost); - } - - @AfterEach - void tearDown() throws Exception { - connector.stop(); - httpClient.close(); - } - - class TestEvent { - - public String specversion; - public String type; - public String source; - public String subject; - public String datacontenttype; - public String id; - - public String data; - } -} \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml b/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml index 0a3e68d070..336bb2cb5e 100644 --- a/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml +++ b/eventmesh-connectors/eventmesh-connector-http/src/test/resources/source-config.yml @@ -27,4 +27,10 @@ pubSubConfig: connectorConfig: connectorName: httpSource path: /test - port: 3755 \ No newline at end of file + port: 3755 + idleTimeout: 5000 # timeunit: ms + maxFormAttributeSize: 1048576 # timeunit: byte, default: 1048576(1MB). This applies only when handling form data submissions. + protocol: CloudEvent # Case insensitive, default: CloudEvent, options: CloudEvent, GitHub, Common + extraConfig: # extra config for different protocol, e.g. 
GitHub secret + secret: xxxxxxx # GitHub secret + contentType: application/json # GitHub content type \ No newline at end of file diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle b/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle index 7ee333e868..b70bf6d357 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-jdbc/build.gradle @@ -34,15 +34,15 @@ packageSources { } dependencies { - antlr("org.antlr:antlr4:4.13.0") + antlr("org.antlr:antlr4:4.13.1") implementation 'org.antlr:antlr4-runtime:4.13.1' - implementation 'com.alibaba:druid:1.2.20' + implementation 'com.alibaba:druid:1.2.23' compileOnly 'org.hibernate:hibernate-core:5.6.15.Final' implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation project(":eventmesh-spi") - implementation 'com.zendesk:mysql-binlog-connector-java:0.29.2' - compileOnly 'mysql:mysql-connector-java:8.0.32' + implementation 'com.zendesk:mysql-binlog-connector-java:0.30.1' + compileOnly 'com.mysql:mysql-connector-j' compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcServerConfig.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcServerConfig.java index 451ecf71e9..2b3e614774 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/config/JdbcServerConfig.java @@ -17,7 +17,8 @@ package org.apache.eventmesh.connector.jdbc.config; -import org.apache.eventmesh.openconnect.api.config.Config; + +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/JdbcConnection.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/JdbcConnection.java index d1802d8b96..70d553517f 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/JdbcConnection.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/JdbcConnection.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.jdbc.connection; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; import org.apache.eventmesh.connector.jdbc.JdbcDriverMetaData; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/mysql/MysqlJdbcConnection.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/mysql/MysqlJdbcConnection.java index 2a7514c09a..d62aebba56 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/mysql/MysqlJdbcConnection.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/connection/mysql/MysqlJdbcConnection.java @@ -17,7 +17,7 @@ 
package org.apache.eventmesh.connector.jdbc.connection.mysql; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; import org.apache.eventmesh.connector.jdbc.connection.JdbcConnection; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlDialectSql; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/AbstractGeneralDatabaseDialect.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/AbstractGeneralDatabaseDialect.java index 0a7463a187..0ba6ab715f 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/AbstractGeneralDatabaseDialect.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/AbstractGeneralDatabaseDialect.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.jdbc.dialect; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; import org.apache.eventmesh.connector.jdbc.connection.JdbcConnection; import org.apache.eventmesh.connector.jdbc.exception.JdbcConnectionException; import org.apache.eventmesh.connector.jdbc.table.catalog.Column; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/DatabaseDialectFactory.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/DatabaseDialectFactory.java index aad7984520..3ad607e455 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/DatabaseDialectFactory.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/DatabaseDialectFactory.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.jdbc.dialect; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; import org.apache.eventmesh.spi.EventMeshExtensionType; import org.apache.eventmesh.spi.EventMeshSPI; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialect.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialect.java index acd8730c2e..1a4bb02fb6 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialect.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialect.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.jdbc.dialect.mysql; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; import org.apache.eventmesh.connector.jdbc.DataTypeConvertor; import org.apache.eventmesh.connector.jdbc.JdbcDriverMetaData; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; import org.apache.eventmesh.connector.jdbc.connection.mysql.MysqlJdbcConnection; import org.apache.eventmesh.connector.jdbc.dialect.AbstractGeneralDatabaseDialect; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseType; diff --git 
a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialectFactory.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialectFactory.java index eb4fbe3275..b5fb87a016 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialectFactory.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/dialect/mysql/MysqlDatabaseDialectFactory.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.jdbc.dialect.mysql; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialectFactory; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/JdbcSinkConnector.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/JdbcSinkConnector.java index 7a5c68f581..cc00f1e142 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/JdbcSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/JdbcSinkConnector.java @@ -17,17 +17,17 @@ package org.apache.eventmesh.connector.jdbc.sink; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSinkConfig; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.jdbc.JdbcConnectData; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialectFactory; -import org.apache.eventmesh.connector.jdbc.sink.config.JdbcSinkConfig; import org.apache.eventmesh.connector.jdbc.sink.handle.DefaultSinkRecordHandler; import org.apache.eventmesh.connector.jdbc.sink.handle.SinkRecordHandler; import org.apache.eventmesh.connector.jdbc.sink.hibernate.HibernateConfiguration; import org.apache.eventmesh.connector.jdbc.source.JdbcAllFactoryLoader; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -139,6 +139,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + /** * Stops the Connector. 
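The JDBC sink and source connectors gain an onException(ConnectRecord) callback that this patch leaves empty. Purely as an illustration of what a non-trivial override might do (the logging and the retry/dead-letter idea are not part of the diff):

```java
import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class OnExceptionSketch {

    // illustrative stand-in for the connector's override
    public void onException(ConnectRecord record) {
        // log enough context to find and replay the record later
        log.error("Failed to process record, recordId={}, extensions={}",
            record.getRecordId(), record.getExtensions());
        // a real implementation might forward the record to a retry queue or dead-letter store here
    }
}
```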
* diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/handle/DefaultSinkRecordHandler.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/handle/DefaultSinkRecordHandler.java index a4ba77ae5d..db684d63c7 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/handle/DefaultSinkRecordHandler.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/sink/handle/DefaultSinkRecordHandler.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.jdbc.sink.handle; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSinkConfig; import org.apache.eventmesh.common.utils.LogUtil; import org.apache.eventmesh.connector.jdbc.CatalogChanges; import org.apache.eventmesh.connector.jdbc.DataChanges; @@ -28,7 +29,6 @@ import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; import org.apache.eventmesh.connector.jdbc.event.DataChangeEventType; import org.apache.eventmesh.connector.jdbc.event.SchemaChangeEventType; -import org.apache.eventmesh.connector.jdbc.sink.config.JdbcSinkConfig; import org.apache.eventmesh.connector.jdbc.source.SourceMateData; import org.apache.eventmesh.connector.jdbc.table.catalog.Column; import org.apache.eventmesh.connector.jdbc.type.Type; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractEngine.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractEngine.java index 08c3823725..2088e75632 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractEngine.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractEngine.java @@ -18,9 +18,9 @@ package org.apache.eventmesh.connector.jdbc.source; import org.apache.eventmesh.common.ThreadWrapper; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; -import org.apache.eventmesh.connector.jdbc.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; import org.apache.commons.collections4.CollectionUtils; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractJdbcTaskManager.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractJdbcTaskManager.java index b398ffa17b..ff2fd8ba00 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractJdbcTaskManager.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/AbstractJdbcTaskManager.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.jdbc.source; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; import java.util.ArrayList; diff --git 
a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java index 8d7d9cb66d..ecc5a44154 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/JdbcSourceConnector.java @@ -17,10 +17,12 @@ package org.apache.eventmesh.connector.jdbc.source; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialectFactory; import org.apache.eventmesh.connector.jdbc.event.Event; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.CdcEngine; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.CdcEngineFactory; import org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.SnapshotEngine; @@ -28,8 +30,6 @@ import org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.SnapshotResult; import org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.SnapshotResult.SnapshotResultStatus; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; -import org.apache.eventmesh.openconnect.api.config.Config; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnector; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; @@ -142,7 +142,9 @@ private void doInit() { this.dispatcher = new EventDispatcher(this.sourceJdbcTaskManager); - this.taskManagerCoordinator = new TaskManagerCoordinator(); + this.taskManagerCoordinator = new TaskManagerCoordinator(sourceConfig.getPollConfig().getCapacity(), + sourceConfig.getPollConfig().getMaxBatchSize(), + sourceConfig.getPollConfig().getMaxWaitTime()); this.taskManagerCoordinator.registerTaskManager(SourceJdbcTaskManager.class.getName(), sourceJdbcTaskManager); this.taskManagerCoordinator.init(); } @@ -192,6 +194,11 @@ public String name() { return "JDBC Source Connector"; } + @Override + public void onException(ConnectRecord record) { + + } + /** * Stops the Connector. 
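JdbcSourceConnector now sizes the TaskManagerCoordinator from the source config's poll settings instead of hard-coded constants. A construction sketch; the literal values simply mirror the previous hard-coded defaults visible in the coordinator diff (queue size 1 << 13, batch of 10, 3-second poll):

```java
import org.apache.eventmesh.connector.jdbc.source.TaskManagerCoordinator;

public class CoordinatorWiringSketch {

    public static TaskManagerCoordinator build() {
        int capacity = 8192;       // bound of the internal record queue
        int maxBatchSize = 10;     // upper limit of records returned by one poll()
        long maxWaitTime = 3000L;  // overall time budget of one poll(), in milliseconds
        return new TaskManagerCoordinator(capacity, maxBatchSize, maxWaitTime);
    }
}
```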
* @@ -204,9 +211,6 @@ public void stop() throws Exception { @Override public List poll() { - - List connectRecords = this.taskManagerCoordinator.poll(); - - return connectRecords; + return this.taskManagerCoordinator.poll(); } } diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/SourceJdbcTaskManager.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/SourceJdbcTaskManager.java index be38ffb75a..0625dbfad7 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/SourceJdbcTaskManager.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/SourceJdbcTaskManager.java @@ -17,15 +17,17 @@ package org.apache.eventmesh.connector.jdbc.source; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.jdbc.JdbcRecordOffset; +import org.apache.eventmesh.common.remote.offset.jdbc.JdbcRecordPartition; import org.apache.eventmesh.connector.jdbc.JdbcConnectData; import org.apache.eventmesh.connector.jdbc.event.Event; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.RandomTaskSelectStrategy; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.TaskSelectStrategy; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.util.Collections; import java.util.HashSet; @@ -68,8 +70,8 @@ private void doHandleEvent(Event event) { return; } JdbcConnectData jdbcConnectData = event.getJdbcConnectData(); - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); + RecordPartition partition = new JdbcRecordPartition(); + RecordOffset offset = new JdbcRecordOffset(); ConnectRecord record = new ConnectRecord(partition, offset, System.currentTimeMillis(), jdbcConnectData); List records = Collections.singletonList(record); for (TaskManagerListener listener : listeners) { diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java index c299fbc531..8efb8cbc71 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/TaskManagerCoordinator.java @@ -40,16 +40,16 @@ @Slf4j public class TaskManagerCoordinator { - private static final int BATCH_MAX = 10; - private static final int DEFAULT_QUEUE_SIZE = 1 << 13; + private final BlockingQueue recordBlockingQueue; + private final Map taskManagerCache = new HashMap<>(8); + private final int maxBatchSize; + private final long maxPollTimeout; - private BlockingQueue recordBlockingQueue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_SIZE); - private Map taskManagerCache = 
new HashMap<>(8); - /** - * Constructs a new TaskManagerCoordinator. - */ - public TaskManagerCoordinator() { + public TaskManagerCoordinator(int capacity, int maxBatchSize, long maxPollTimeout) { + this.recordBlockingQueue = new LinkedBlockingQueue<>(capacity); + this.maxBatchSize = maxBatchSize; + this.maxPollTimeout = maxPollTimeout; } /** @@ -96,10 +96,13 @@ public void start() { * @return A list of ConnectRecords, up to the maximum batch size defined by BATCH_MAX. */ public List poll() { - List records = new ArrayList<>(BATCH_MAX); - for (int index = 0; index < BATCH_MAX; ++index) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollTimeout; + + List records = new ArrayList<>(maxBatchSize); + for (int index = 0; index < maxBatchSize; ++index) { try { - ConnectRecord record = recordBlockingQueue.poll(3, TimeUnit.SECONDS); + ConnectRecord record = recordBlockingQueue.poll(remainingTime, TimeUnit.MILLISECONDS); if (Objects.isNull(record)) { break; } @@ -107,6 +110,10 @@ public List poll() { log.debug("record:{}", JsonUtils.toJSONString(record)); } records.add(record); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollTimeout > elapsedTime ? maxPollTimeout - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/MysqlAntlr4DdlParser.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/MysqlAntlr4DdlParser.java index 9bb110815f..261da6192b 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/MysqlAntlr4DdlParser.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/MysqlAntlr4DdlParser.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.antlr4.Antlr4DdlParser; import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlLexer; import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParser; @@ -25,7 +26,6 @@ import org.apache.eventmesh.connector.jdbc.antlr4.listener.Antlr4DdlParserListener; import org.apache.eventmesh.connector.jdbc.ddl.DdlParserCallback; import org.apache.eventmesh.connector.jdbc.event.Event; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.listener.MySqlAntlr4DdlParserListener; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; import org.apache.eventmesh.connector.jdbc.utils.JdbcStringUtils; @@ -113,7 +113,7 @@ public void runIfAllNotNull(Runnable runner, Object... nullableObjects) { */ public TableId parseTableId(String fullIdText) { // Remove special characters from the full ID text - String sanitizedText = StringUtils.replaceEach(fullIdText, new String[]{"'\\''", "\"", "`"}, new String[]{"", "", ""}); + String sanitizedText = StringUtils.replaceEach(fullIdText, new String[] {"'\\''", "\"", "`"}, new String[] {"", "", ""}); // Split the sanitized text by dot (.) 
to separate catalog and table name String[] split = sanitizedText.split("\\."); diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateDatabaseParserListener.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateDatabaseParserListener.java index a5d15820a9..71b4866e74 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateDatabaseParserListener.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateDatabaseParserListener.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.listener; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.CatalogChanges; import org.apache.eventmesh.connector.jdbc.Payload; import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParser.CreateDatabaseContext; @@ -24,7 +25,6 @@ import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParserBaseListener; import org.apache.eventmesh.connector.jdbc.event.CreateDatabaseEvent; import org.apache.eventmesh.connector.jdbc.event.SchemaChangeEventType; -import org.apache.eventmesh.connector.jdbc.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.MysqlAntlr4DdlParser; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlSourceMateData; import org.apache.eventmesh.connector.jdbc.table.catalog.CatalogSchema; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateTableParserListener.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateTableParserListener.java index 4e30da93f9..044403f778 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateTableParserListener.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/CreateTableParserListener.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.listener; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.CatalogChanges; import org.apache.eventmesh.connector.jdbc.Payload; import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParser.ColumnCreateTableContext; @@ -29,7 +30,6 @@ import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParser.TableOptionEngineContext; import org.apache.eventmesh.connector.jdbc.event.CreateTableEvent; import org.apache.eventmesh.connector.jdbc.event.SchemaChangeEventType; -import org.apache.eventmesh.connector.jdbc.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.MysqlAntlr4DdlParser; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlSourceMateData; import org.apache.eventmesh.connector.jdbc.table.catalog.Table; diff --git 
a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/DropDatabaseParserListener.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/DropDatabaseParserListener.java index 22a25fcd32..c582df4a15 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/DropDatabaseParserListener.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/antlr4/mysql/listener/DropDatabaseParserListener.java @@ -17,13 +17,13 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.listener; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.CatalogChanges; import org.apache.eventmesh.connector.jdbc.Payload; import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParser.DropDatabaseContext; import org.apache.eventmesh.connector.jdbc.antlr4.autogeneration.MySqlParserBaseListener; import org.apache.eventmesh.connector.jdbc.event.DropDatabaseEvent; import org.apache.eventmesh.connector.jdbc.event.SchemaChangeEventType; -import org.apache.eventmesh.connector.jdbc.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.MysqlAntlr4DdlParser; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlSourceMateData; import org.apache.eventmesh.connector.jdbc.table.catalog.CatalogSchema; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/AbstractCdcEngine.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/AbstractCdcEngine.java index 1fb1a95579..e7538659bd 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/AbstractCdcEngine.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/AbstractCdcEngine.java @@ -18,13 +18,13 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.cdc; import org.apache.eventmesh.common.ThreadWrapper; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.JdbcContext; import org.apache.eventmesh.connector.jdbc.ddl.DdlParser; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; -import org.apache.eventmesh.connector.jdbc.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; import org.apache.commons.collections4.CollectionUtils; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/CdcEngineFactory.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/CdcEngineFactory.java index e08e1c8216..d53499e147 100644 --- 
a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/CdcEngineFactory.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/CdcEngineFactory.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.cdc; +import org.apache.eventmesh.common.config.connector.SourceConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; import org.apache.eventmesh.spi.EventMeshExtensionType; import org.apache.eventmesh.spi.EventMeshSPI; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngine.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngine.java index 5650c3d0cc..22e9366f1e 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngine.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngine.java @@ -18,13 +18,16 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.cdc.mysql; import org.apache.eventmesh.common.EventMeshThreadFactory; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.MysqlConfig; import org.apache.eventmesh.connector.jdbc.CatalogChanges; import org.apache.eventmesh.connector.jdbc.DataChanges; import org.apache.eventmesh.connector.jdbc.DataChanges.Builder; import org.apache.eventmesh.connector.jdbc.Field; import org.apache.eventmesh.connector.jdbc.Payload; import org.apache.eventmesh.connector.jdbc.Schema; -import org.apache.eventmesh.connector.jdbc.config.JdbcConfig; import org.apache.eventmesh.connector.jdbc.connection.mysql.MysqlJdbcConnection; import org.apache.eventmesh.connector.jdbc.dialect.mysql.MysqlDatabaseDialect; import org.apache.eventmesh.connector.jdbc.event.DeleteDataEvent; @@ -33,8 +36,6 @@ import org.apache.eventmesh.connector.jdbc.event.InsertDataEvent; import org.apache.eventmesh.connector.jdbc.event.SchemaChangeEventType; import org.apache.eventmesh.connector.jdbc.event.UpdateDataEvent; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; -import org.apache.eventmesh.connector.jdbc.source.config.MysqlConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.MysqlAntlr4DdlParser; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.AbstractCdcEngine; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.mysql.RowDeserializers.DeleteRowsEventMeshDeserializer; @@ -51,7 +52,6 @@ import org.apache.eventmesh.connector.jdbc.table.catalog.TableSchema; import org.apache.eventmesh.connector.jdbc.table.catalog.mysql.MysqlDefaultValueConvertorImpl; import org.apache.eventmesh.connector.jdbc.table.type.Pair; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; diff --git 
a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngineFactory.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngineFactory.java index 35e722fe12..2a87969cf8 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngineFactory.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/cdc/mysql/MysqlCdcEngineFactory.java @@ -17,12 +17,12 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.cdc.mysql; +import org.apache.eventmesh.common.config.connector.SourceConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; import org.apache.eventmesh.connector.jdbc.dialect.mysql.MysqlDatabaseDialect; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.AbstractCdcEngineFactory; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.CdcEngine; import org.apache.eventmesh.connector.jdbc.source.dialect.cdc.CdcEngineFactory; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/mysql/MysqlJdbcContext.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/mysql/MysqlJdbcContext.java index 0dcf7ad299..8775d4d488 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/mysql/MysqlJdbcContext.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/mysql/MysqlJdbcContext.java @@ -17,12 +17,12 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.mysql; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.PartitionOffSetContextPair; import org.apache.eventmesh.connector.jdbc.UniversalJdbcContext; import org.apache.eventmesh.connector.jdbc.context.mysql.MysqlOffsetContext; import org.apache.eventmesh.connector.jdbc.context.mysql.MysqlPartition; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; -import org.apache.eventmesh.connector.jdbc.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.MysqlAntlr4DdlParser; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/AbstractSnapshotEngine.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/AbstractSnapshotEngine.java index 96302a5a87..3c46970182 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/AbstractSnapshotEngine.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/AbstractSnapshotEngine.java @@ -18,6 +18,7 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.snapshot; import org.apache.eventmesh.common.ThreadPoolFactory; +import 
org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.DataChanges; import org.apache.eventmesh.connector.jdbc.DataChanges.Builder; import org.apache.eventmesh.connector.jdbc.Field; @@ -33,7 +34,6 @@ import org.apache.eventmesh.connector.jdbc.event.InsertDataEvent; import org.apache.eventmesh.connector.jdbc.source.AbstractEngine; import org.apache.eventmesh.connector.jdbc.source.SourceMateData; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.SnapshotResult.SnapshotResultStatus; import org.apache.eventmesh.connector.jdbc.table.catalog.Column; import org.apache.eventmesh.connector.jdbc.table.catalog.TableId; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/SnapshotEngineFactory.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/SnapshotEngineFactory.java index d573d7081a..30dfe2e997 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/SnapshotEngineFactory.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/SnapshotEngineFactory.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.snapshot; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.JdbcContext; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; import org.apache.eventmesh.spi.EventMeshExtensionType; import org.apache.eventmesh.spi.EventMeshSPI; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngine.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngine.java index e76ab49a3e..b6d6d7f8bf 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngine.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngine.java @@ -17,6 +17,8 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.mysql; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.MysqlConfig; import org.apache.eventmesh.connector.jdbc.CatalogChanges; import org.apache.eventmesh.connector.jdbc.connection.mysql.MysqlJdbcConnection; import org.apache.eventmesh.connector.jdbc.context.mysql.MysqlOffsetContext; @@ -26,8 +28,6 @@ import org.apache.eventmesh.connector.jdbc.event.EventConsumer; import org.apache.eventmesh.connector.jdbc.event.SchemaChangeEventType; import org.apache.eventmesh.connector.jdbc.source.SourceMateData; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; -import org.apache.eventmesh.connector.jdbc.source.config.MysqlConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlConstants; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlDialectSql; 
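
The source-side polling changes in this patch (TaskManagerCoordinator above, and the MongoDB and OpenFunction source connectors further down) all converge on the same shape: drain up to maxBatchSize records from a BlockingQueue while capping the total wait at a configured timeout, instead of a fixed batch of 10 with a 3-second wait per element. A minimal, self-contained sketch of that pattern follows; the class and field names are illustrative, and only the loop body mirrors the changed code.

    // Illustrative sketch only -- not part of this patch. It restates the bounded-poll
    // shape shared by TaskManagerCoordinator and the MongoDB/OpenFunction source connectors.
    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;

    public class BoundedPollExample<T> {

        private final BlockingQueue<T> queue;
        private final int maxBatchSize;
        private final long maxPollTimeout;

        public BoundedPollExample(int capacity, int maxBatchSize, long maxPollTimeout) {
            this.queue = new LinkedBlockingQueue<>(capacity);
            this.maxBatchSize = maxBatchSize;
            this.maxPollTimeout = maxPollTimeout;
        }

        public List<T> poll() {
            long startTime = System.currentTimeMillis();
            long remainingTime = maxPollTimeout;
            List<T> records = new ArrayList<>(maxBatchSize);
            for (int index = 0; index < maxBatchSize; ++index) {
                try {
                    // each iteration only waits for whatever is left of the overall time budget
                    T record = queue.poll(remainingTime, TimeUnit.MILLISECONDS);
                    if (record == null) {
                        break;
                    }
                    records.add(record);
                    long elapsedTime = System.currentTimeMillis() - startTime;
                    remainingTime = maxPollTimeout > elapsedTime ? maxPollTimeout - elapsedTime : 0;
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
            return records;
        }
    }

Because the per-iteration wait shrinks as time elapses, both a full batch and an empty queue return within roughly maxPollTimeout, which keeps a slow producer from stalling the caller for maxBatchSize times the old fixed wait.
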
import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlJdbcContext; diff --git a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngineFactory.java b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngineFactory.java index 210ded0edf..bac2bdafba 100644 --- a/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngineFactory.java +++ b/eventmesh-connectors/eventmesh-connector-jdbc/src/main/java/org/apache/eventmesh/connector/jdbc/source/dialect/snapshot/mysql/MysqlSnapshotEngineFactory.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.mysql; +import org.apache.eventmesh.common.config.connector.rdb.jdbc.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.dialect.DatabaseDialect; import org.apache.eventmesh.connector.jdbc.dialect.mysql.MysqlDatabaseDialect; -import org.apache.eventmesh.connector.jdbc.source.config.JdbcSourceConfig; import org.apache.eventmesh.connector.jdbc.source.dialect.antlr4.mysql.MysqlAntlr4DdlParser; import org.apache.eventmesh.connector.jdbc.source.dialect.mysql.MysqlJdbcContext; import org.apache.eventmesh.connector.jdbc.source.dialect.snapshot.SnapshotEngine; diff --git a/eventmesh-connectors/eventmesh-connector-kafka/build.gradle b/eventmesh-connectors/eventmesh-connector-kafka/build.gradle index 39dcbf656e..2796e03c0e 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-kafka/build.gradle @@ -16,9 +16,10 @@ */ dependencies { + implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation 'io.cloudevents:cloudevents-kafka:2.5.0' - implementation 'org.apache.kafka:kafka-clients:3.6.2' + implementation 'org.apache.kafka:kafka-clients:3.8.1' compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' } diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/config/KafkaServerConfig.java b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/config/KafkaServerConfig.java index 2c9ef71fb4..ccbabf2676 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/config/KafkaServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/config/KafkaServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.kafka.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/connector/KafkaSinkConnector.java b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/connector/KafkaSinkConnector.java index 49b7e88f19..0adafc1ce6 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/connector/KafkaSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/sink/connector/KafkaSinkConnector.java @@ 
-17,8 +17,8 @@ package org.apache.eventmesh.connector.kafka.sink.connector; -import org.apache.eventmesh.connector.kafka.sink.config.KafkaSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.kafka.KafkaSinkConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -94,6 +94,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { producer.close(); diff --git a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java index 99fa73b580..f771e907cb 100644 --- a/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-kafka/src/main/java/org/apache/eventmesh/connector/kafka/source/connector/KafkaSourceConnector.java @@ -17,14 +17,16 @@ package org.apache.eventmesh.connector.kafka.source.connector; -import org.apache.eventmesh.connector.kafka.source.config.KafkaSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.kafka.KafkaSourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.kafka.KafkaRecordOffset; +import org.apache.eventmesh.common.remote.offset.kafka.KafkaRecordPartition; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -34,9 +36,7 @@ import java.time.Duration; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Properties; public class KafkaSourceConnector implements Source { @@ -45,7 +45,7 @@ public class KafkaSourceConnector implements Source { private KafkaConsumer kafkaConsumer; - private int pollTimeOut = 100; + private long maxPollWaitTime; @Override public Class configClass() { @@ -75,7 +75,7 @@ private void doInit() { props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, sourceConfig.getConnectorConfig().getMaxPollRecords()); props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, sourceConfig.getConnectorConfig().getAutoCommitIntervalMS()); props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sourceConfig.getConnectorConfig().getSessionTimeoutMS()); - this.pollTimeOut = sourceConfig.getConnectorConfig().getPollTimeOut(); + this.maxPollWaitTime = 
sourceConfig.getPollConfig().getMaxWaitTime(); this.kafkaConsumer = new KafkaConsumer<>(props); } @@ -94,6 +94,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { kafkaConsumer.unsubscribe(); @@ -101,7 +106,7 @@ public void stop() { @Override public List poll() { - ConsumerRecords records = kafkaConsumer.poll(Duration.ofMillis(pollTimeOut)); + ConsumerRecords records = kafkaConsumer.poll(Duration.ofMillis(maxPollWaitTime)); List connectRecords = new ArrayList<>(records.count()); for (ConsumerRecord record : records) { Long timestamp = System.currentTimeMillis(); @@ -118,15 +123,15 @@ public List poll() { } public static RecordOffset convertToRecordOffset(Long offset) { - Map offsetMap = new HashMap<>(); - offsetMap.put("queueOffset", offset + ""); - return new RecordOffset(offsetMap); + KafkaRecordOffset recordOffset = new KafkaRecordOffset(); + recordOffset.setOffset(offset); + return recordOffset; } public static RecordPartition convertToRecordPartition(String topic, int partition) { - Map map = new HashMap<>(); - map.put("topic", topic); - map.put("partition", String.valueOf(partition)); - return new RecordPartition(map); + KafkaRecordPartition recordPartition = new KafkaRecordPartition(); + recordPartition.setTopic(topic); + recordPartition.setPartition(partition); + return recordPartition; } } diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/config/KnativeServerConfig.java b/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/config/KnativeServerConfig.java index 4125132ad7..9469a5d4b5 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/config/KnativeServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/config/KnativeServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.knative.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/connector/KnativeSinkConnector.java b/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/connector/KnativeSinkConnector.java index fdf5b1056b..b14f77ecd4 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/connector/KnativeSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/sink/connector/KnativeSinkConnector.java @@ -19,10 +19,10 @@ import static org.asynchttpclient.Dsl.asyncHttpClient; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.knative.KnativeSinkConfig; import org.apache.eventmesh.connector.knative.cloudevent.KnativeHeaders; import org.apache.eventmesh.connector.knative.cloudevent.KnativeMessageFactory; -import org.apache.eventmesh.connector.knative.sink.config.KnativeSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import 
org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -82,6 +82,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { started.compareAndSet(true, false); diff --git a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/connector/KnativeSourceConnector.java b/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/connector/KnativeSourceConnector.java index 239c39a802..1b0c033e8f 100644 --- a/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/connector/KnativeSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-knative/src/main/java/org/apache/eventmesh/connector/knative/source/connector/KnativeSourceConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.knative.source.connector; -import org.apache.eventmesh.connector.knative.source.config.KnativeSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.knative.KnativeSourceConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -65,6 +65,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { started.compareAndSet(true, false); diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/ConfigUtils.java b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/ConfigUtils.java new file mode 100644 index 0000000000..f0017397e4 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/ConfigUtils.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.lark; + +import org.apache.eventmesh.common.config.connector.lark.SinkConnectorConfig; + +import org.apache.commons.lang3.StringUtils; + +import com.lark.oapi.service.im.v1.enums.ReceiveIdTypeEnum; + +public class ConfigUtils { + + public static void validateSinkConfiguration(SinkConnectorConfig sinkConnectorConfig) { + // validate blank + if (StringUtils.isAnyBlank(sinkConnectorConfig.getAppId(), sinkConnectorConfig.getAppSecret(), sinkConnectorConfig.getReceiveId())) { + throw new IllegalArgumentException("appId or appSecret or receiveId is blank,please check it."); + } + + // validate receiveIdType + if (!StringUtils.containsAny(sinkConnectorConfig.getReceiveIdType(), ReceiveIdTypeEnum.CHAT_ID.getValue(), + ReceiveIdTypeEnum.EMAIL.getValue(), + ReceiveIdTypeEnum.OPEN_ID.getValue(), + ReceiveIdTypeEnum.USER_ID.getValue(), + ReceiveIdTypeEnum.UNION_ID.getValue())) { + throw new IllegalArgumentException( + String.format("sinkConnectorConfig.receiveIdType=[%s], Invalid.", sinkConnectorConfig.getReceiveIdType())); + } + } +} diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/config/LarkConnectServerConfig.java b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/config/LarkConnectServerConfig.java index 6d0428056e..0106fa4a65 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/config/LarkConnectServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/config/LarkConnectServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.lark.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandler.java b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandler.java index 75e51e690b..ce5d4a3b85 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandler.java +++ b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandler.java @@ -19,9 +19,9 @@ import static org.apache.eventmesh.connector.lark.sink.connector.LarkSinkConnector.getTenantAccessToken; +import org.apache.eventmesh.common.config.connector.lark.SinkConnectorConfig; import org.apache.eventmesh.connector.lark.ConnectRecordExtensionKeys; import org.apache.eventmesh.connector.lark.config.LarkMessageTemplateType; -import org.apache.eventmesh.connector.lark.sink.config.SinkConnectorConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.commons.text.StringEscapeUtils; diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/connector/LarkSinkConnector.java b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/connector/LarkSinkConnector.java index d1ee1caa40..9981322e8f 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/connector/LarkSinkConnector.java +++ 
b/eventmesh-connectors/eventmesh-connector-lark/src/main/java/org/apache/eventmesh/connector/lark/sink/connector/LarkSinkConnector.java @@ -19,10 +19,11 @@ import static org.apache.eventmesh.connector.lark.sink.ImServiceHandler.create; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.lark.LarkSinkConfig; +import org.apache.eventmesh.common.config.connector.lark.SinkConnectorConfig; +import org.apache.eventmesh.connector.lark.ConfigUtils; import org.apache.eventmesh.connector.lark.sink.ImServiceHandler; -import org.apache.eventmesh.connector.lark.sink.config.LarkSinkConfig; -import org.apache.eventmesh.connector.lark.sink.config.SinkConnectorConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -87,7 +88,7 @@ public void init(ConnectorContext connectorContext) { this.sinkConfig = (LarkSinkConfig) sinkConnectorContext.getSinkConfig(); SinkConnectorConfig sinkConnectorConfig = sinkConfig.getSinkConnectorConfig(); - sinkConnectorConfig.validateSinkConfiguration(); + ConfigUtils.validateSinkConfiguration(sinkConnectorConfig); imServiceHandler = create(sinkConnectorConfig); } @@ -109,6 +110,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { if (!started.compareAndSet(true, false)) { diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandlerTest.java b/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandlerTest.java index f72232a533..9c54717fac 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandlerTest.java +++ b/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/ImServiceHandlerTest.java @@ -27,11 +27,9 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.apache.eventmesh.connector.lark.sink.config.LarkSinkConfig; -import org.apache.eventmesh.connector.lark.sink.config.SinkConnectorConfig; +import org.apache.eventmesh.common.config.connector.lark.LarkSinkConfig; +import org.apache.eventmesh.common.config.connector.lark.SinkConnectorConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.lang.reflect.Field; @@ -103,9 +101,8 @@ public void testRegularSinkAsync() throws Exception { private void regularSink() throws Exception { final int times = 3; for (int i = 0; i < times; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "test-lark".getBytes(StandardCharsets.UTF_8)); if (Boolean.parseBoolean(sinkConnectorConfig.getSinkAsync())) { imServiceHandler.sinkAsync(connectRecord); @@ -145,9 +142,8 @@ private void retrySink() 
throws Exception { long duration = retryDelayInMills * sinkTimes; for (int i = 0; i < times; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "test-lark".getBytes(StandardCharsets.UTF_8)); if (Boolean.parseBoolean(sinkConnectorConfig.getSinkAsync())) { imServiceHandler.sinkAsync(connectRecord); diff --git a/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/LarkSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/LarkSinkConnectorTest.java index 658fa89223..a02c845dc5 100644 --- a/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/LarkSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-lark/src/test/java/org/apache/eventmesh/connector/lark/sink/LarkSinkConnectorTest.java @@ -24,12 +24,10 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.apache.eventmesh.connector.lark.sink.config.LarkSinkConfig; +import org.apache.eventmesh.common.config.connector.lark.LarkSinkConfig; import org.apache.eventmesh.connector.lark.sink.connector.LarkSinkConnector; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.nio.charset.StandardCharsets; @@ -82,9 +80,7 @@ public void testPut() throws Exception { final int times = 3; List connectRecords = new ArrayList<>(); for (int i = 0; i < times; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "test-lark".getBytes(StandardCharsets.UTF_8)); connectRecords.add(connectRecord); } diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/config/MongodbServerConfig.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/config/MongodbServerConfig.java index 95c651b358..fa5618d4a8 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/config/MongodbServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/config/MongodbServerConfig.java @@ -17,10 +17,12 @@ package org.apache.eventmesh.connector.mongodb.config; +import org.apache.eventmesh.common.config.connector.Config; + import lombok.Data; @Data -public class MongodbServerConfig { +public class MongodbServerConfig extends Config { private boolean sourceEnable; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbReplicaSetSinkClient.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbReplicaSetSinkClient.java index 638e1a7d3f..0afae2b8de 100644 --- 
a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbReplicaSetSinkClient.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbReplicaSetSinkClient.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.mongodb.sink.client; +import org.apache.eventmesh.common.config.connector.rdb.mongodb.SinkConnectorConfig; import org.apache.eventmesh.connector.mongodb.sink.client.Impl.MongodbSinkClient; -import org.apache.eventmesh.connector.mongodb.sink.config.SinkConnectorConfig; import org.apache.eventmesh.connector.mongodb.utils.MongodbCloudEventUtil; import org.bson.Document; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbStandaloneSinkClient.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbStandaloneSinkClient.java index 45bdf6f699..4a87a4320f 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbStandaloneSinkClient.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/client/MongodbStandaloneSinkClient.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.mongodb.sink.client; +import org.apache.eventmesh.common.config.connector.rdb.mongodb.SinkConnectorConfig; import org.apache.eventmesh.connector.mongodb.constant.MongodbConstants; import org.apache.eventmesh.connector.mongodb.sink.client.Impl.MongodbSinkClient; -import org.apache.eventmesh.connector.mongodb.sink.config.SinkConnectorConfig; import org.apache.eventmesh.connector.mongodb.utils.MongodbCloudEventUtil; import org.bson.Document; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/connector/MongodbSinkConnector.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/connector/MongodbSinkConnector.java index 814aaf2882..1001ffa584 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/connector/MongodbSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/sink/connector/MongodbSinkConnector.java @@ -17,11 +17,11 @@ package org.apache.eventmesh.connector.mongodb.sink.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.mongodb.MongodbSinkConfig; import org.apache.eventmesh.connector.mongodb.sink.client.Impl.MongodbSinkClient; import org.apache.eventmesh.connector.mongodb.sink.client.MongodbReplicaSetSinkClient; import org.apache.eventmesh.connector.mongodb.sink.client.MongodbStandaloneSinkClient; -import org.apache.eventmesh.connector.mongodb.sink.config.MongodbSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -87,6 +87,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws 
Exception { this.client.stop(); diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbReplicaSetSourceClient.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbReplicaSetSourceClient.java index 468cf6c92e..b389c0db9c 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbReplicaSetSourceClient.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbReplicaSetSourceClient.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.mongodb.source.client; +import org.apache.eventmesh.common.config.connector.rdb.mongodb.SourceConnectorConfig; import org.apache.eventmesh.connector.mongodb.source.client.Impl.MongodbSourceClient; -import org.apache.eventmesh.connector.mongodb.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.mongodb.utils.MongodbCloudEventUtil; import java.util.concurrent.BlockingQueue; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbStandaloneSourceClient.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbStandaloneSourceClient.java index e062ab034d..ce7452e0ae 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbStandaloneSourceClient.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/client/MongodbStandaloneSourceClient.java @@ -18,9 +18,9 @@ package org.apache.eventmesh.connector.mongodb.source.client; import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.connector.rdb.mongodb.SourceConnectorConfig; import org.apache.eventmesh.connector.mongodb.constant.MongodbConstants; import org.apache.eventmesh.connector.mongodb.source.client.Impl.MongodbSourceClient; -import org.apache.eventmesh.connector.mongodb.source.config.SourceConnectorConfig; import org.apache.eventmesh.connector.mongodb.utils.MongodbCloudEventUtil; import java.util.concurrent.BlockingQueue; diff --git a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java index fec64de56c..1d1dcc1843 100644 --- a/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-mongodb/src/main/java/org/apache/eventmesh/connector/mongodb/source/connector/MongodbSourceConnector.java @@ -17,11 +17,11 @@ package org.apache.eventmesh.connector.mongodb.source.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.rdb.mongodb.MongodbSourceConfig; import org.apache.eventmesh.connector.mongodb.source.client.Impl.MongodbSourceClient; import org.apache.eventmesh.connector.mongodb.source.client.MongodbReplicaSetSourceClient; import 
org.apache.eventmesh.connector.mongodb.source.client.MongodbStandaloneSourceClient; -import org.apache.eventmesh.connector.mongodb.source.config.MongodbSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -42,10 +42,12 @@ public class MongodbSourceConnector implements Source { private MongodbSourceConfig sourceConfig; - private static final int DEFAULT_BATCH_SIZE = 10; - private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + private MongodbSourceClient client; @Override @@ -67,7 +69,9 @@ public void init(ConnectorContext connectorContext) throws Exception { } private void doInit() { - this.queue = new LinkedBlockingQueue<>(1000); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); String connectorType = sourceConfig.getConnectorConfig().getConnectorType(); if (connectorType.equals(ClusterType.STANDALONE.name())) { this.client = new MongodbStandaloneSourceClient(sourceConfig.getConnectorConfig(), queue); @@ -93,6 +97,11 @@ public String name() { return this.sourceConfig.connectorConfig.getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { this.client.stop(); @@ -100,15 +109,21 @@ public void stop() throws Exception { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? 
maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/config/OpenFunctionServerConfig.java b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/config/OpenFunctionServerConfig.java index 2cf28000f5..b4ae607d5b 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/config/OpenFunctionServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/config/OpenFunctionServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.openfunction.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnector.java b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnector.java index aed6936004..0f00a7e381 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.openfunction.sink.connector; -import org.apache.eventmesh.connector.openfunction.sink.config.OpenFunctionSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.openfunction.OpenFunctionSinkConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -74,6 +74,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { } diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java index 916d6793db..e40c451ff8 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-openfunction/src/main/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.openfunction.source.connector; -import org.apache.eventmesh.connector.openfunction.source.config.OpenFunctionSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import 
org.apache.eventmesh.common.config.connector.openfunction.OpenFunctionSourceConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -35,12 +35,14 @@ @Slf4j public class OpenFunctionSourceConnector implements Source { - private static final int DEFAULT_BATCH_SIZE = 10; - private OpenFunctionSourceConfig sourceConfig; private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + @Override public Class configClass() { return OpenFunctionSourceConfig.class; @@ -50,7 +52,7 @@ public Class configClass() { public void init(Config config) throws Exception { // init config for openfunction source connector this.sourceConfig = (OpenFunctionSourceConfig) config; - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); } @Override @@ -58,7 +60,14 @@ public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; // init config for openfunction source connector this.sourceConfig = (OpenFunctionSourceConfig) sourceConnectorContext.getSourceConfig(); - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); + } + + private void doInit() { + // init config for openfunction source connector + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); } @Override @@ -76,6 +85,11 @@ public String name() { return this.sourceConfig.getSourceConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { @@ -87,16 +101,21 @@ public BlockingQueue queue() { @Override public List poll() { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - ConnectRecord connectRecord = queue.poll(3, TimeUnit.SECONDS); + ConnectRecord connectRecord = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (connectRecord == null) { break; } connectRecords.add(connectRecord); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? 
maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { Thread currentThread = Thread.currentThread(); log.warn("[OpenFunctionSourceConnector] Interrupting thread {} due to exception {}", diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnectorTest.java index efb25b8ea9..6751c0ec17 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/sink/connector/OpenFunctionSinkConnectorTest.java @@ -19,10 +19,8 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.apache.eventmesh.connector.openfunction.sink.config.OpenFunctionSinkConfig; +import org.apache.eventmesh.common.config.connector.openfunction.OpenFunctionSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.util.ArrayList; import java.util.List; @@ -72,9 +70,7 @@ public void shutdownConnector() { private void writeMockedRecords(int count, String message) throws Exception { List records = new ArrayList<>(); for (int i = 0; i < count; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - records.add(new ConnectRecord(partition, offset, System.currentTimeMillis(), message + i)); + records.add(new ConnectRecord(null, null, System.currentTimeMillis(), message + i)); } connector.put(records); } diff --git a/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnectorTest.java index 2663443dbe..880ee701dc 100644 --- a/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-openfunction/src/test/java/org/apache/eventmesh/connector/openfunction/source/connector/OpenFunctionSourceConnectorTest.java @@ -17,10 +17,8 @@ package org.apache.eventmesh.connector.openfunction.source.connector; -import org.apache.eventmesh.connector.openfunction.source.config.OpenFunctionSourceConfig; +import org.apache.eventmesh.common.config.connector.openfunction.OpenFunctionSourceConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.util.List; import java.util.concurrent.BlockingQueue; @@ -53,9 +51,7 @@ public void testSpringSourceConnector() throws Exception { private void writeMockedRecords(int count, String message) { BlockingQueue queue = connector.queue(); for (int i = 0; i < count; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new 
RecordOffset(); - ConnectRecord record = new ConnectRecord(partition, offset, System.currentTimeMillis(), message + i); + ConnectRecord record = new ConnectRecord(null, null, System.currentTimeMillis(), message + i); queue.offer(record); } } diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/config/PravegaServerConfig.java b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/config/PravegaServerConfig.java index bf5d9a4fb7..5945b4a7ae 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/config/PravegaServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/config/PravegaServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.pravega.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/connector/PravegaSinkConnector.java b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/connector/PravegaSinkConnector.java index d1d29ad95d..e089ef6760 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/connector/PravegaSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/sink/connector/PravegaSinkConnector.java @@ -17,11 +17,11 @@ package org.apache.eventmesh.connector.pravega.sink.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.pravega.PravegaSinkConfig; import org.apache.eventmesh.connector.pravega.client.PravegaCloudEventWriter; import org.apache.eventmesh.connector.pravega.client.PravegaEvent; import org.apache.eventmesh.connector.pravega.exception.PravegaConnectorException; -import org.apache.eventmesh.connector.pravega.sink.config.PravegaSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -109,6 +109,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { writerMap.forEach((topic, writer) -> writer.close()); diff --git a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java index c72c38f71d..4b5e4751b3 100644 --- a/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-pravega/src/main/java/org/apache/eventmesh/connector/pravega/source/connector/PravegaSourceConnector.java @@ -18,9 +18,9 @@ package org.apache.eventmesh.connector.pravega.source.connector; import 
org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.pravega.PravegaSourceConfig; import org.apache.eventmesh.connector.pravega.client.PravegaEvent; -import org.apache.eventmesh.connector.pravega.source.config.PravegaSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -57,8 +57,6 @@ public class PravegaSourceConnector implements Source { private static final AtomicBoolean started = new AtomicBoolean(false); - private static final int DEFAULT_BATCH_SIZE = 10; - private PravegaSourceConfig sourceConfig; private StreamManager streamManager; @@ -71,6 +69,10 @@ public class PravegaSourceConnector implements Source { private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + private final ThreadPoolExecutor executor = ThreadPoolFactory.createThreadPoolExecutor( Runtime.getRuntime().availableProcessors() * 2, Runtime.getRuntime().availableProcessors() * 2, @@ -89,7 +91,9 @@ public void init(Config config) throws Exception { public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; this.sourceConfig = (PravegaSourceConfig) sourceConnectorContext.getSourceConfig(); - this.queue = new LinkedBlockingQueue<>(1000); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); streamManager = StreamManager.create(sourceConfig.getConnectorConfig().getControllerURI()); ClientConfig.ClientConfigBuilder clientConfigBuilder = @@ -148,6 +152,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { sourceHandlerMap.forEach((topic, handler) -> { @@ -163,15 +172,21 @@ public void stop() { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? 
maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-prometheus/build.gradle b/eventmesh-connectors/eventmesh-connector-prometheus/build.gradle index 9650575803..97e4ed12f3 100644 --- a/eventmesh-connectors/eventmesh-connector-prometheus/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-prometheus/build.gradle @@ -16,6 +16,7 @@ */ dependencies { + implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation 'org.apache.httpcomponents:httpclient' implementation 'com.github.rholder:guava-retrying' diff --git a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/config/PrometheusServerConfig.java b/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/config/PrometheusServerConfig.java index 3222e28210..a238109c72 100644 --- a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/config/PrometheusServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/config/PrometheusServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.prometheus.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/connector/PrometheusSourceConnector.java b/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/connector/PrometheusSourceConnector.java index 2c75f394fc..0cafed73f3 100644 --- a/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/connector/PrometheusSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-prometheus/src/main/java/org/apache/eventmesh/connector/prometheus/source/connector/PrometheusSourceConnector.java @@ -17,16 +17,18 @@ package org.apache.eventmesh.connector.prometheus.source.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.prometheus.PrometheusSourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.prometheus.PrometheusRecordOffset; +import org.apache.eventmesh.common.remote.offset.prometheus.PrometheusRecordPartition; import org.apache.eventmesh.connector.prometheus.model.QueryPrometheusReq; import org.apache.eventmesh.connector.prometheus.model.QueryPrometheusRsp; -import org.apache.eventmesh.connector.prometheus.source.config.PrometheusSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.http.HttpStatus; import 
org.apache.http.client.methods.CloseableHttpResponse; @@ -143,6 +145,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { log.info("prometheus source connector stop."); @@ -185,8 +192,8 @@ public List poll() { private ConnectRecord assembleRecord(String data) { Long timestamp = System.currentTimeMillis(); - RecordPartition recordPartition = new RecordPartition(); - RecordOffset recordOffset = new RecordOffset(); + RecordPartition recordPartition = new PrometheusRecordPartition(); + RecordOffset recordOffset = new PrometheusRecordOffset(); return new ConnectRecord(recordPartition, recordOffset, timestamp, data); } diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/build.gradle b/eventmesh-connectors/eventmesh-connector-pulsar/build.gradle index f087842ea8..62ab590d89 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-pulsar/build.gradle @@ -16,6 +16,7 @@ */ dependencies { + implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") /* diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/config/PulsarServerConfig.java b/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/config/PulsarServerConfig.java index 56d0b04fed..2c7e939259 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/config/PulsarServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/config/PulsarServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.pulsar.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/connector/PulsarSinkConnector.java b/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/connector/PulsarSinkConnector.java index 5ea0a0147a..3f90c6c1be 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/connector/PulsarSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/sink/connector/PulsarSinkConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.pulsar.sink.connector; -import org.apache.eventmesh.connector.pulsar.sink.config.PulsarSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.pulsar.PulsarSinkConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -85,6 +85,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { try { diff --git 
a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/connector/PulsarSourceConnector.java b/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/connector/PulsarSourceConnector.java index 718dfc7357..0bc576221e 100644 --- a/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/connector/PulsarSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-pulsar/src/main/java/org/apache/eventmesh/connector/pulsar/source/connector/PulsarSourceConnector.java @@ -17,13 +17,13 @@ package org.apache.eventmesh.connector.pulsar.source.connector; -import org.apache.eventmesh.connector.pulsar.source.config.PulsarSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.pulsar.PulsarSourceConfig; +import org.apache.eventmesh.common.remote.offset.pulsar.PulsarRecordPartition; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.pulsar.client.api.Consumer; import org.apache.pulsar.client.api.Message; @@ -33,9 +33,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import lombok.extern.slf4j.Slf4j; @@ -89,6 +87,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { try { @@ -108,10 +111,9 @@ public List poll() { Message message = (Message) msg; byte[] body = message.getData(); String bodyStr = new String(body, StandardCharsets.UTF_8); - Map map = new HashMap<>(); - map.put("topic", consumer.getTopic()); - map.put("queueId", String.valueOf(message.getSequenceId())); - RecordPartition partition = new RecordPartition(map); + PulsarRecordPartition partition = new PulsarRecordPartition(); + partition.setTopic(consumer.getTopic()); + partition.setQueueId(message.getSequenceId()); ConnectRecord connectRecord = new ConnectRecord(partition, null, timestamp, bodyStr); connectRecord.addExtension("topic", consumer.getTopic()); connectRecords.add(connectRecord); diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle b/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle index 54764c7b63..2693b681d0 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-rabbitmq/build.gradle @@ -20,7 +20,7 @@ dependencies { api project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation project(":eventmesh-common") // rabbitmq - implementation 'com.rabbitmq:amqp-client:5.21.0' + implementation 'com.rabbitmq:amqp-client:5.22.0' implementation 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/config/RabbitMQServerConfig.java b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/config/RabbitMQServerConfig.java 
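// Illustrative sketch, not part of the patch: the Pulsar source hunk above replaces the old
// Map<String, String>-based RecordPartition with a typed partition object (PulsarRecordPartition
// with setTopic/setQueueId). A minimal, self-contained version of the same idea, using a
// hypothetical TopicQueuePartition class rather than the real EventMesh types:
import java.util.Objects;

final class TopicQueuePartition {

    private final String topic;
    private final long queueId;

    TopicQueuePartition(String topic, long queueId) {
        this.topic = Objects.requireNonNull(topic, "topic");
        this.queueId = queueId;
    }

    String getTopic() {
        return topic;
    }

    long getQueueId() {
        return queueId;
    }

    // Value semantics, so the partition can still act as a lookup key for offset storage.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof TopicQueuePartition)) {
            return false;
        }
        TopicQueuePartition that = (TopicQueuePartition) o;
        return queueId == that.queueId && topic.equals(that.topic);
    }

    @Override
    public int hashCode() {
        return Objects.hash(topic, queueId);
    }
}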
index 9b33ce0c83..27b89c62e5 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/config/RabbitMQServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/config/RabbitMQServerConfig.java @@ -17,10 +17,14 @@ package org.apache.eventmesh.connector.rabbitmq.config; +import org.apache.eventmesh.common.config.connector.Config; + import lombok.Data; +import lombok.EqualsAndHashCode; @Data -public class RabbitMQServerConfig { +@EqualsAndHashCode(callSuper = true) +public class RabbitMQServerConfig extends Config { private boolean sourceEnable; diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/connector/RabbitMQSinkConnector.java b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/connector/RabbitMQSinkConnector.java index 250f31c5bb..08d1cefbac 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/connector/RabbitMQSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/sink/connector/RabbitMQSinkConnector.java @@ -17,12 +17,12 @@ package org.apache.eventmesh.connector.rabbitmq.sink.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.rabbitmq.RabbitMQSinkConfig; import org.apache.eventmesh.connector.rabbitmq.client.RabbitmqClient; import org.apache.eventmesh.connector.rabbitmq.client.RabbitmqConnectionFactory; import org.apache.eventmesh.connector.rabbitmq.cloudevent.RabbitmqCloudEvent; import org.apache.eventmesh.connector.rabbitmq.cloudevent.RabbitmqCloudEventWriter; -import org.apache.eventmesh.connector.rabbitmq.sink.config.RabbitMQSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -33,6 +33,7 @@ import io.cloudevents.CloudEvent; +import com.rabbitmq.client.BuiltinExchangeType; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; @@ -77,7 +78,8 @@ public void init(ConnectorContext connectorContext) throws Exception { @Override public void start() throws Exception { if (!started) { - rabbitmqClient.binding(channel, sinkConfig.getConnectorConfig().getExchangeType(), sinkConfig.getConnectorConfig().getExchangeName(), + BuiltinExchangeType builtinExchangeType = BuiltinExchangeType.valueOf(sinkConfig.getConnectorConfig().getExchangeType()); + rabbitmqClient.binding(channel, builtinExchangeType, sinkConfig.getConnectorConfig().getExchangeName(), sinkConfig.getConnectorConfig().getRoutingKey(), sinkConfig.getConnectorConfig().getQueueName()); started = true; } @@ -93,6 +95,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { if (started) { diff --git a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java 
b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java index 95b09034ef..a19b159c1c 100644 --- a/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-rabbitmq/src/main/java/org/apache/eventmesh/connector/rabbitmq/source/connector/RabbitMQSourceConnector.java @@ -18,12 +18,12 @@ package org.apache.eventmesh.connector.rabbitmq.source.connector; import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.rabbitmq.RabbitMQSourceConfig; +import org.apache.eventmesh.common.config.connector.mq.rabbitmq.SourceConnectorConfig; import org.apache.eventmesh.connector.rabbitmq.client.RabbitmqClient; import org.apache.eventmesh.connector.rabbitmq.client.RabbitmqConnectionFactory; import org.apache.eventmesh.connector.rabbitmq.cloudevent.RabbitmqCloudEvent; -import org.apache.eventmesh.connector.rabbitmq.source.config.RabbitMQSourceConfig; -import org.apache.eventmesh.connector.rabbitmq.source.config.SourceConnectorConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -40,6 +40,7 @@ import io.cloudevents.CloudEvent; +import com.rabbitmq.client.BuiltinExchangeType; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; import com.rabbitmq.client.GetResponse; @@ -53,10 +54,12 @@ public class RabbitMQSourceConnector implements Source { private volatile boolean started = false; - private static final int DEFAULT_BATCH_SIZE = 10; - private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + private final RabbitmqConnectionFactory rabbitmqConnectionFactory = new RabbitmqConnectionFactory(); private RabbitMQSourceHandler rabbitMQSourceHandler; @@ -83,7 +86,9 @@ public void init(Config config) throws Exception { @Override public void init(ConnectorContext connectorContext) throws Exception { - this.queue = new LinkedBlockingQueue<>(1000); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); this.sourceConfig = (RabbitMQSourceConfig) ((SourceConnectorContext) connectorContext).getSourceConfig(); this.rabbitmqClient = new RabbitmqClient(rabbitmqConnectionFactory); this.connection = rabbitmqClient.getConnection(sourceConfig.getConnectorConfig().getHost(), @@ -98,7 +103,8 @@ public void init(ConnectorContext connectorContext) throws Exception { @Override public void start() throws Exception { if (!started) { - rabbitmqClient.binding(channel, sourceConfig.getConnectorConfig().getExchangeType(), sourceConfig.getConnectorConfig().getExchangeName(), + BuiltinExchangeType builtinExchangeType = BuiltinExchangeType.valueOf(sourceConfig.getConnectorConfig().getExchangeType()); + rabbitmqClient.binding(channel, builtinExchangeType, sourceConfig.getConnectorConfig().getExchangeName(), sourceConfig.getConnectorConfig().getRoutingKey(), sourceConfig.getConnectorConfig().getQueueName()); executor.execute(this.rabbitMQSourceHandler); 
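// Illustrative sketch, not part of the patch: the binding call just above now converts the
// configured exchange type string into com.rabbitmq.client.BuiltinExchangeType via valueOf.
// A standalone version of that conversion with a defensive fallback; it assumes the
// configuration stores the enum constant name (e.g. "TOPIC"), which valueOf requires:
import com.rabbitmq.client.BuiltinExchangeType;

import java.util.Locale;

final class ExchangeTypes {

    private ExchangeTypes() {
    }

    static BuiltinExchangeType parse(String configured) {
        try {
            return BuiltinExchangeType.valueOf(configured.trim().toUpperCase(Locale.ROOT));
        } catch (IllegalArgumentException | NullPointerException e) {
            // Unknown or missing value: fall back to a direct exchange rather than failing the bind.
            return BuiltinExchangeType.DIRECT;
        }
    }

    public static void main(String[] args) {
        System.out.println(parse("topic")); // prints TOPIC
    }
}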
started = true; @@ -115,6 +121,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { if (started) { @@ -132,15 +143,21 @@ public void stop() { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-redis/build.gradle b/eventmesh-connectors/eventmesh-connector-redis/build.gradle index 425a10570a..fabfe1c983 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-redis/build.gradle @@ -16,9 +16,10 @@ */ dependencies { + implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation 'org.redisson:redisson:3.17.3' + implementation 'org.redisson:redisson:3.38.1' api 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/config/RedisServerConfig.java b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/config/RedisServerConfig.java index 6b55b0e6e9..d4aaee292a 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/config/RedisServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/config/RedisServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.redis.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnector.java b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnector.java index e07e05e276..5b7d27c3ba 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnector.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.redis.sink.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.redis.RedisSinkConfig; import org.apache.eventmesh.connector.redis.cloudevent.CloudEventCodec; -import org.apache.eventmesh.connector.redis.sink.config.RedisSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; 
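// Illustrative sketch, not part of the patch: the poll() rewrites in the hunks above replace the
// fixed DEFAULT_BATCH_SIZE and 3-second per-element timeout with a configurable batch size and an
// overall wait-time budget that shrinks as elements arrive. The same drain loop over a plain
// BlockingQueue, with illustrative names rather than the EventMesh connector APIs:
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

final class BudgetedDrain {

    private BudgetedDrain() {
    }

    static <T> List<T> drain(BlockingQueue<T> queue, int maxBatchSize, long maxWaitMillis)
        throws InterruptedException {
        long startTime = System.currentTimeMillis();
        long remaining = maxWaitMillis;
        List<T> batch = new ArrayList<>(maxBatchSize);
        for (int count = 0; count < maxBatchSize; ++count) {
            T element = queue.poll(remaining, TimeUnit.MILLISECONDS);
            if (element == null) {
                break; // time budget exhausted or the queue stayed empty
            }
            batch.add(element);
            // Recompute the remaining budget so the whole batch never waits longer than maxWaitMillis.
            long elapsed = System.currentTimeMillis() - startTime;
            remaining = Math.max(maxWaitMillis - elapsed, 0);
        }
        return batch;
    }
}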
import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -85,6 +85,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { this.redissonClient.shutdown(); diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java index 2933d545dd..5b858afa30 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-redis/src/main/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnector.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.redis.source.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.redis.RedisSourceConfig; import org.apache.eventmesh.connector.redis.cloudevent.CloudEventCodec; -import org.apache.eventmesh.connector.redis.source.config.RedisSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; @@ -40,8 +40,6 @@ public class RedisSourceConnector implements Source { - private static final int DEFAULT_BATCH_SIZE = 10; - private RTopic topic; private RedisSourceConfig sourceConfig; @@ -50,6 +48,10 @@ public class RedisSourceConnector implements Source { private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + @Override public Class configClass() { return RedisSourceConfig.class; @@ -73,7 +75,9 @@ private void doInit() { redisConfig.useSingleServer().setAddress(sourceConfig.connectorConfig.getServer()); redisConfig.setCodec(CloudEventCodec.getInstance()); this.redissonClient = Redisson.create(redisConfig); - this.queue = new LinkedBlockingQueue<>(1000); + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); } @Override @@ -94,6 +98,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { this.topic.removeAllListeners(); @@ -102,15 +111,21 @@ public void stop() throws Exception { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; + + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - CloudEvent event = queue.poll(3, TimeUnit.SECONDS); + CloudEvent event = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (event == null) { break; } - 
connectRecords.add(CloudEventUtil.convertEventToRecord(event)); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { break; } diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnectorTest.java index 13ec4f7379..c4d153bd25 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/sink/connector/RedisSinkConnectorTest.java @@ -17,12 +17,10 @@ package org.apache.eventmesh.connector.redis.sink.connector; +import org.apache.eventmesh.common.config.connector.redis.RedisSinkConfig; import org.apache.eventmesh.connector.redis.AbstractRedisServer; import org.apache.eventmesh.connector.redis.cloudevent.CloudEventCodec; -import org.apache.eventmesh.connector.redis.sink.config.RedisSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.nio.charset.StandardCharsets; @@ -79,9 +77,7 @@ public void testPutConnectRecords() throws InterruptedException { List records = new ArrayList<>(); for (int i = 0; i < expectedCount; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, System.currentTimeMillis(), + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), expectedMessage.getBytes(StandardCharsets.UTF_8)); connectRecord.addExtension("id", String.valueOf(UUID.randomUUID())); connectRecord.addExtension("source", "testSource"); diff --git a/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnectorTest.java index da68a5b170..326798d64a 100644 --- a/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-redis/src/test/java/org/apache/eventmesh/connector/redis/source/connector/RedisSourceConnectorTest.java @@ -17,12 +17,10 @@ package org.apache.eventmesh.connector.redis.source.connector; +import org.apache.eventmesh.common.config.connector.redis.RedisSourceConfig; import org.apache.eventmesh.connector.redis.AbstractRedisServer; import org.apache.eventmesh.connector.redis.cloudevent.CloudEventCodec; -import org.apache.eventmesh.connector.redis.source.config.RedisSourceConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import 
org.apache.eventmesh.openconnect.util.CloudEventUtil; import org.apache.eventmesh.openconnect.util.ConfigUtil; @@ -76,9 +74,7 @@ public void testPollConnectRecords() throws Exception { private void publishMockEvents() { int mockCount = 5; for (int i = 0; i < mockCount; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, System.currentTimeMillis(), + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), ("\"" + expectedMessage + "\"").getBytes(StandardCharsets.UTF_8)); connectRecord.addExtension("id", String.valueOf(UUID.randomUUID())); connectRecord.addExtension("source", "testSource"); diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/config/RocketMQServerConfig.java b/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/config/RocketMQServerConfig.java index 4dc574cda2..842094e07f 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/config/RocketMQServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/config/RocketMQServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.rocketmq.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnector.java b/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnector.java index a63d92d7f5..31d45a28f4 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnector.java @@ -17,8 +17,9 @@ package org.apache.eventmesh.connector.rocketmq.sink.connector; -import org.apache.eventmesh.connector.rocketmq.sink.config.RocketMQSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.rocketmq.RocketMQSinkConfig; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -34,7 +35,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class RocketMQSinkConnector implements Sink { +public class RocketMQSinkConnector implements Sink, ConnectorCreateService { private RocketMQSinkConfig sinkConfig; @@ -77,6 +78,11 @@ public String name() { return this.sinkConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { producer.shutdown(); @@ -108,4 +114,9 @@ public Message convertRecordToMessage(ConnectRecord connectRecord) { } return message; } + + @Override + public Sink create() { + return new RocketMQSinkConnector(); + } } diff 
--git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnector.java b/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnector.java index 8023be479f..410f927d75 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-rocketmq/src/main/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnector.java @@ -17,14 +17,17 @@ package org.apache.eventmesh.connector.rocketmq.source.connector; -import org.apache.eventmesh.connector.rocketmq.source.config.RocketMQSourceConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.mq.rocketmq.RocketMQSourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordOffset; +import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordPartition; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetStorageReader; import org.apache.rocketmq.client.consumer.AllocateMessageQueueStrategy; @@ -60,7 +63,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class RocketMQSourceConnector implements Source { +public class RocketMQSourceConnector implements Source, ConnectorCreateService { private RocketMQSourceConfig sourceConfig; @@ -77,7 +80,7 @@ public class RocketMQSourceConnector implements Source { private final ConcurrentHashMap> prepareCommitOffset = new ConcurrentHashMap<>(); - private ConcurrentHashMap> queue2Offsets = new ConcurrentHashMap<>(); + private final ConcurrentHashMap> queue2Offsets = new ConcurrentHashMap<>(); private final AtomicInteger unAckCounter = new AtomicInteger(); @@ -141,15 +144,14 @@ public void start() throws Exception { for (MessageQueue messageQueue : mqDivided) { try { - Map partitionMap = new HashMap<>(); - partitionMap.put("topic", messageQueue.getTopic()); - partitionMap.put("brokerName", messageQueue.getBrokerName()); - partitionMap.put("queueId", messageQueue.getQueueId() + ""); - RecordPartition recordPartition = new RecordPartition(partitionMap); + RocketMQRecordPartition recordPartition = new RocketMQRecordPartition(); + recordPartition.setBroker(messageQueue.getBrokerName()); + recordPartition.setTopic(messageQueue.getTopic()); + recordPartition.setQueueId(messageQueue.getQueueId() + ""); RecordOffset recordOffset = offsetStorageReader.readOffset(recordPartition); log.info("assigned messageQueue {}, recordOffset {}", messageQueue, recordOffset); if (recordOffset != null) { - long pollOffset = (Long) recordOffset.getOffset().get("queueOffset"); + long pollOffset = 
((RocketMQRecordOffset) recordOffset).getQueueOffset(); if (pollOffset != 0) { consumer.seek(messageQueue, pollOffset); } @@ -186,13 +188,13 @@ private List getMessageQueueList(String topic) throws MQClientExce @Override public void commit(ConnectRecord record) { // send success, commit offset - Map map = record.getPosition().getPartition().getPartition(); - String brokerName = (String) map.get("brokerName"); - String topic = (String) map.get("topic"); - int queueId = Integer.parseInt((String) map.get("queueId")); + RocketMQRecordPartition rocketMQRecordPartition = (RocketMQRecordPartition) (record.getPosition().getRecordPartition()); + String brokerName = rocketMQRecordPartition.getBroker(); + String topic = rocketMQRecordPartition.getTopic(); + int queueId = Integer.parseInt(rocketMQRecordPartition.getQueueId()); MessageQueue mq = new MessageQueue(topic, brokerName, queueId); - Map offsetMap = record.getPosition().getOffset().getOffset(); - long offset = Long.parseLong((String) offsetMap.get("queueOffset")); + RocketMQRecordOffset rocketMQRecordOffset = (RocketMQRecordOffset) record.getPosition().getRecordOffset(); + long offset = rocketMQRecordOffset.getQueueOffset(); long canCommitOffset = removeMessage(mq, offset); log.info("commit record {}|mq {}|canCommitOffset {}", record, mq, canCommitOffset); // commit offset to prepareCommitOffset @@ -204,6 +206,11 @@ public String name() { return this.sourceConfig.getConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { consumer.unsubscribe(sourceConfig.getConnectorConfig().getTopic()); @@ -232,17 +239,18 @@ public List poll() { } public static RecordOffset convertToRecordOffset(Long offset) { - Map offsetMap = new HashMap<>(); - offsetMap.put("queueOffset", offset + ""); - return new RecordOffset(offsetMap); + RocketMQRecordOffset rocketMQRecordOffset = new RocketMQRecordOffset(); + rocketMQRecordOffset.setQueueOffset(offset); + return rocketMQRecordOffset; } public static RecordPartition convertToRecordPartition(String topic, String brokerName, int queueId) { - Map map = new HashMap<>(); - map.put("topic", topic); - map.put("brokerName", brokerName); - map.put("queueId", queueId + ""); - return new RecordPartition(map); + RocketMQRecordPartition rocketMQRecordPartition = new RocketMQRecordPartition(); + rocketMQRecordPartition.setBroker(brokerName); + rocketMQRecordPartition.setTopic(topic); + rocketMQRecordPartition.setQueueId(queueId + ""); + + return rocketMQRecordPartition; } private void putPulledQueueOffset(MessageExt messageExt) { @@ -301,4 +309,9 @@ public void commitOffset(MessageQueue mq, long canCommitOffset) { commitOffset.add(new AtomicLong(nextBeginOffset)); prepareCommitOffset.put(mq, commitOffset); } + + @Override + public Source create() { + return new RocketMQSourceConnector(); + } } diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnectorTest.java index afd13a3f2f..51d77182a0 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/sink/connector/RocketMQSinkConnectorTest.java @@ 
-21,10 +21,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import org.apache.eventmesh.connector.rocketmq.sink.config.RocketMQSinkConfig; +import org.apache.eventmesh.common.config.connector.mq.rocketmq.RocketMQSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import org.apache.rocketmq.client.producer.DefaultMQProducer; @@ -81,9 +79,7 @@ public void testRocketMQSinkConnector() throws Exception { private List generateMockedRecords(final int messageCount) { List records = new ArrayList<>(); for (int i = 0; i < messageCount; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, System.currentTimeMillis(), + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), EXPECTED_MESSAGE.getBytes(StandardCharsets.UTF_8)); connectRecord.addExtension("id", String.valueOf(UUID.randomUUID())); records.add(connectRecord); diff --git a/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnectorTest.java index 5f5e3410bf..78510e2e4f 100644 --- a/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-rocketmq/src/test/java/org/apache/eventmesh/connector/rocketmq/source/connector/RocketMQSourceConnectorTest.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.rocketmq.source.connector; -import org.apache.eventmesh.connector.rocketmq.source.config.RocketMQSourceConfig; +import org.apache.eventmesh.common.config.connector.mq.rocketmq.RocketMQSourceConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.eventmesh.openconnect.util.ConfigUtil; diff --git a/eventmesh-connectors/eventmesh-connector-s3/build.gradle b/eventmesh-connectors/eventmesh-connector-s3/build.gradle index 9d004b2f15..af2867917e 100644 --- a/eventmesh-connectors/eventmesh-connector-s3/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-s3/build.gradle @@ -16,6 +16,7 @@ */ dependencies { + implementation project(":eventmesh-common") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") implementation 'software.amazon.awssdk:s3' compileOnly 'org.projectlombok:lombok' diff --git a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/config/S3ServerConfig.java b/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/config/S3ServerConfig.java index f5c4371d4c..a422c0468c 100644 --- a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/config/S3ServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/config/S3ServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.s3.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import 
org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/connector/S3SourceConnector.java b/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/connector/S3SourceConnector.java index 83ac15398b..078ed7691a 100644 --- a/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/connector/S3SourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-s3/src/main/java/org/apache/eventmesh/connector/s3/source/connector/S3SourceConnector.java @@ -17,21 +17,21 @@ package org.apache.eventmesh.connector.s3.source.connector; -import org.apache.eventmesh.connector.s3.source.config.S3SourceConfig; -import org.apache.eventmesh.connector.s3.source.config.SourceConnectorConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.s3.S3SourceConfig; +import org.apache.eventmesh.common.config.connector.s3.SourceConnectorConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.S3.S3RecordOffset; +import org.apache.eventmesh.common.remote.offset.S3.S3RecordPartition; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; @@ -121,6 +121,11 @@ public String name() { return this.sourceConfig.getSourceConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { @@ -157,16 +162,16 @@ public List poll() { } private RecordPartition getRecordPartition() { - Map map = new HashMap<>(); - map.put(REGION, this.sourceConnectorConfig.getRegion()); - map.put(BUCKET, this.sourceConnectorConfig.getBucket()); - map.put(FILE_NAME, this.sourceConnectorConfig.getFileName()); - return new RecordPartition(map); + S3RecordPartition s3RecordPartition = new S3RecordPartition(); + s3RecordPartition.setRegion(this.sourceConnectorConfig.getRegion()); + s3RecordPartition.setBucket(this.sourceConnectorConfig.getBucket()); + s3RecordPartition.setFileName(this.sourceConnectorConfig.getFileName()); + return s3RecordPartition; } private RecordOffset getRecordOffset() { - Map map = new HashMap<>(); - map.put(POSITION, String.valueOf(this.position)); - return new RecordOffset(map); + S3RecordOffset s3RecordOffset = new S3RecordOffset(); + s3RecordOffset.setOffset(this.position); + return s3RecordOffset; } } diff --git a/eventmesh-connectors/eventmesh-connector-s3/src/test/java/org/apache/eventmesh/connector/s3/source/S3SourceConnectorTest.java 
b/eventmesh-connectors/eventmesh-connector-s3/src/test/java/org/apache/eventmesh/connector/s3/source/S3SourceConnectorTest.java index d6bb08d421..4d5d41093b 100644 --- a/eventmesh-connectors/eventmesh-connector-s3/src/test/java/org/apache/eventmesh/connector/s3/source/S3SourceConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-s3/src/test/java/org/apache/eventmesh/connector/s3/source/S3SourceConnectorTest.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.s3.source; -import org.apache.eventmesh.connector.s3.source.config.S3SourceConfig; -import org.apache.eventmesh.connector.s3.source.config.SourceConnectorConfig; +import org.apache.eventmesh.common.config.connector.s3.S3SourceConfig; +import org.apache.eventmesh.common.config.connector.s3.SourceConnectorConfig; import org.apache.eventmesh.connector.s3.source.connector.S3SourceConnector; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; diff --git a/eventmesh-connectors/eventmesh-connector-slack/build.gradle b/eventmesh-connectors/eventmesh-connector-slack/build.gradle index 90b7d9b9b5..665f748b5f 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/build.gradle +++ b/eventmesh-connectors/eventmesh-connector-slack/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation project(":eventmesh-sdks:eventmesh-sdk-java") implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") - implementation "com.slack.api:bolt:1.39.+" + implementation "com.slack.api:bolt:1.42.+" implementation 'com.google.guava:guava' compileOnly 'org.projectlombok:lombok' diff --git a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/config/SlackConnectServerConfig.java b/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/config/SlackConnectServerConfig.java index 515c5af6c7..97479cb87a 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/config/SlackConnectServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/config/SlackConnectServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.slack.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnector.java b/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnector.java index a026f2aa22..836409af71 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-slack/src/main/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.slack.sink.connector; -import org.apache.eventmesh.connector.slack.sink.config.SlackSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.slack.SlackSinkConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; 
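
Note on the typed record-position refactor applied to the RocketMQ and S3 source connectors above: the map-of-strings RecordPartition/RecordOffset is replaced by dedicated subclasses that are cast back at commit time. A minimal sketch of that round trip, using the class and accessor names from this patch; the import packages for the RocketMQ offset classes are assumed by analogy with the S3 and Spring classes shown here, the broker/topic/offset values are placeholders, and the sketch assumes the ConnectRecord constructor arguments are what getPosition() later exposes, as the new commit() relies on.

    import java.nio.charset.StandardCharsets;

    import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordOffset;     // assumed package
    import org.apache.eventmesh.common.remote.offset.rocketmq.RocketMQRecordPartition;  // assumed package
    import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord;

    import org.apache.rocketmq.common.message.MessageQueue;

    public class TypedPositionSketch {

        public static void main(String[] args) {
            // Build the typed position the way convertToRecordPartition/convertToRecordOffset now do.
            RocketMQRecordPartition partition = new RocketMQRecordPartition();
            partition.setBroker("broker-a");            // placeholder broker name
            partition.setTopic("TEST-TOPIC");           // placeholder topic
            partition.setQueueId(String.valueOf(0));

            RocketMQRecordOffset offset = new RocketMQRecordOffset();
            offset.setQueueOffset(42L);                 // placeholder queue offset

            ConnectRecord record = new ConnectRecord(partition, offset, System.currentTimeMillis(),
                "payload".getBytes(StandardCharsets.UTF_8));

            // Read the position back the way the new commit() does: a cast instead of string-map lookups.
            RocketMQRecordPartition p = (RocketMQRecordPartition) record.getPosition().getRecordPartition();
            RocketMQRecordOffset o = (RocketMQRecordOffset) record.getPosition().getRecordOffset();
            MessageQueue mq = new MessageQueue(p.getTopic(), p.getBroker(), Integer.parseInt(p.getQueueId()));
            System.out.println(mq + " @ " + o.getQueueOffset());
        }
    }

The S3 source connector follows the same pattern with S3RecordPartition (region/bucket/fileName) and S3RecordOffset (offset).
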
import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -84,6 +84,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() { isRunning = false; diff --git a/eventmesh-connectors/eventmesh-connector-slack/src/test/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-slack/src/test/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnectorTest.java index 3f0a32755f..fc5f04c7e1 100644 --- a/eventmesh-connectors/eventmesh-connector-slack/src/test/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-slack/src/test/java/org/apache/eventmesh/connector/slack/sink/connector/SlackSinkConnectorTest.java @@ -22,10 +22,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import org.apache.eventmesh.connector.slack.sink.config.SlackSinkConfig; +import org.apache.eventmesh.common.config.connector.slack.SlackSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.lang.reflect.Field; @@ -74,9 +72,7 @@ public void testSendMessageToSlack() throws Exception { final int times = 3; List records = new ArrayList<>(); for (int i = 0; i < times; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8)); records.add(connectRecord); } diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/config/SpringConnectServerConfig.java b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/config/SpringConnectServerConfig.java index 7eb2009fbd..88cf8156a8 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/config/SpringConnectServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/config/SpringConnectServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.spring.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnector.java b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnector.java index 648e1f8071..9ba99cd547 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnector.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.connector.spring.sink.connector; 
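
The empty onException(ConnectRecord) overrides added throughout this patch implement a new hook on the connector interfaces. A hedged sketch of one way a connector could use it; the class name, logging body, and dead-letter suggestion are illustrative only, since every connector in this patch currently leaves the hook empty.

    import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord;

    import lombok.extern.slf4j.Slf4j;

    @Slf4j
    public class DeadLetterAwareSink {

        // Hypothetical body for the new hook: log the failure and leave room for a retry
        // or dead-letter route, which this patch does not define.
        public void onException(ConnectRecord record) {
            log.warn("Record {} failed after being handed to the connector; "
                + "a concrete connector might retry it or route it to a dead-letter destination here.", record);
        }
    }
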
-import org.apache.eventmesh.connector.spring.sink.config.SpringSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.spring.SpringSinkConfig; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -77,6 +77,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/MessageSendingOperations.java b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/MessageSendingOperations.java index a337c1cd81..5f38914bb1 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/MessageSendingOperations.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/MessageSendingOperations.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.spring.source; -import org.apache.eventmesh.openconnect.api.callback.SendMessageCallback; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendMessageCallback; /** * Operations for sending messages. diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java index 5f4d5c89b7..6efed2db3c 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/main/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnector.java @@ -17,17 +17,19 @@ package org.apache.eventmesh.connector.spring.source.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.spring.SpringSourceConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.spring.SpringRecordOffset; +import org.apache.eventmesh.common.remote.offset.spring.SpringRecordPartition; import org.apache.eventmesh.connector.spring.source.MessageSendingOperations; -import org.apache.eventmesh.connector.spring.source.config.SpringSourceConfig; import org.apache.eventmesh.openconnect.SourceWorker; -import org.apache.eventmesh.openconnect.api.callback.SendMessageCallback; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendMessageCallback; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import 
org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.util.ArrayList; import java.util.List; @@ -50,14 +52,16 @@ public class SpringSourceConnector implements Source, MessageSendingOperations, private static final String CONNECTOR_PROPERTY_PREFIX = "eventmesh.connector."; - private static final int DEFAULT_BATCH_SIZE = 10; - private ApplicationContext applicationContext; private SpringSourceConfig sourceConfig; private BlockingQueue queue; + private int maxBatchSize; + + private long maxPollWaitTime; + @Override public Class configClass() { return SpringSourceConfig.class; @@ -67,7 +71,7 @@ public Class configClass() { public void init(Config config) throws Exception { // init config for spring source connector this.sourceConfig = (SpringSourceConfig) config; - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); } @Override @@ -75,7 +79,13 @@ public void init(ConnectorContext connectorContext) throws Exception { SourceConnectorContext sourceConnectorContext = (SourceConnectorContext) connectorContext; // init config for spring source connector this.sourceConfig = (SpringSourceConfig) sourceConnectorContext.getSourceConfig(); - this.queue = new LinkedBlockingQueue<>(1000); + doInit(); + } + + private void doInit() { + this.queue = new LinkedBlockingQueue<>(sourceConfig.getPollConfig().getCapacity()); + this.maxBatchSize = sourceConfig.getPollConfig().getMaxBatchSize(); + this.maxPollWaitTime = sourceConfig.getPollConfig().getMaxWaitTime(); } @Override @@ -93,6 +103,11 @@ public String name() { return this.sourceConfig.getSourceConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws Exception { @@ -100,15 +115,21 @@ public void stop() throws Exception { @Override public List poll() { - List connectRecords = new ArrayList<>(DEFAULT_BATCH_SIZE); + long startTime = System.currentTimeMillis(); + long remainingTime = maxPollWaitTime; - for (int count = 0; count < DEFAULT_BATCH_SIZE; ++count) { + List connectRecords = new ArrayList<>(maxBatchSize); + for (int count = 0; count < maxBatchSize; ++count) { try { - ConnectRecord connectRecord = queue.poll(3, TimeUnit.SECONDS); + ConnectRecord connectRecord = queue.poll(remainingTime, TimeUnit.MILLISECONDS); if (connectRecord == null) { break; } connectRecords.add(connectRecord); + + // calculate elapsed time and update remaining time for next poll + long elapsedTime = System.currentTimeMillis() - startTime; + remainingTime = maxPollWaitTime > elapsedTime ? maxPollWaitTime - elapsedTime : 0; } catch (InterruptedException e) { Thread currentThread = Thread.currentThread(); log.warn("[SpringSourceConnector] Interrupting thread {} due to exception {}", @@ -121,12 +142,13 @@ public List poll() { /** * Send message. + * * @param message message to send */ @Override public void send(Object message) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); + RecordPartition partition = new SpringRecordPartition(); + RecordOffset offset = new SpringRecordOffset(); ConnectRecord record = new ConnectRecord(partition, offset, System.currentTimeMillis(), message); addSpringEnvironmentPropertyExtensions(record); queue.offer(record); @@ -134,14 +156,14 @@ public void send(Object message) { /** * Send message with a callback. - * @param message message to send. - * @param workerCallback After the user sends the message to the Connector, - * the SourceWorker will fetch message and invoke. 
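
The reworked SpringSourceConnector#poll() above replaces the fixed batch of 10 with a loop bounded by the new PollConfig values (capacity, maxBatchSize, maxWaitTime), shrinking the remaining wait budget after every element so the whole batch respects one time limit. A self-contained sketch of the same pattern; the utility class name and generic element type are illustrative.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.TimeUnit;

    public final class BatchPollSketch {

        // Drain up to maxBatchSize elements, but never block longer than maxPollWaitTime in total.
        public static <T> List<T> pollBatch(BlockingQueue<T> queue, int maxBatchSize, long maxPollWaitTime)
            throws InterruptedException {
            long startTime = System.currentTimeMillis();
            long remainingTime = maxPollWaitTime;
            List<T> batch = new ArrayList<>(maxBatchSize);
            for (int count = 0; count < maxBatchSize; ++count) {
                T element = queue.poll(remainingTime, TimeUnit.MILLISECONDS);
                if (element == null) {
                    break; // queue drained or the overall wait budget is spent
                }
                batch.add(element);
                // shrink the budget so the next poll only waits for what is left of maxPollWaitTime
                long elapsed = System.currentTimeMillis() - startTime;
                remainingTime = maxPollWaitTime > elapsed ? maxPollWaitTime - elapsed : 0;
            }
            return batch;
        }
    }

In the connector itself the batch size and time budget come from sourceConfig.getPollConfig(), as shown in doInit() above.
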
+ * + * @param message message to send. + * @param workerCallback After the user sends the message to the Connector, the SourceWorker will fetch message and invoke. */ @Override public void send(Object message, SendMessageCallback workerCallback) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); + RecordPartition partition = new SpringRecordPartition(); + RecordOffset offset = new SpringRecordOffset(); ConnectRecord record = new ConnectRecord(partition, offset, System.currentTimeMillis(), message); record.addExtension(SourceWorker.CALLBACK_EXTENSION, workerCallback); addSpringEnvironmentPropertyExtensions(record); diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnectorTest.java index ea0ea7c359..767c8803de 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/sink/connector/SpringSinkConnectorTest.java @@ -19,10 +19,8 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.apache.eventmesh.connector.spring.sink.config.SpringSinkConfig; +import org.apache.eventmesh.common.config.connector.spring.SpringSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import java.util.ArrayList; import java.util.List; @@ -71,9 +69,7 @@ public void testProcessRecordsInSinkConnectorQueue() throws Exception { private void writeMockedRecords(int count, String message) throws Exception { List records = new ArrayList<>(); for (int i = 0; i < count; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - records.add(new ConnectRecord(partition, offset, System.currentTimeMillis(), message + i)); + records.add(new ConnectRecord(null, null, System.currentTimeMillis(), message + i)); } connector.put(records); } diff --git a/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnectorTest.java b/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnectorTest.java index 65c71aea09..c4c59f0e5f 100644 --- a/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-spring/src/test/java/org/apache/eventmesh/connector/spring/source/connector/SpringSourceConnectorTest.java @@ -19,7 +19,7 @@ import static org.mockito.Mockito.doReturn; -import org.apache.eventmesh.connector.spring.source.config.SpringSourceConfig; +import org.apache.eventmesh.common.config.connector.spring.SpringSourceConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import java.util.List; diff --git a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/config/WeChatConnectServerConfig.java 
b/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/config/WeChatConnectServerConfig.java index a2634b6b49..2555041c02 100644 --- a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/config/WeChatConnectServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/config/WeChatConnectServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.wechat.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnector.java b/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnector.java index ac66ec45cc..6908d119b9 100644 --- a/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-wechat/src/main/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnector.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.connector.wechat.sink.connector; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.wechat.WeChatSinkConfig; import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.connector.wechat.sink.config.WeChatSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -115,6 +115,11 @@ public String name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws IOException { isRunning = false; diff --git a/eventmesh-connectors/eventmesh-connector-wechat/src/test/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-wechat/src/test/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnectorTest.java index d993468c18..00432a4e2c 100644 --- a/eventmesh-connectors/eventmesh-connector-wechat/src/test/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-wechat/src/test/java/org/apache/eventmesh/connector/wechat/sink/connector/WeChatSinkConnectorTest.java @@ -21,10 +21,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import org.apache.eventmesh.connector.wechat.sink.config.WeChatSinkConfig; +import org.apache.eventmesh.common.config.connector.wechat.WeChatSinkConfig; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import java.io.IOException; @@ -111,9 +109,7 @@ public void testSendMessageToWeChat() throws Exception { 
Mockito.doReturn(sendMessageResponse).when(sendMessageRequestCall).execute(); List records = new ArrayList<>(); - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8)); records.add(connectRecord); @@ -141,9 +137,7 @@ public void testSendMessageToWeChatAbnormally() throws Exception { Mockito.doReturn(sendMessageRequestCall).when(okHttpClient).newCall(Mockito.argThat(sendMessageMatcher)); Mockito.doReturn(sendMessageResponse).when(sendMessageRequestCall).execute(); - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, + ConnectRecord connectRecord = new ConnectRecord(null, null, System.currentTimeMillis(), "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8)); Method sendMessageMethod = WeChatSinkConnector.class.getDeclaredMethod("sendMessage", ConnectRecord.class); sendMessageMethod.setAccessible(true); diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/config/WeComConnectServerConfig.java b/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/config/WeComConnectServerConfig.java index 1f864726bf..38235b3d3b 100644 --- a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/config/WeComConnectServerConfig.java +++ b/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/config/WeComConnectServerConfig.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.connector.wecom.config; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.config.connector.Config; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/connector/WeComSinkConnector.java b/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/connector/WeComSinkConnector.java index 499104e11a..ca628fa590 100644 --- a/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/connector/WeComSinkConnector.java +++ b/eventmesh-connectors/eventmesh-connector-wecom/src/main/java/org/apache/eventmesh/connector/wecom/sink/connector/WeComSinkConnector.java @@ -18,12 +18,12 @@ package org.apache.eventmesh.connector.wecom.sink.connector; import org.apache.eventmesh.common.Constants; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.wecom.WeComSinkConfig; import org.apache.eventmesh.common.enums.EventMeshDataContentType; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.wecom.config.WeComMessageTemplateType; import org.apache.eventmesh.connector.wecom.constants.ConnectRecordExtensionKeys; -import org.apache.eventmesh.connector.wecom.sink.config.WeComSinkConfig; -import org.apache.eventmesh.openconnect.api.config.Config; import org.apache.eventmesh.openconnect.api.connector.ConnectorContext; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; @@ -95,6 +95,11 @@ public String 
name() { return this.sinkConfig.getSinkConnectorConfig().getConnectorName(); } + @Override + public void onException(ConnectRecord record) { + + } + @Override public void stop() throws IOException { isRunning = false; diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/MockRecordOffset.java b/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/MockRecordOffset.java new file mode 100644 index 0000000000..066fe3f667 --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/MockRecordOffset.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.connector.wecom.connector; + +import org.apache.eventmesh.common.remote.offset.RecordOffset; + +public class MockRecordOffset extends RecordOffset { + @Override + public Class getRecordOffsetClass() { + return MockRecordOffset.class; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/MockRecordPartition.java b/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/MockRecordPartition.java new file mode 100644 index 0000000000..aae552891f --- /dev/null +++ b/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/MockRecordPartition.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.connector.wecom.connector; + +import org.apache.eventmesh.common.remote.offset.RecordPartition; + +public class MockRecordPartition extends RecordPartition { + @Override + public Class getRecordPartitionClass() { + return MockRecordPartition.class; + } +} diff --git a/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/WeComSinkConnectorTest.java b/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/WeComSinkConnectorTest.java index 4cf9f3523b..64b4e19aa3 100644 --- a/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/WeComSinkConnectorTest.java +++ b/eventmesh-connectors/eventmesh-connector-wecom/src/test/java/org/apache/eventmesh/connector/wecom/connector/WeComSinkConnectorTest.java @@ -21,15 +21,15 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import org.apache.eventmesh.common.config.connector.wecom.WeComSinkConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.wecom.config.WeComMessageTemplateType; import org.apache.eventmesh.connector.wecom.constants.ConnectRecordExtensionKeys; -import org.apache.eventmesh.connector.wecom.sink.config.WeComSinkConfig; import org.apache.eventmesh.connector.wecom.sink.connector.SendMessageResponse; import org.apache.eventmesh.connector.wecom.sink.connector.WeComSinkConnector; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; import org.apache.eventmesh.openconnect.util.ConfigUtil; import org.apache.http.HttpEntity; @@ -73,9 +73,8 @@ public void setUp() throws Exception { Mockito.doReturn(httpEntity).when(mockedResponse).getEntity(); WeComSinkConfig sinkConfig = (WeComSinkConfig) ConfigUtil.parse(connector.configClass()); connector.init(sinkConfig); - Field httpClientField = ReflectionSupport.findFields(connector.getClass(), - (f) -> f.getName().equals("httpClient"), - HierarchyTraversalMode.BOTTOM_UP).get(0); + Field httpClientField = + ReflectionSupport.findFields(connector.getClass(), (f) -> f.getName().equals("httpClient"), HierarchyTraversalMode.BOTTOM_UP).get(0); httpClientField.setAccessible(true); httpClientField.set(connector, httpClient); connector.start(); @@ -89,10 +88,10 @@ public void testSendMessageToWeCom() throws IOException { final int times = 3; List records = new ArrayList<>(); for (int i = 0; i < times; i++) { - RecordPartition partition = new RecordPartition(); - RecordOffset offset = new RecordOffset(); - ConnectRecord connectRecord = new ConnectRecord(partition, offset, - System.currentTimeMillis(), "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8)); + RecordPartition partition = new MockRecordPartition(); + RecordOffset offset = new MockRecordOffset(); + ConnectRecord connectRecord = + new ConnectRecord(partition, offset, System.currentTimeMillis(), "Hello, EventMesh!".getBytes(StandardCharsets.UTF_8)); connectRecord.addExtension(ConnectRecordExtensionKeys.WECOM_MESSAGE_TEMPLATE_TYPE, WeComMessageTemplateType.PLAIN_TEXT.getTemplateType()); records.add(connectRecord); diff --git a/eventmesh-examples/build.gradle 
b/eventmesh-examples/build.gradle index f732a78439..bd90b83495 100644 --- a/eventmesh-examples/build.gradle +++ b/eventmesh-examples/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def grpcVersion = '1.64.0' +def grpcVersion = '1.68.0' dependencies { implementation project(":eventmesh-sdks:eventmesh-sdk-java") diff --git a/eventmesh-examples/src/main/java/org/apache/eventmesh/spring/pub/SpringPubController.java b/eventmesh-examples/src/main/java/org/apache/eventmesh/spring/pub/SpringPubController.java index b7ea8890ee..a734bb6efa 100644 --- a/eventmesh-examples/src/main/java/org/apache/eventmesh/spring/pub/SpringPubController.java +++ b/eventmesh-examples/src/main/java/org/apache/eventmesh/spring/pub/SpringPubController.java @@ -19,9 +19,9 @@ import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.connector.spring.source.connector.SpringSourceConnector; -import org.apache.eventmesh.openconnect.api.callback.SendExcepionContext; -import org.apache.eventmesh.openconnect.api.callback.SendMessageCallback; -import org.apache.eventmesh.openconnect.api.callback.SendResult; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendMessageCallback; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; import java.util.HashMap; import java.util.Map; @@ -53,8 +53,8 @@ public void onSuccess(SendResult sendResult) { } @Override - public void onException(SendExcepionContext sendExcepionContext) { - log.info("Spring source worker send message to EventMesh failed!", sendExcepionContext.getCause()); + public void onException(SendExceptionContext sendExceptionContext) { + log.info("Spring source worker send message to EventMesh failed!", sendExceptionContext.getCause()); } }); return "success!"; diff --git a/eventmesh-transformer/build.gradle b/eventmesh-function/build.gradle similarity index 92% rename from eventmesh-transformer/build.gradle rename to eventmesh-function/build.gradle index ba88591b41..2944f98194 100644 --- a/eventmesh-transformer/build.gradle +++ b/eventmesh-function/build.gradle @@ -14,8 +14,3 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - - -dependencies { - implementation project(":eventmesh-common") -} diff --git a/eventmesh-filter/build.gradle b/eventmesh-function/eventmesh-function-api/build.gradle similarity index 92% rename from eventmesh-filter/build.gradle rename to eventmesh-function/eventmesh-function-api/build.gradle index ba88591b41..2944f98194 100644 --- a/eventmesh-filter/build.gradle +++ b/eventmesh-function/eventmesh-function-api/build.gradle @@ -14,8 +14,3 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - - -dependencies { - implementation project(":eventmesh-common") -} diff --git a/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java new file mode 100644 index 0000000000..8cbb0f9381 --- /dev/null +++ b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/AbstractEventMeshFunctionChain.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.function.api; + +import java.util.ArrayList; +import java.util.List; + +/** + * AbstractEventMeshFunctionChain is an abstract class that implements the {@link EventMeshFunction} interface and provides a framework + * for chaining multiple {@link EventMeshFunction} instances that operate on inputs of type {@code T} and produce outputs of type + * {@code R}. This class can be extended to create specific function chains with customized behavior for different + * data types. + * + *
<p>The primary purpose of this class is to allow the sequential execution of functions, where the output of one + * function is passed as the input to the next function in the chain. The chain can be dynamically modified by adding + * functions either at the beginning or the end of the chain.</p>
+ * + * @param the type of the input to the function + * @param the type of the result of the function + */ +public abstract class AbstractEventMeshFunctionChain implements EventMeshFunction { + + protected final List> functions; + + /** + * Default constructor that initializes an empty function chain. + */ + public AbstractEventMeshFunctionChain() { + this.functions = new ArrayList<>(); + } + + /** + * Constructor that initializes the function chain with a given list of functions. The functions will be executed + * in the order they are provided when the {@link #apply(Object)} method is called. + * + * @param functions the initial list of functions to be added to the chain + */ + public AbstractEventMeshFunctionChain(List> functions) { + this.functions = functions; + } + + /** + * Adds a {@link EventMeshFunction} to the beginning of the chain. The function will be executed first when the + * {@link #apply(Object)} method is called. + * + * @param function the function to be added to the beginning of the chain + */ + public void addFirst(EventMeshFunction function) { + this.functions.add(0, function); + } + + /** + * Adds a {@link EventMeshFunction} to the end of the chain. The function will be executed in sequence after all previously + * added functions when the {@link #apply(Object)} method is called. + * + * @param function the function to be added to the end of the chain + */ + public void addLast(EventMeshFunction function) { + this.functions.add(function); + } +} \ No newline at end of file diff --git a/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java new file mode 100644 index 0000000000..973f097ae0 --- /dev/null +++ b/eventmesh-function/eventmesh-function-api/src/main/java/org/apache/eventmesh/function/api/EventMeshFunction.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.function.api; + +/** + * EventMesh Interface for a function that accepts one argument and produces a result. This is a functional interface whose functional method is + * {@link #apply(Object)}. + * + *
<p>This interface is similar to {@link java.util.function.Function}, + * but it is specifically designed for use within the EventMesh. It allows defining custom functions to process data or events in the EventMesh. The + * main use case is to encapsulate operations that can be passed around and applied to data or event messages in the EventMesh processing + * pipeline.</p>
+ * + * @param the type of the input to the function + * @param the type of the result of the function + */ +public interface EventMeshFunction { + + /** + * Applies this function to the given argument within the context of the EventMesh module. This method encapsulates the logic for processing the + * input data and producing a result, which can be used in the EventMesh event processing pipeline. + * + * @param t the function argument, representing the input data or event to be processed + * @return the function result, representing the processed output + */ + R apply(T t); + +} \ No newline at end of file diff --git a/eventmesh-function/eventmesh-function-filter/build.gradle b/eventmesh-function/eventmesh-function-filter/build.gradle new file mode 100644 index 0000000000..21e28d7baf --- /dev/null +++ b/eventmesh-function/eventmesh-function-filter/build.gradle @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +dependencies { + implementation project(":eventmesh-common") + implementation project(":eventmesh-function:eventmesh-function-api") +} \ No newline at end of file diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/PatternEntry.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/PatternEntry.java similarity index 94% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/PatternEntry.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/PatternEntry.java index 5a2493a371..acc2d5f073 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/PatternEntry.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/PatternEntry.java @@ -15,9 +15,9 @@ * limitations under the License. 
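
AbstractEventMeshFunctionChain above stores the ordered functions but leaves composition to subclasses. A hedged sketch of what a concrete chain could look like; the StringFunctionChain subclass and its short-circuit-on-null rule are illustrative and not defined in this patch.

    import org.apache.eventmesh.function.api.AbstractEventMeshFunctionChain;
    import org.apache.eventmesh.function.api.EventMeshFunction;

    public class StringFunctionChain extends AbstractEventMeshFunctionChain<String, String> {

        @Override
        public String apply(String input) {
            String value = input;
            // run every registered function in order, feeding each result to the next
            for (EventMeshFunction<String, String> function : functions) {
                if (value == null) {
                    break; // a filter-style function dropped the message
                }
                value = function.apply(value);
            }
            return value;
        }
    }

Because Pattern (in the filter changes below) now implements EventMeshFunction and its apply() returns the content or null, a chain like this could drop non-matching events before later functions run.
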
*/ -package org.apache.eventmesh.filter; +package org.apache.eventmesh.function.filter; -import org.apache.eventmesh.filter.condition.Condition; +import org.apache.eventmesh.function.filter.condition.Condition; import java.util.ArrayList; import java.util.List; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/AnythingButCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/AnythingButCondition.java similarity index 97% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/AnythingButCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/AnythingButCondition.java index 2d58136a70..d4f209225e 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/AnythingButCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/AnythingButCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import java.util.ArrayList; import java.util.Iterator; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/Condition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/Condition.java similarity index 94% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/Condition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/Condition.java index fbb4276c7b..9890d5e0d3 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/Condition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/Condition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ConditionsBuilder.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ConditionsBuilder.java similarity index 97% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ConditionsBuilder.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ConditionsBuilder.java index 4e207663aa..961be85e5b 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ConditionsBuilder.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ConditionsBuilder.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ExistsCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ExistsCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ExistsCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ExistsCondition.java index 53c15bb297..c085ba6585 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/ExistsCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/ExistsCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/NumericCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/NumericCondition.java similarity index 97% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/NumericCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/NumericCondition.java index 5eb5374c7c..40eb16a75e 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/NumericCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/NumericCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import java.util.ArrayList; import java.util.List; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/PrefixCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/PrefixCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/PrefixCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/PrefixCondition.java index 633ed1fb02..ff5d0313ce 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/PrefixCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/PrefixCondition.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SpecifiedCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SpecifiedCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SpecifiedCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SpecifiedCondition.java index f9cc3fb5db..9eefb6b641 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SpecifiedCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SpecifiedCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SuffixCondition.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SuffixCondition.java similarity index 95% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SuffixCondition.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SuffixCondition.java index 805df0ee17..090df24834 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/condition/SuffixCondition.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/condition/SuffixCondition.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.filter.condition; +package org.apache.eventmesh.function.filter.condition; import com.fasterxml.jackson.databind.JsonNode; diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/pattern/Pattern.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/pattern/Pattern.java similarity index 75% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/pattern/Pattern.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/pattern/Pattern.java index 8abb306b84..955d9f59ef 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/pattern/Pattern.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/pattern/Pattern.java @@ -15,10 +15,11 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.pattern; +package org.apache.eventmesh.function.filter.pattern; import org.apache.eventmesh.common.utils.JsonPathUtils; -import org.apache.eventmesh.filter.PatternEntry; +import org.apache.eventmesh.function.api.EventMeshFunction; +import org.apache.eventmesh.function.filter.PatternEntry; import org.apache.commons.lang3.StringUtils; @@ -29,12 +30,11 @@ import com.fasterxml.jackson.databind.JsonNode; import com.jayway.jsonpath.PathNotFoundException; -public class Pattern { - private List requiredFieldList = new ArrayList<>(); - private List dataList = new ArrayList<>(); +public class Pattern implements EventMeshFunction { - private String content; + private final List requiredFieldList = new ArrayList<>(); + private final List dataList = new ArrayList<>(); public void addRequiredFieldList(PatternEntry patternEntry) { this.requiredFieldList.add(patternEntry); @@ -45,19 +45,22 @@ public void addDataList(PatternEntry patternEntry) { } public boolean filter(String content) { - this.content = content; - // this.jsonNode = JacksonUtils.STRING_TO_JSONNODE(content); + return matchRequiredFieldList(content, requiredFieldList) && matchRequiredFieldList(content, dataList); + } - return matchRequiredFieldList(requiredFieldList) && matchRequiredFieldList(dataList); + @Override + public String apply(String content) { + // filter content + return filter(content) ? content : null; } - private boolean matchRequiredFieldList(List dataList) { + private boolean matchRequiredFieldList(String content, List dataList) { for (final PatternEntry patternEntry : dataList) { JsonNode jsonElement = null; try { // content:filter - String matchRes = JsonPathUtils.matchJsonPathValue(this.content, patternEntry.getPatternPath()); + String matchRes = JsonPathUtils.matchJsonPathValue(content, patternEntry.getPatternPath()); if (StringUtils.isNoneBlank(matchRes)) { jsonElement = JsonPathUtils.parseStrict(matchRes); diff --git a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/patternbuild/PatternBuilder.java b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/patternbuild/PatternBuilder.java similarity index 85% rename from eventmesh-filter/src/main/java/org/apache/eventmesh/filter/patternbuild/PatternBuilder.java rename to eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/patternbuild/PatternBuilder.java index 5f9a71d262..60193a4efa 100644 --- a/eventmesh-filter/src/main/java/org/apache/eventmesh/filter/patternbuild/PatternBuilder.java +++ b/eventmesh-function/eventmesh-function-filter/src/main/java/org/apache/eventmesh/function/filter/patternbuild/PatternBuilder.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter.patternbuild; +package org.apache.eventmesh.function.filter.patternbuild; import org.apache.eventmesh.common.exception.JsonException; -import org.apache.eventmesh.filter.PatternEntry; -import org.apache.eventmesh.filter.condition.Condition; -import org.apache.eventmesh.filter.condition.ConditionsBuilder; -import org.apache.eventmesh.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.PatternEntry; +import org.apache.eventmesh.function.filter.condition.Condition; +import org.apache.eventmesh.function.filter.condition.ConditionsBuilder; +import org.apache.eventmesh.function.filter.pattern.Pattern; import java.util.ArrayDeque; import java.util.Iterator; @@ -38,19 +38,33 @@ public class PatternBuilder { private static final ObjectMapper mapper = new ObjectMapper(); - public static Pattern build(String jsonStr) { - Pattern pattern = new Pattern(); - JsonNode jsonNode = null; + public static Pattern build(String jsonStr) { try { - jsonNode = mapper.readTree(jsonStr); + JsonNode jsonNode = mapper.readTree(jsonStr); + if (jsonNode.isEmpty() || !jsonNode.isObject()) { + return null; + } + return build(jsonNode); } catch (Exception e) { throw new JsonException("INVALID_JSON_STRING", e); } + } - if (jsonNode.isEmpty() || !jsonNode.isObject()) { - return null; + public static Pattern build(Map conditionMap) { + try { + JsonNode jsonNode = mapper.valueToTree(conditionMap); + if (jsonNode.isEmpty() || !jsonNode.isObject()) { + return null; + } + return build(jsonNode); + } catch (Exception e) { + throw new JsonException("INVALID_MAP", e); } + } + + public static Pattern build(JsonNode jsonNode) { + Pattern pattern = new Pattern(); // iter all json data Iterator> iterator = jsonNode.fields(); diff --git a/eventmesh-filter/src/test/java/org/apache/eventmesh/filter/PatternTest.java b/eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function/filter/PatternTest.java similarity index 82% rename from eventmesh-filter/src/test/java/org/apache/eventmesh/filter/PatternTest.java rename to eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function/filter/PatternTest.java index 207992b0c1..bc0aeff4ea 100644 --- a/eventmesh-filter/src/test/java/org/apache/eventmesh/filter/PatternTest.java +++ b/eventmesh-function/eventmesh-function-filter/src/test/java/org/apache/eventmesh/function/filter/PatternTest.java @@ -15,10 +15,15 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.filter; +package org.apache.eventmesh.function.filter; -import org.apache.eventmesh.filter.pattern.Pattern; -import org.apache.eventmesh.filter.patternbuild.PatternBuilder; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -144,4 +149,20 @@ public void testAnythingButFilter() { Assertions.assertEquals(false, res); } + @Test + public void testPrefixFilterMap() { + // Create the inner Map representing {prefix=eventmesh.} + Map innerMap = new HashMap<>(); + innerMap.put("prefix", "eventmesh."); + // Create a List representing [{prefix=eventmesh.}] + List> sourceList = Collections.singletonList(innerMap); + // Create the condition representing {source=[{prefix=eventmesh.}]} + Map condition = new HashMap<>(); + condition.put("source", sourceList); + + Pattern pattern = PatternBuilder.build(condition); + Boolean res = pattern.filter(event); + Assertions.assertEquals(true, res); + } + } diff --git a/eventmesh-function/eventmesh-function-transformer/build.gradle b/eventmesh-function/eventmesh-function-transformer/build.gradle new file mode 100644 index 0000000000..6939bbd483 --- /dev/null +++ b/eventmesh-function/eventmesh-function-transformer/build.gradle @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +dependencies { + implementation project(":eventmesh-common") + implementation project(":eventmesh-function:eventmesh-function-api") +} diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/ConstantTransformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/ConstantTransformer.java similarity index 95% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/ConstantTransformer.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/ConstantTransformer.java index dd7c20aace..ae77f149f7 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/ConstantTransformer.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/ConstantTransformer.java @@ -15,7 +15,7 @@ * limitations under the License. 
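Editor's note: the new testPrefixFilterMap above exercises the Map-based PatternBuilder overload; the same shape works from application code. A small sketch mirroring the test, with a hypothetical event payload:

import org.apache.eventmesh.function.filter.pattern.Pattern;
import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder;

import java.util.Collections;
import java.util.Map;

public class MapConditionExample {
    public static void main(String[] args) {
        // {"source":[{"prefix":"eventmesh."}]} expressed as a Map instead of a JSON string
        Map<String, Object> condition = Collections.singletonMap("source",
            Collections.singletonList(Collections.singletonMap("prefix", "eventmesh.")));

        Pattern pattern = PatternBuilder.build(condition);
        String event = "{\"source\":\"eventmesh.demo\",\"type\":\"example\"}"; // hypothetical payload
        System.out.println(pattern.filter(event)); // expected: true
    }
}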
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; public class ConstantTransformer implements Transformer { diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/JsonPathParser.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/JsonPathParser.java similarity index 85% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/JsonPathParser.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/JsonPathParser.java index a0ebde12d2..c578310dc4 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/JsonPathParser.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/JsonPathParser.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import org.apache.eventmesh.common.utils.JsonPathUtils; @@ -35,6 +35,19 @@ public List getVariablesList() { return variablesList; } + /** + * parser input jsonpath map into variable list + * + * @param jsonPathMap jsonpath map + */ + public JsonPathParser(Map jsonPathMap) { + for (Map.Entry entry : jsonPathMap.entrySet()) { + String name = entry.getKey(); + String value = entry.getValue(); + variablesList.add(new Variable(name, value)); + } + } + /** * parser input jsonpath string into variable list * diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/OriginalTransformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/OriginalTransformer.java similarity index 94% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/OriginalTransformer.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/OriginalTransformer.java index 61aa059d59..59ce0350eb 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/OriginalTransformer.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/OriginalTransformer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; class OriginalTransformer implements Transformer { @@ -23,4 +23,5 @@ class OriginalTransformer implements Transformer { public String transform(String json) { return json; } + } diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Template.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Template.java similarity index 96% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Template.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Template.java index 19c3b5cec3..29d975c371 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Template.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Template.java @@ -15,7 +15,7 @@ * limitations under the License. 
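Editor's note: the added JsonPathParser(Map) constructor builds the variable list directly from a name-to-JSONPath map. A short sketch, assuming JsonPathParser is accessible outside TransformerBuilder (variable names and paths are illustrative):

import org.apache.eventmesh.function.transformer.JsonPathParser;

import java.util.LinkedHashMap;
import java.util.Map;

public class JsonPathMapExample {
    public static void main(String[] args) {
        // each entry becomes a Variable(name, jsonPath) in the parser's variable list
        Map<String, String> jsonPathMap = new LinkedHashMap<>();
        jsonPathMap.put("data-name", "$.data.name");   // hypothetical variable
        jsonPathMap.put("event-source", "$.source");   // hypothetical variable
        JsonPathParser parser = new JsonPathParser(jsonPathMap);
        System.out.println(parser.getVariablesList().size()); // 2
    }
}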
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import org.apache.commons.text.StringSubstitutor; diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TemplateTransformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TemplateTransformer.java similarity index 96% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TemplateTransformer.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TemplateTransformer.java index bc9907ff48..69cee68269 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TemplateTransformer.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TemplateTransformer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import java.util.List; diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformException.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformException.java similarity index 95% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformException.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformException.java index 1b11a29d80..aeb827fc88 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformException.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformException.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; /** * Transform exception diff --git a/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java new file mode 100644 index 0000000000..be0e815808 --- /dev/null +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Transformer.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.function.transformer; + +import org.apache.eventmesh.common.exception.EventMeshException; +import org.apache.eventmesh.function.api.EventMeshFunction; + +import com.fasterxml.jackson.core.JsonProcessingException; + +/** + * EventMesh transformer interface, specified transformer implementation includes: + * 1. Constant + * 2. Original + * 3. Template + */ +public interface Transformer extends EventMeshFunction { + + String transform(String json) throws JsonProcessingException; + + @Override + default String apply(String content) { + try { + return transform(content); + } catch (JsonProcessingException e) { + throw new EventMeshException("Failed to transform content", e); + } + } + +} diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerBuilder.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerBuilder.java similarity index 69% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerBuilder.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerBuilder.java index e7277af73c..916f1ef7bc 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerBuilder.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerBuilder.java @@ -15,7 +15,9 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; + +import java.util.Map; public class TransformerBuilder { @@ -32,9 +34,23 @@ public static Transformer buildTransformer(TransformerParam transformerParam) { } } - public static Transformer buildTemplateTransFormer(String jsonContent, String template) { - JsonPathParser jsonPathParser = new JsonPathParser(jsonContent); + /** + * build template transformer + * @param jsonContent json content, support string and map, other type will throw IllegalArgumentException + * @param template template string + * @return transformer + */ + @SuppressWarnings("unchecked") + public static Transformer buildTemplateTransFormer(Object jsonContent, String template) { Template templateEntry = new Template(template); + JsonPathParser jsonPathParser; + if (jsonContent instanceof String) { + jsonPathParser = new JsonPathParser((String) jsonContent); + } else if (jsonContent instanceof Map) { + jsonPathParser = new JsonPathParser((Map) jsonContent); + } else { + throw new TransformException("invalid json content"); + } return new TemplateTransformer(jsonPathParser, templateEntry); } diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerParam.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerParam.java similarity index 97% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerParam.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerParam.java index d747d7be4c..915111e01d 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerParam.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerParam.java @@ -15,7 +15,7 @@ * limitations under the License. 
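Editor's note: Transformer now extends EventMeshFunction with a default apply() that wraps transform(), and buildTemplateTransFormer accepts either a JSON string or a Map. A sketch mirroring the new test below, with a hypothetical event payload:

import org.apache.eventmesh.function.transformer.Transformer;
import org.apache.eventmesh.function.transformer.TransformerBuilder;

import java.util.Collections;
import java.util.Map;

public class TemplateTransformerExample {
    public static void main(String[] args) {
        Map<String, String> jsonPathMap = Collections.singletonMap("data-name", "$.data.name");
        String template = "data name is ${data-name}";

        Transformer transformer = TransformerBuilder.buildTemplateTransFormer(jsonPathMap, template);

        String event = "{\"data\":{\"name\":\"test-transformer\"}}"; // hypothetical payload

        // apply() comes from EventMeshFunction and wraps transform(),
        // rethrowing JsonProcessingException as an EventMeshException
        System.out.println(transformer.apply(event)); // -> "data name is test-transformer"
    }
}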
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; public class TransformerParam { diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerType.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerType.java similarity index 97% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerType.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerType.java index 2dc7809478..969c49ce80 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/TransformerType.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/TransformerType.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; import java.util.Objects; diff --git a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Variable.java b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Variable.java similarity index 96% rename from eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Variable.java rename to eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Variable.java index c9259d335c..aee80e1454 100644 --- a/eventmesh-transformer/src/main/java/org/apache/eventmesh/transformer/Variable.java +++ b/eventmesh-function/eventmesh-function-transformer/src/main/java/org/apache/eventmesh/function/transformer/Variable.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; public class Variable { diff --git a/eventmesh-transformer/src/test/java/org/apache/eventmesh/transformer/TransformTest.java b/eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function/transformer/TransformTest.java similarity index 88% rename from eventmesh-transformer/src/test/java/org/apache/eventmesh/transformer/TransformTest.java rename to eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function/transformer/TransformTest.java index a55cde0baf..f9a444e8f9 100644 --- a/eventmesh-transformer/src/test/java/org/apache/eventmesh/transformer/TransformTest.java +++ b/eventmesh-function/eventmesh-function-transformer/src/test/java/org/apache/eventmesh/function/transformer/TransformTest.java @@ -15,7 +15,10 @@ * limitations under the License. 
*/ -package org.apache.eventmesh.transformer; +package org.apache.eventmesh.function.transformer; + +import java.util.Collections; +import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -139,4 +142,19 @@ public void testTemplateTransFormerWithConstant() throws JsonProcessingException output); } + @Test + public void testTemplateTransFormerWithStringValueMap() throws JsonProcessingException { + Map content = Collections.singletonMap("data-name", "$.data.name"); + + String template = "Transformers test:data name is ${data-name}"; + Transformer transform = TransformerBuilder.buildTemplateTransFormer(content, template); + String output = transform.transform(EVENT); + Assertions.assertEquals("Transformers test:data name is test-transformer", output); + + Transformer transformer1 = TransformerBuilder.buildTemplateTransFormer(content, template); + String output1 = transformer1.transform(EVENT); + Assertions.assertEquals("Transformers test:data name is test-transformer", output1); + + } + } diff --git a/eventmesh-meta/eventmesh-meta-etcd/build.gradle b/eventmesh-meta/eventmesh-meta-etcd/build.gradle index 733776548b..201f3f36b9 100644 --- a/eventmesh-meta/eventmesh-meta-etcd/build.gradle +++ b/eventmesh-meta/eventmesh-meta-etcd/build.gradle @@ -16,7 +16,7 @@ */ dependencies { - implementation ("io.etcd:jetcd-core:0.3.0") + implementation ("io.etcd:jetcd-core:0.8.4") implementation project(":eventmesh-meta:eventmesh-meta-api") implementation project(":eventmesh-common") testImplementation "org.mockito:mockito-core" diff --git a/eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java b/eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java new file mode 100644 index 0000000000..a20564ae01 --- /dev/null +++ b/eventmesh-meta/eventmesh-meta-etcd/src/test/java/org/apache/eventmesh/registry/etcd/service/EtcdCustomServiceTest.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.registry.etcd.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +import org.apache.eventmesh.api.meta.bo.EventMeshAppSubTopicInfo; +import org.apache.eventmesh.api.meta.bo.EventMeshServicePubTopicInfo; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.meta.etcd.service.EtcdCustomService; + +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.junit.jupiter.MockitoExtension; + +import io.etcd.jetcd.ByteSequence; +import io.etcd.jetcd.Client; +import io.etcd.jetcd.KV; +import io.etcd.jetcd.KeyValue; +import io.etcd.jetcd.kv.GetResponse; +import io.etcd.jetcd.options.GetOption; + +@ExtendWith(MockitoExtension.class) +public class EtcdCustomServiceTest { + + @Mock + private Client etcdClient; + + @Mock + private KV kvClient; + + @Mock + private KeyValue keyValue; + + @Mock + private GetResponse getResponse; + + @Mock + private CompletableFuture futureResponse; + + @InjectMocks + private EtcdCustomService etcdCustomService; + + @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + when(etcdClient.getKVClient()).thenReturn(kvClient); + } + + @Test + public void testFindEventMeshServicePubTopicInfos() throws Exception { + + EventMeshServicePubTopicInfo mockInfo = new EventMeshServicePubTopicInfo(); + mockInfo.setService("testService"); + mockInfo.setTopics(Collections.unmodifiableSet(new HashSet<>(Arrays.asList("topic1", "topic2")))); + + String mockValue = JsonUtils.toJSONString(mockInfo); + ByteSequence mockByteSequence = ByteSequence.from(mockValue, StandardCharsets.UTF_8); + + when(keyValue.getValue()).thenReturn(mockByteSequence); + when(getResponse.getKvs()).thenReturn(Arrays.asList(keyValue)); + when(futureResponse.get()).thenReturn(getResponse); + when(kvClient.get(any(ByteSequence.class), any(GetOption.class))).thenReturn(futureResponse); + + List result = etcdCustomService.findEventMeshServicePubTopicInfos(); + assertNotNull(result); + assertEquals(1, result.size()); + EventMeshServicePubTopicInfo resultInfo = result.get(0); + assertEquals("testService", resultInfo.getService()); + assertEquals(new HashSet<>(Arrays.asList("topic1", "topic2")), resultInfo.getTopics()); + } + + + @Test + public void testFindEventMeshAppSubTopicInfoByGroup() throws Exception { + + String group = "testGroup"; + EventMeshAppSubTopicInfo mockInfo = new EventMeshAppSubTopicInfo(); + + String mockValue = JsonUtils.toJSONString(mockInfo); + ByteSequence mockByteSequence = ByteSequence.from(mockValue, StandardCharsets.UTF_8); + + when(keyValue.getValue()).thenReturn(mockByteSequence); + when(kvClient.get(any(ByteSequence.class), any(GetOption.class))).thenReturn(futureResponse); + when(futureResponse.get()).thenReturn(getResponse); + when(getResponse.getKvs()).thenReturn(Collections.singletonList(keyValue)); + + EventMeshAppSubTopicInfo result = etcdCustomService.findEventMeshAppSubTopicInfoByGroup(group); + + assertNotNull(result); + } + +} diff --git 
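Editor's note: the jetcd bump from 0.3.0 to 0.8.4 and the new mock-based test both target the asynchronous KV client. A rough sketch of the API the test is mocking, assuming a reachable etcd endpoint (address and key are hypothetical):

import java.nio.charset.StandardCharsets;

import io.etcd.jetcd.ByteSequence;
import io.etcd.jetcd.Client;
import io.etcd.jetcd.KV;
import io.etcd.jetcd.kv.GetResponse;

public class JetcdGetExample {
    public static void main(String[] args) throws Exception {
        // endpoint is hypothetical; jetcd 0.8.x hands back CompletableFutures from the KV client
        Client client = Client.builder().endpoints("http://127.0.0.1:2379").build();
        KV kv = client.getKVClient();
        ByteSequence key = ByteSequence.from("/eventmesh/example", StandardCharsets.UTF_8);
        GetResponse response = kv.get(key).get();
        response.getKvs().forEach(keyValue ->
            System.out.println(keyValue.getValue().toString(StandardCharsets.UTF_8)));
        client.close();
    }
}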
a/eventmesh-meta/eventmesh-meta-raft/build.gradle b/eventmesh-meta/eventmesh-meta-raft/build.gradle index 386b5c6e72..210e348c86 100644 --- a/eventmesh-meta/eventmesh-meta-raft/build.gradle +++ b/eventmesh-meta/eventmesh-meta-raft/build.gradle @@ -16,13 +16,15 @@ */ plugins { - id 'com.google.protobuf' version '0.8.17' + id 'com.google.protobuf' version '0.9.4' } -def grpcVersion = '1.50.2' // CURRENT_GRPC_VERSION -def protobufVersion = '3.21.5' +def grpcVersion = '1.68.0' +def protobufVersion = '3.25.4' def protocVersion = protobufVersion +def jraftVersion = '1.3.14' + dependencies { implementation ("io.grpc:grpc-protobuf:${grpcVersion}") { exclude group: "com.google.protobuf", module: "protobuf-java" @@ -36,10 +38,9 @@ dependencies { implementation project(":eventmesh-meta:eventmesh-meta-api") implementation project(":eventmesh-common") - implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.0.1' - implementation "com.alipay.sofa:jraft-core:1.3.14" - implementation "com.alipay.sofa:rpc-grpc-impl:1.3.14" - testImplementation 'org.junit.jupiter:junit-jupiter:5.6.0' + implementation "com.alipay.sofa:jraft-core:${jraftVersion}" + implementation "com.alipay.sofa:rpc-grpc-impl:${jraftVersion}" + testImplementation 'org.junit.jupiter:junit-jupiter' } protobuf { diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/Application.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/Application.java index 3252a52e3e..b28f6387a9 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/Application.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/Application.java @@ -17,10 +17,10 @@ package org.apache.eventmesh.openconnect; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.SinkConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import org.apache.eventmesh.openconnect.api.ConnectorCreateService; -import org.apache.eventmesh.openconnect.api.config.Config; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; import org.apache.eventmesh.openconnect.api.connector.Connector; import org.apache.eventmesh.openconnect.api.sink.Sink; import org.apache.eventmesh.openconnect.api.source.Source; diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SinkWorker.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SinkWorker.java index a62b1b19ba..57ad4b8ec3 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SinkWorker.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SinkWorker.java @@ -22,11 +22,11 @@ import org.apache.eventmesh.client.tcp.common.MessageUtils; import org.apache.eventmesh.client.tcp.common.ReceiveMsgHook; import org.apache.eventmesh.client.tcp.conf.EventMeshTCPClientConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import org.apache.eventmesh.common.protocol.SubscriptionMode; import org.apache.eventmesh.common.protocol.SubscriptionType; import org.apache.eventmesh.common.protocol.tcp.UserAgent; import org.apache.eventmesh.common.utils.SystemUtils; -import 
org.apache.eventmesh.openconnect.api.config.SinkConfig; import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; import org.apache.eventmesh.openconnect.api.sink.Sink; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SourceWorker.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SourceWorker.java index 6dcbcd2994..2a2162a7af 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SourceWorker.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/SourceWorker.java @@ -24,19 +24,19 @@ import org.apache.eventmesh.client.tcp.common.MessageUtils; import org.apache.eventmesh.client.tcp.conf.EventMeshTCPClientConfig; import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; import org.apache.eventmesh.common.exception.EventMeshException; import org.apache.eventmesh.common.protocol.tcp.OPStatus; import org.apache.eventmesh.common.protocol.tcp.Package; import org.apache.eventmesh.common.protocol.tcp.UserAgent; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.common.utils.SystemUtils; -import org.apache.eventmesh.openconnect.api.callback.SendExcepionContext; -import org.apache.eventmesh.openconnect.api.callback.SendMessageCallback; -import org.apache.eventmesh.openconnect.api.callback.SendResult; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; import org.apache.eventmesh.openconnect.api.source.Source; -import org.apache.eventmesh.openconnect.offsetmgmt.api.config.OffsetStorageConfig; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendMessageCallback; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffsetManagement; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.DefaultOffsetManagementServiceImpl; @@ -152,8 +152,8 @@ public void init() { .map(storageType -> EventMeshExtensionFactory.getExtension(OffsetManagementService.class, storageType)) .orElse(new DefaultOffsetManagementServiceImpl()); this.offsetManagementService.initialize(offsetStorageConfig); - this.offsetStorageWriter = new OffsetStorageWriterImpl(source.name(), offsetManagementService); - this.offsetStorageReader = new OffsetStorageReaderImpl(source.name(), offsetManagementService); + this.offsetStorageWriter = new OffsetStorageWriterImpl(offsetManagementService); + this.offsetStorageReader = new OffsetStorageReaderImpl(offsetManagementService); } @Override @@ -264,8 +264,8 @@ private SendResult convertToSendResult(CloudEvent event) { return result; } - private SendExcepionContext convertToExceptionContext(CloudEvent event, Throwable cause) { - SendExcepionContext exceptionContext = new SendExcepionContext(); + private SendExceptionContext convertToExceptionContext(CloudEvent event, Throwable cause) { + SendExceptionContext exceptionContext = new SendExceptionContext(); 
exceptionContext.setTopic(event.getId()); exceptionContext.setMessageId(event.getId()); exceptionContext.setCause(cause); diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/Connector.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/Connector.java index 82993b198b..07e44aea94 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/Connector.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/Connector.java @@ -17,13 +17,14 @@ package org.apache.eventmesh.openconnect.api.connector; -import org.apache.eventmesh.openconnect.api.config.Config; +import org.apache.eventmesh.common.ComponentLifeCycle; +import org.apache.eventmesh.common.config.connector.Config; import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; /** * Connector */ -public interface Connector { +public interface Connector extends ComponentLifeCycle { /** * Returns the class type of the configuration for this Connector. @@ -33,8 +34,7 @@ public interface Connector { Class configClass(); /** - * This init method is obsolete. For detailed discussion, - * please see here + * This init method is obsolete. For detailed discussion, please see here *

* Initializes the Connector with the provided configuration. * @@ -52,13 +52,6 @@ public interface Connector { */ void init(ConnectorContext connectorContext) throws Exception; - /** - * Starts the Connector. - * - * @throws Exception if the start operation fails - */ - void start() throws Exception; - /** * Commits the specified ConnectRecord object. * @@ -74,10 +67,11 @@ public interface Connector { String name(); /** - * Stops the Connector. + * This method will be called when an exception occurs while processing a ConnectRecord object. This method can be used to handle the exception, + * such as logging error information, or stopping the connector's operation when an exception occurs. * - * @throws Exception if stopping fails + * @param record The ConnectRecord object that was being processed when the exception occurred */ - void stop() throws Exception; + void onException(ConnectRecord record); } diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnector.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnector.java index 70b62c1200..9b271746f3 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnector.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnector.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.openconnect.api.connector; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; import org.apache.eventmesh.openconnect.api.sink.Sink; public abstract class SinkConnector implements Sink { diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java index a7aef4fff2..1ef048b06c 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SinkConnectorContext.java @@ -17,7 +17,10 @@ package org.apache.eventmesh.openconnect.api.connector; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; +import org.apache.eventmesh.common.config.connector.SinkConfig; +import org.apache.eventmesh.common.remote.job.JobType; + +import java.util.Map; import lombok.Data; @@ -29,4 +32,8 @@ public class SinkConnectorContext implements ConnectorContext { public SinkConfig sinkConfig; + public Map runtimeConfig; + + public JobType jobType; + } diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnector.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnector.java index 100612662d..95279c2d41 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnector.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnector.java @@ -17,7 +17,7 @@ package org.apache.eventmesh.openconnect.api.connector; 
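Editor's note: with start()/stop() moved to ComponentLifeCycle and onException(ConnectRecord) added, error handling becomes part of the Connector contract. A hedged sketch of how a caller might route a failed record to the new hook (the helper below is illustrative, not code from this PR):

import org.apache.eventmesh.openconnect.api.connector.Connector;
import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord;

public final class ConnectorErrorHandling {

    // Illustrative helper: commit a record, and on failure let the connector decide
    // whether to log, retry, or stop itself via the new onException hook.
    static void commitWithFallback(Connector connector, ConnectRecord record) {
        try {
            connector.commit(record);
        } catch (Exception e) {
            connector.onException(record);
        }
    }
}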
-import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import org.apache.eventmesh.openconnect.api.source.Source; public abstract class SourceConnector implements Source { diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java index 14fd92218a..957452bb10 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/connector/SourceConnectorContext.java @@ -17,9 +17,14 @@ package org.apache.eventmesh.openconnect.api.connector; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.common.remote.offset.RecordPosition; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetStorageReader; +import java.util.List; +import java.util.Map; + import lombok.Data; /** @@ -32,4 +37,11 @@ public class SourceConnectorContext implements ConnectorContext { public SourceConfig sourceConfig; + public Map runtimeConfig; + + public JobType jobType; + + // initial record position + public List recordPositionList; + } diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/factory/ConnectorPluginFactory.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/factory/ConnectorPluginFactory.java new file mode 100644 index 0000000000..7a6f2e0a9c --- /dev/null +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/factory/ConnectorPluginFactory.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.openconnect.api.factory; + +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.SinkConnector; +import org.apache.eventmesh.openconnect.api.connector.SourceConnector; +import org.apache.eventmesh.spi.EventMeshExtensionFactory; + + +/** + * The factory to get connector {@link SourceConnector} and {@link SinkConnector} + */ +public class ConnectorPluginFactory { + + /** + * Get ConnectorCreateService instance by plugin name + * + * @param connectorPluginName plugin name + * @return ConnectorCreateService instance + */ + public static ConnectorCreateService createConnector(String connectorPluginName) { + return EventMeshExtensionFactory.getExtension(ConnectorCreateService.class, connectorPluginName); + } + +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/util/ConfigUtil.java b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/util/ConfigUtil.java index 93b2ba9582..066dae3385 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/util/ConfigUtil.java +++ b/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/util/ConfigUtil.java @@ -17,14 +17,15 @@ package org.apache.eventmesh.openconnect.util; -import org.apache.eventmesh.openconnect.api.config.Config; -import org.apache.eventmesh.openconnect.api.config.Constants; -import org.apache.eventmesh.openconnect.api.config.SinkConfig; -import org.apache.eventmesh.openconnect.api.config.SourceConfig; +import org.apache.eventmesh.common.config.connector.Config; +import org.apache.eventmesh.common.config.connector.Constants; +import org.apache.eventmesh.common.config.connector.SinkConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; import java.io.File; import java.io.FileNotFoundException; import java.net.URL; +import java.util.Map; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; @@ -67,6 +68,11 @@ public static T parse(Class c, String filePathName) throws Exception { return objectMapper.readValue(url, c); } + public static T parse(Map map, Class c) throws Exception { + ObjectMapper objectMapper = new ObjectMapper(); + return objectMapper.convertValue(map, c); + } + private static Config parseSourceConfig(Class c) throws Exception { String configFile = System.getProperty(Constants.ENV_SOURCE_CONFIG_FILE, System.getenv(Constants.ENV_SOURCE_CONFIG_FILE)); if (configFile == null || configFile.isEmpty()) { diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/build.gradle b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/build.gradle new file mode 100644 index 0000000000..70defef627 --- /dev/null +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/build.gradle @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
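Editor's note: ConnectorPluginFactory centralizes the SPI lookup and ConfigUtil gains a Map-based parse. A small sketch combining the two, assuming a hypothetical plugin name registered via the extension mechanism:

import org.apache.eventmesh.common.config.connector.SourceConfig;
import org.apache.eventmesh.openconnect.api.ConnectorCreateService;
import org.apache.eventmesh.openconnect.api.factory.ConnectorPluginFactory;
import org.apache.eventmesh.openconnect.util.ConfigUtil;

import java.util.HashMap;
import java.util.Map;

public class PluginLookupExample {
    public static void main(String[] args) throws Exception {
        // "http-source" is a hypothetical plugin name; it must match an SPI registration on the classpath
        ConnectorCreateService createService = ConnectorPluginFactory.createConnector("http-source");

        // runtime configuration arriving as a Map can now be bound to a typed config object
        Map<String, Object> raw = new HashMap<>();
        SourceConfig sourceConfig = ConfigUtil.parse(raw, SourceConfig.class);

        System.out.println(createService != null && sourceConfig != null);
    }
}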
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +dependencies { + implementation project(":eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api") + implementation project(":eventmesh-common") + testImplementation "org.mockito:mockito-core" + + compileOnly 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' + + implementation "io.grpc:grpc-core" + implementation "io.grpc:grpc-protobuf" + implementation "io.grpc:grpc-stub" + implementation "io.grpc:grpc-netty" + implementation "io.grpc:grpc-netty-shaded" +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/gradle.properties b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/gradle.properties new file mode 100644 index 0000000000..09957a9d24 --- /dev/null +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/gradle.properties @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +pluginType=offsetMgmt +pluginName=admin \ No newline at end of file diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java new file mode 100644 index 0000000000..993352a979 --- /dev/null +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/admin/AdminOffsetService.java @@ -0,0 +1,291 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.openconnect.offsetmgmt.admin; + +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceBlockingStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.TaskState; +import org.apache.eventmesh.common.remote.datasource.DataSourceType; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.common.remote.request.FetchPositionRequest; +import org.apache.eventmesh.common.remote.request.ReportPositionRequest; +import org.apache.eventmesh.common.remote.response.FetchPositionResponse; +import org.apache.eventmesh.common.utils.IPUtils; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.KeyValueStore; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.MemoryBasedKeyValueStore; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetManagementService; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Random; + +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.stub.StreamObserver; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.google.protobuf.Any; +import com.google.protobuf.UnsafeByteOperations; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class AdminOffsetService implements OffsetManagementService { + + private String adminServerAddr; + + private ManagedChannel channel; + + private AdminServiceStub adminServiceStub; + + private AdminServiceBlockingStub adminServiceBlockingStub; + + StreamObserver responseObserver; + + StreamObserver requestObserver; + + public KeyValueStore positionStore; + + private String jobId; + + private TaskState jobState; + + private DataSourceType dataSourceType; + + private DataSourceType dataSinkType; + + + @Override + public void start() { + + } + + @Override + public void stop() { + + } + + @Override + public void configure(OffsetStorageConfig config) { + OffsetManagementService.super.configure(config); + } + + @Override + public void persist() { + Map recordMap = positionStore.getKVMap(); + + List recordToSyncList = new ArrayList<>(); + for (Map.Entry entry : recordMap.entrySet()) { + RecordPosition recordPosition = new RecordPosition(entry.getKey(), entry.getValue()); + recordToSyncList.add(recordPosition); + } + + ReportPositionRequest reportPositionRequest = new ReportPositionRequest(); + reportPositionRequest.setJobID(jobId); + reportPositionRequest.setState(jobState); + 
reportPositionRequest.setDataSourceType(dataSourceType); + reportPositionRequest.setAddress(IPUtils.getLocalAddress()); + + reportPositionRequest.setRecordPositionList(recordToSyncList); + + log.debug("start report position request: {}", JsonUtils.toJSONString(reportPositionRequest)); + + Metadata metadata = Metadata.newBuilder() + .setType(ReportPositionRequest.class.getSimpleName()) + .build(); + Payload payload = Payload.newBuilder() + .setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportPositionRequest)))) + .build()) + .build(); + requestObserver.onNext(payload); + log.debug("end report position request: {}", JsonUtils.toJSONString(reportPositionRequest)); + + for (Map.Entry entry : recordMap.entrySet()) { + positionStore.remove(entry.getKey()); + } + } + + @Override + public void load() { + + } + + @Override + public void synchronize() { + + } + + @Override + public Map getPositionMap() { + // get from memory storage first + if (positionStore.getKVMap() == null || positionStore.getKVMap().isEmpty()) { + log.info("fetch position from admin server"); + FetchPositionRequest fetchPositionRequest = new FetchPositionRequest(); + fetchPositionRequest.setJobID(jobId); + fetchPositionRequest.setAddress(IPUtils.getLocalAddress()); + fetchPositionRequest.setDataSourceType(dataSourceType); + + Metadata metadata = Metadata.newBuilder() + .setType(FetchPositionRequest.class.getSimpleName()) + .build(); + + Payload request = Payload.newBuilder() + .setMetadata(metadata) + .setBody( + Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(fetchPositionRequest)))) + .build()) + .build(); + Payload response = adminServiceBlockingStub.invoke(request); + if (response.getMetadata().getType().equals(FetchPositionResponse.class.getSimpleName())) { + FetchPositionResponse fetchPositionResponse = + JsonUtils.parseObject(response.getBody().getValue().toStringUtf8(), FetchPositionResponse.class); + assert fetchPositionResponse != null; + if (fetchPositionResponse.isSuccess()) { + for (RecordPosition recordPosition : fetchPositionResponse.getRecordPosition()) { + positionStore.put(recordPosition.getRecordPartition(), recordPosition.getRecordOffset()); + } + } + } + } + log.info("memory position map {}", positionStore.getKVMap()); + return positionStore.getKVMap(); + } + + @Override + public RecordOffset getPosition(RecordPartition partition) { + // get from memory storage first + if (positionStore.get(partition) == null) { + log.info("fetch position from admin server"); + FetchPositionRequest fetchPositionRequest = new FetchPositionRequest(); + fetchPositionRequest.setJobID(jobId); + fetchPositionRequest.setAddress(IPUtils.getLocalAddress()); + fetchPositionRequest.setDataSourceType(dataSourceType); + RecordPosition fetchRecordPosition = new RecordPosition(); + fetchRecordPosition.setRecordPartition(partition); + fetchPositionRequest.setRecordPosition(fetchRecordPosition); + + Metadata metadata = Metadata.newBuilder() + .setType(FetchPositionRequest.class.getSimpleName()) + .build(); + + Payload request = Payload.newBuilder() + .setMetadata(metadata) + .setBody( + Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(fetchPositionRequest)))) + .build()) + .build(); + Payload response = adminServiceBlockingStub.invoke(request); + if (response.getMetadata().getType().equals(FetchPositionResponse.class.getSimpleName())) { + 
FetchPositionResponse fetchPositionResponse = + JsonUtils.parseObject(response.getBody().getValue().toStringUtf8(), FetchPositionResponse.class); + assert fetchPositionResponse != null; + if (fetchPositionResponse.isSuccess()) { + for (RecordPosition recordPosition : fetchPositionResponse.getRecordPosition()) { + positionStore.put(recordPosition.getRecordPartition(), recordPosition.getRecordOffset()); + } + } + } + } + log.info("memory record position {}", positionStore.get(partition)); + return positionStore.get(partition); + } + + @Override + public void putPosition(Map positions) { + positionStore.putAll(positions); + } + + @Override + public void putPosition(RecordPartition partition, RecordOffset position) { + positionStore.put(partition, position); + } + + @Override + public void removePosition(List partitions) { + if (partitions == null) { + return; + } + for (RecordPartition partition : partitions) { + positionStore.remove(partition); + } + } + + @Override + public void initialize(OffsetStorageConfig offsetStorageConfig) { + this.dataSourceType = offsetStorageConfig.getDataSourceType(); + this.dataSinkType = offsetStorageConfig.getDataSinkType(); + + this.adminServerAddr = getRandomAdminServerAddr(offsetStorageConfig.getOffsetStorageAddr()); + this.channel = ManagedChannelBuilder.forTarget(adminServerAddr) + .usePlaintext() + .build(); + this.adminServiceStub = AdminServiceGrpc.newStub(channel).withWaitForReady(); + this.adminServiceBlockingStub = AdminServiceGrpc.newBlockingStub(channel).withWaitForReady(); + + responseObserver = new StreamObserver() { + @Override + public void onNext(Payload response) { + log.info("receive message: {} ", response); + } + + @Override + public void onError(Throwable t) { + log.error("receive error message: {}", t.getMessage()); + } + + @Override + public void onCompleted() { + log.info("finished receive message and completed"); + } + }; + + requestObserver = adminServiceStub.invokeBiStream(responseObserver); + + this.positionStore = new MemoryBasedKeyValueStore<>(); + String offset = offsetStorageConfig.getExtensions().get("offset"); + if (offset != null) { + Map initialRecordOffsetMap = JsonUtils.parseTypeReferenceObject(offset, + new TypeReference>() { + }); + log.info("init record offset {}", initialRecordOffsetMap); + positionStore.putAll(initialRecordOffsetMap); + } + this.jobState = TaskState.RUNNING; + this.jobId = offsetStorageConfig.getExtensions().get("jobId"); + } + + private String getRandomAdminServerAddr(String adminServerAddrList) { + String[] addresses = adminServerAddrList.split(";"); + if (addresses.length == 0) { + throw new IllegalArgumentException("Admin server address list is empty"); + } + Random random = new Random(); + int randomIndex = random.nextInt(addresses.length); + return addresses[randomIndex]; + } +} diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetManagementService b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetManagementService new file mode 100644 index 0000000000..11b4466d79 --- /dev/null +++ 
b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-admin/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetManagementService @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +admin=org.apache.eventmesh.openconnect.offsetmgmt.admin.AdminOffsetService \ No newline at end of file diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/build.gradle b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/build.gradle index 97c3b8c33c..1338b0b7d8 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/build.gradle +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/build.gradle @@ -16,6 +16,7 @@ */ dependencies { + implementation project(":eventmesh-common") api project(":eventmesh-spi") api "org.slf4j:slf4j-api" diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendExcepionContext.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendExceptionContext.java similarity index 90% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendExcepionContext.java rename to eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendExceptionContext.java index 0311ceaef5..974b19a547 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendExcepionContext.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendExceptionContext.java @@ -15,15 +15,15 @@ * limitations under the License. 
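The META-INF/eventmesh file added above registers the new implementation under the key "admin", so the SPI loader can resolve it by name. A sketch of the lookup, assuming the offset-management plugin is resolved through EventMeshExtensionFactory in the same way the RegistryFactory further down resolves registry plugins:

    // Hypothetical lookup; "admin" matches the key declared in the SPI file above.
    OffsetManagementService offsetService =
        EventMeshExtensionFactory.getExtension(OffsetManagementService.class, "admin");
    // offsetService is expected to be an AdminOffsetService; initialize() and start()
    // still have to be invoked by the caller with an OffsetStorageConfig.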
*/ -package org.apache.eventmesh.openconnect.api.callback; +package org.apache.eventmesh.openconnect.offsetmgmt.api.callback; -public class SendExcepionContext { +public class SendExceptionContext { private String messageId; private String topic; private Throwable cause; - public SendExcepionContext() { + public SendExceptionContext() { } public String getMessageId() { diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendMessageCallback.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendMessageCallback.java similarity index 87% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendMessageCallback.java rename to eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendMessageCallback.java index fd6baba7ec..8346cf36b4 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendMessageCallback.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendMessageCallback.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.eventmesh.openconnect.api.callback; +package org.apache.eventmesh.openconnect.offsetmgmt.api.callback; /** * Message sending callback interface. @@ -24,5 +24,5 @@ public interface SendMessageCallback { void onSuccess(SendResult sendResult); - void onException(SendExcepionContext sendExcepionContext); + void onException(SendExceptionContext sendExceptionContext); } diff --git a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendResult.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendResult.java similarity index 95% rename from eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendResult.java rename to eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendResult.java index 8cd861f6de..9afc745f3d 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-java/src/main/java/org/apache/eventmesh/openconnect/api/callback/SendResult.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/callback/SendResult.java @@ -15,7 +15,7 @@ * limitations under the License. 
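With the typo fixed in the rename (SendExcepionContext becomes SendExceptionContext), connector code implementing the relocated callback looks like the following; a minimal sketch, assuming slf4j-style logging and using only the accessors shown in this patch:

    SendMessageCallback callback = new SendMessageCallback() {
        @Override
        public void onSuccess(SendResult sendResult) {
            log.info("message delivered: {}", sendResult);
        }

        @Override
        public void onException(SendExceptionContext context) {
            log.warn("message {} failed to send", context.getMessageId());
        }
    };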
*/ -package org.apache.eventmesh.openconnect.api.callback; +package org.apache.eventmesh.openconnect.offsetmgmt.api.callback; public class SendResult { diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java index 119f058b58..0a41e18f7c 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/ConnectRecord.java @@ -17,22 +17,45 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.data; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordPosition; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendMessageCallback; + import java.util.Objects; import java.util.Set; +import java.util.UUID; + +import lombok.Getter; +import lombok.Setter; /** * SourceDataEntries are generated by SourceTasks and passed to specific message queue to store. */ +@Getter public class ConnectRecord { + private final String recordId = UUID.randomUUID().toString(); + + @Setter private Long timestamp; + @Setter private Object data; + @Setter private RecordPosition position; + @Setter private KeyValue extensions; + @Setter + private SendMessageCallback callback; + + public ConnectRecord() { + + } + public ConnectRecord(RecordPartition recordPartition, RecordOffset recordOffset, Long timestamp) { this(recordPartition, recordOffset, timestamp, null); @@ -40,43 +63,15 @@ public ConnectRecord(RecordPartition recordPartition, RecordOffset recordOffset, public ConnectRecord(RecordPartition recordPartition, RecordOffset recordOffset, Long timestamp, Object data) { - this.position = new RecordPosition(recordPartition, recordOffset); - this.timestamp = timestamp; - this.data = data; - } - - public Long getTimestamp() { - return timestamp; - } - - public void setTimestamp(Long timestamp) { + if (recordPartition == null || recordOffset == null) { + this.position = null; + } else { + this.position = new RecordPosition(recordPartition, recordOffset); + } this.timestamp = timestamp; - } - - public Object getData() { - return data; - } - - public void setData(Object data) { this.data = data; } - public KeyValue getExtensions() { - return extensions; - } - - public void setExtensions(KeyValue extensions) { - this.extensions = extensions; - } - - public RecordPosition getPosition() { - return position; - } - - public void setPosition(RecordPosition position) { - this.position = position; - } - public void addExtension(KeyValue extensions) { if (this.extensions == null) { this.extensions = new DefaultKeyValue(); @@ -124,19 +119,20 @@ public boolean equals(Object o) { return false; } ConnectRecord that = (ConnectRecord) o; - return Objects.equals(timestamp, that.timestamp) && Objects.equals(data, that.data) + return Objects.equals(recordId, that.recordId) && Objects.equals(timestamp, that.timestamp) && Objects.equals(data, that.data) && 
Objects.equals(position, that.position) && Objects.equals(extensions, that.extensions); } @Override public int hashCode() { - return Objects.hash(timestamp, data, position, extensions); + return Objects.hash(recordId, timestamp, data, position, extensions); } @Override public String toString() { return "ConnectRecord{" - + "timestamp=" + timestamp + + "recordId=" + recordId + + ", timestamp=" + timestamp + ", data=" + data + ", position=" + position + ", extensions=" + extensions diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java index a0390c1892..891df482be 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/DefaultKeyValue.java @@ -23,6 +23,11 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter public class DefaultKeyValue implements KeyValue { private final Map properties; diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffsetManagement.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffsetManagement.java index 2eaefbef29..7e6b5042f8 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffsetManagement.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/data/RecordOffsetManagement.java @@ -17,6 +17,10 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.data; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordPosition; + import java.util.Collections; import java.util.Deque; import java.util.HashMap; @@ -42,12 +46,13 @@ public RecordOffsetManagement() { /** * submit record + * * @param position * @return */ public SubmittedPosition submitRecord(RecordPosition position) { SubmittedPosition submittedPosition = new SubmittedPosition(position); - records.computeIfAbsent(position.getPartition(), e -> new LinkedList<>()).add(submittedPosition); + records.computeIfAbsent(position.getRecordPartition(), e -> new LinkedList<>()).add(submittedPosition); // ensure thread safety in operation synchronized (this) { numUnacked.incrementAndGet(); @@ -63,7 +68,7 @@ private RecordOffset pollOffsetWhile(Deque submittedPositions RecordOffset offset = null; // Stop pulling if there is an uncommitted breakpoint while (canCommitHead(submittedPositions)) { - offset = submittedPositions.poll().getPosition().getOffset(); + offset = 
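The reworked ConnectRecord above assigns every instance a random recordId and folds it into equals() and hashCode(), so two records built from identical inputs are no longer equal. A short illustration, with partition, offset and timestamp as placeholders:

    ConnectRecord a = new ConnectRecord(partition, offset, timestamp, "payload");
    ConnectRecord b = new ConnectRecord(partition, offset, timestamp, "payload");

    // Each instance gets its own UUID-based recordId, so identity is per record:
    assert !a.getRecordId().equals(b.getRecordId());
    assert !a.equals(b); // equals() now includes recordId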
submittedPositions.poll().getPosition().getRecordOffset(); } return offset; } @@ -132,8 +137,8 @@ private synchronized void messageAcked() { } /** - * Contains a snapshot of offsets that can be committed for a source task and metadata for that offset commit - * (such as the number of messages for which offsets can and cannot be committed). + * Contains a snapshot of offsets that can be committed for a source task and metadata for that offset commit (such as the number of messages for + * which offsets can and cannot be committed). */ public static class CommittableOffsets { @@ -235,19 +240,19 @@ public void ack() { * @return */ public boolean remove() { - Deque deque = records.get(position.getPartition()); + Deque deque = records.get(position.getRecordPartition()); if (deque == null) { return false; } boolean result = deque.removeLastOccurrence(this); if (deque.isEmpty()) { - records.remove(position.getPartition()); + records.remove(position.getRecordPartition()); } if (result) { messageAcked(); } else { log.warn("Attempted to remove record from submitted queue for partition {}, " - + "but the record has not been submitted or has already been removed", position.getPartition()); + + "but the record has not been submitted or has already been removed", position.getRecordPartition()); } return result; } diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/DefaultOffsetManagementServiceImpl.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/DefaultOffsetManagementServiceImpl.java index e31fc358f0..be72097911 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/DefaultOffsetManagementServiceImpl.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/DefaultOffsetManagementServiceImpl.java @@ -17,8 +17,9 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.config.OffsetStorageConfig; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import java.util.List; import java.util.Map; @@ -51,27 +52,27 @@ public void synchronize() { } @Override - public Map getPositionMap() { + public Map getPositionMap() { return null; } @Override - public RecordOffset getPosition(ConnectorRecordPartition partition) { + public RecordOffset getPosition(RecordPartition partition) { return null; } @Override - public void putPosition(Map positions) { + public void putPosition(Map positions) { } @Override - public void putPosition(ConnectorRecordPartition partition, RecordOffset position) { + public void putPosition(RecordPartition partition, RecordOffset position) { } @Override - public void removePosition(List partitions) { + public void removePosition(List partitions) { } diff --git 
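RecordOffsetManagement now keys its bookkeeping by the RecordPartition/RecordOffset types from eventmesh-common. The intended flow, sketched with placeholder partition and offset values and assuming SubmittedPosition is the nested type returned by submitRecord() as in the hunk above, is to submit a position when a record is dispatched and acknowledge it once the sink confirms delivery:

    RecordOffsetManagement offsetManagement = new RecordOffsetManagement();

    RecordPosition position = new RecordPosition(partition, offset); // placeholders
    RecordOffsetManagement.SubmittedPosition submitted = offsetManagement.submitRecord(position);

    // ... once the sink / message queue confirms the record:
    submitted.ack();
    // Any unacked record submitted ahead of this one keeps the offset from being committed,
    // which is what pollOffsetWhile() enforces above.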
a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetManagementService.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetManagementService.java index ef1dc0d30d..62327a1ae9 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetManagementService.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetManagementService.java @@ -17,8 +17,9 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.config.OffsetStorageConfig; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import org.apache.eventmesh.spi.EventMeshExtensionType; import org.apache.eventmesh.spi.EventMeshSPI; @@ -49,12 +50,12 @@ default void configure(OffsetStorageConfig config) { } /** - * Persist position info in a persist store. + * Persist position info in a persisted store. */ void persist(); /** - * load position info in a persist store. + * load position info in a persisted store. */ void load(); @@ -68,24 +69,24 @@ default void configure(OffsetStorageConfig config) { * * @return */ - Map getPositionMap(); + Map getPositionMap(); - RecordOffset getPosition(ConnectorRecordPartition partition); + RecordOffset getPosition(RecordPartition partition); /** * Put a position info. */ - void putPosition(Map positions); + void putPosition(Map positions); - void putPosition(ConnectorRecordPartition partition, RecordOffset position); + void putPosition(RecordPartition partition, RecordOffset position); /** * Remove a position info. 
* * @param partitions */ - void removePosition(List partitions); + void removePosition(List partitions); - void initialize(OffsetStorageConfig connectorConfig); + void initialize(OffsetStorageConfig offsetStorageConfig); } diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReader.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReader.java index dc17e29840..30546b96cb 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReader.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReader.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import java.util.Collection; import java.util.Map; diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReaderImpl.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReaderImpl.java index efd087404e..ca4ad2c751 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReaderImpl.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageReaderImpl.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import java.util.Collection; import java.util.HashMap; @@ -26,29 +26,24 @@ public class OffsetStorageReaderImpl implements OffsetStorageReader { - private final String connectorName; - private OffsetManagementService offsetManagementService; - public OffsetStorageReaderImpl(String connectorName, OffsetManagementService offsetManagementService) { - this.connectorName = connectorName; + public OffsetStorageReaderImpl(OffsetManagementService offsetManagementService) { this.offsetManagementService = offsetManagementService; } @Override public RecordOffset readOffset(RecordPartition partition) { - ConnectorRecordPartition connectorRecordPartition = new ConnectorRecordPartition(connectorName, partition.getPartition()); - return offsetManagementService.getPositionMap().get(connectorRecordPartition); + return 
offsetManagementService.getPositionMap().get(partition); } @Override public Map readOffsets(Collection partitions) { Map result = new HashMap<>(); - Map allData = offsetManagementService.getPositionMap(); + Map allData = offsetManagementService.getPositionMap(); for (RecordPartition key : partitions) { - ConnectorRecordPartition connectorRecordPartition = new ConnectorRecordPartition(connectorName, key.getPartition()); - if (allData.containsKey(connectorRecordPartition)) { - result.put(key, allData.get(connectorRecordPartition)); + if (allData.containsKey(key)) { + result.put(key, allData.get(key)); } } return result; diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriter.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriter.java index f8b6bdc45e..fb30acc918 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriter.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriter.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import java.util.Map; diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriterImpl.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriterImpl.java index 3c5ed033a8..ef52602d60 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriterImpl.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-api/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/api/storage/OffsetStorageWriterImpl.java @@ -17,8 +17,8 @@ package org.apache.eventmesh.openconnect.offsetmgmt.api.storage; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordPartition; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import java.io.Closeable; import java.io.IOException; @@ -35,29 +35,25 @@ @Slf4j public class OffsetStorageWriterImpl implements OffsetStorageWriter, Closeable { - private final String connectorName; private final ExecutorService executorService = Executors.newSingleThreadExecutor(); - private OffsetManagementService offsetManagementService; + private final OffsetManagementService offsetManagementService; /** * Offset data in Connect format */ - private Map data = new 
HashMap<>(); - private Map toFlush = null; + private Map data = new HashMap<>(); + private Map toFlush = null; // Unique ID for each flush request to handle callbacks after timeouts private long currentFlushId = 0; - public OffsetStorageWriterImpl(String connectorName, OffsetManagementService offsetManagementService) { - this.connectorName = connectorName; + public OffsetStorageWriterImpl(OffsetManagementService offsetManagementService) { this.offsetManagementService = offsetManagementService; } @Override public void writeOffset(RecordPartition partition, RecordOffset offset) { - ConnectorRecordPartition extendRecordPartition; if (partition != null) { - extendRecordPartition = new ConnectorRecordPartition(connectorName, partition.getPartition()); - data.put(extendRecordPartition, offset); + data.put(partition, offset); } } diff --git a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-nacos/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/nacos/NacosConfigService.java b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-nacos/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/nacos/NacosConfigService.java index 8d4e9dade8..67c53d4d6d 100644 --- a/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-nacos/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/nacos/NacosConfigService.java +++ b/eventmesh-openconnect/eventmesh-openconnect-offsetmgmt-plugin/eventmesh-openconnect-offsetmgmt-nacos/src/main/java/org/apache/eventmesh/openconnect/offsetmgmt/nacos/NacosConfigService.java @@ -17,9 +17,9 @@ package org.apache.eventmesh.openconnect.offsetmgmt.nacos; -import org.apache.eventmesh.openconnect.offsetmgmt.api.config.OffsetStorageConfig; -import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffset; -import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.ConnectorRecordPartition; +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; +import org.apache.eventmesh.common.remote.offset.RecordOffset; +import org.apache.eventmesh.common.remote.offset.RecordPartition; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.KeyValueStore; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.MemoryBasedKeyValueStore; import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetManagementService; @@ -56,7 +56,7 @@ public class NacosConfigService implements OffsetManagementService { private Listener listener; - public KeyValueStore positionStore; + public KeyValueStore positionStore; @Override public void start() { @@ -68,19 +68,19 @@ public void start() { } // merge the updated connectorRecord & recordOffset to memory store - public void mergeOffset(ConnectorRecordPartition connectorRecordPartition, RecordOffset recordOffset) { - if (connectorRecordPartition == null || connectorRecordPartition.getPartition().isEmpty()) { + public void mergeOffset(RecordPartition recordPartition, RecordOffset recordOffset) { + if (recordPartition == null) { return; } - if (positionStore.getKVMap().containsKey(connectorRecordPartition)) { - RecordOffset existedOffset = positionStore.getKVMap().get(connectorRecordPartition); + if (positionStore.getKVMap().containsKey(recordPartition)) { + RecordOffset existedOffset = positionStore.getKVMap().get(recordPartition); // update if (!recordOffset.equals(existedOffset)) { - positionStore.put(connectorRecordPartition, recordOffset); + 
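With the ConnectorRecordPartition wrapper removed, the offset reader and writer are keyed directly by RecordPartition and take only the OffsetManagementService in their constructors. A minimal usage sketch against the constructors shown above; offsetManagementService, partition and offset are placeholders:

    OffsetStorageReader reader = new OffsetStorageReaderImpl(offsetManagementService);
    OffsetStorageWriter writer = new OffsetStorageWriterImpl(offsetManagementService);

    writer.writeOffset(partition, offset);        // staged in the writer's data map until the next flush
    RecordOffset latest = reader.readOffset(partition);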
positionStore.put(recordPartition, recordOffset); } } else { // add new position - positionStore.put(connectorRecordPartition, recordOffset); + positionStore.put(recordPartition, recordOffset); } } @@ -108,12 +108,12 @@ public void load() { @Override public void synchronize() { try { - Map recordMap = positionStore.getKVMap(); + Map recordMap = positionStore.getKVMap(); List> recordToSyncList = new ArrayList<>(); - for (Map.Entry entry : recordMap.entrySet()) { + for (Map.Entry entry : recordMap.entrySet()) { Map synchronizeMap = new HashMap<>(); - synchronizeMap.put("connectorRecordPartition", entry.getKey()); + synchronizeMap.put("recordPartition", entry.getKey()); synchronizeMap.put("recordOffset", entry.getValue()); recordToSyncList.add(synchronizeMap); } @@ -125,13 +125,14 @@ public void synchronize() { } @Override - public Map getPositionMap() { + public Map getPositionMap() { // get from memory storage first if (positionStore.getKVMap() == null || positionStore.getKVMap().isEmpty()) { try { - Map configMap = JacksonUtils.toObj(configService.getConfig(dataId, group, 5000L), - new TypeReference>() { + Map configMap = JacksonUtils.toObj(configService.getConfig(dataId, group, 5000L), + new TypeReference>() { }); + positionStore.putAll(configMap); log.info("nacos position map {}", configMap); return configMap; } catch (NacosException e) { @@ -143,12 +144,12 @@ public Map getPositionMap() { } @Override - public RecordOffset getPosition(ConnectorRecordPartition partition) { + public RecordOffset getPosition(RecordPartition partition) { // get from memory storage first if (positionStore.get(partition) == null) { try { - Map recordMap = JacksonUtils.toObj(configService.getConfig(dataId, group, 5000L), - new TypeReference>() { + Map recordMap = JacksonUtils.toObj(configService.getConfig(dataId, group, 5000L), + new TypeReference>() { }); log.info("nacos record position {}", recordMap.get(partition)); return recordMap.get(partition); @@ -161,21 +162,21 @@ public RecordOffset getPosition(ConnectorRecordPartition partition) { } @Override - public void putPosition(Map positions) { + public void putPosition(Map positions) { positionStore.putAll(positions); } @Override - public void putPosition(ConnectorRecordPartition partition, RecordOffset position) { + public void putPosition(RecordPartition partition, RecordOffset position) { positionStore.put(partition, position); } @Override - public void removePosition(List partitions) { + public void removePosition(List partitions) { if (partitions == null) { return; } - for (ConnectorRecordPartition partition : partitions) { + for (RecordPartition partition : partitions) { positionStore.remove(partition); } } @@ -206,13 +207,13 @@ public void receiveConfigInfo(String configInfo) { }); for (Map recordPartitionOffsetMap : recordOffsetList) { - ConnectorRecordPartition connectorRecordPartition = JacksonUtils.toObj( - JacksonUtils.toJson(recordPartitionOffsetMap.get("connectorRecordPartition")), - ConnectorRecordPartition.class); + RecordPartition recordPartition = JacksonUtils.toObj( + JacksonUtils.toJson(recordPartitionOffsetMap.get("recordPartition")), + RecordPartition.class); RecordOffset recordOffset = JacksonUtils.toObj(JacksonUtils.toJson(recordPartitionOffsetMap.get("recordOffset")), RecordOffset.class); // update the offset in memory store - mergeOffset(connectorRecordPartition, recordOffset); + mergeOffset(recordPartition, recordOffset); } } }; diff --git a/eventmesh-operator/config/samples/eventmesh_v1_runtime.yaml 
b/eventmesh-operator/config/samples/eventmesh_v1_runtime.yaml index fc640aa639..590928ca97 100644 --- a/eventmesh-operator/config/samples/eventmesh_v1_runtime.yaml +++ b/eventmesh-operator/config/samples/eventmesh_v1_runtime.yaml @@ -41,7 +41,6 @@ data: # HTTP Admin Server eventMesh.server.admin.http.port=10106 ########################## eventMesh tcp configuration ############################ - eventMesh.server.tcp.enabled=true eventMesh.server.tcp.readerIdleSeconds=120 eventMesh.server.tcp.writerIdleSeconds=120 eventMesh.server.tcp.allIdleSeconds=120 diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle index 0bc8809e86..e6ffc372b9 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-cloudevents/build.gradle @@ -20,10 +20,10 @@ dependencies { implementation "io.cloudevents:cloudevents-core" implementation "com.google.guava:guava" implementation "io.cloudevents:cloudevents-json-jackson" - implementation ("io.grpc:grpc-protobuf:1.64.0") { + implementation ("io.grpc:grpc-protobuf:1.68.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } - implementation("com.google.protobuf:protobuf-java:3.21.5") + implementation("com.google.protobuf:protobuf-java:3.25.4") implementation "io.cloudevents:cloudevents-protobuf" compileOnly 'org.projectlombok:lombok' diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle index 37ea099103..5929c72136 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-grpc/build.gradle @@ -24,8 +24,8 @@ repositories { mavenCentral() } -def grpcVersion = '1.64.0' // CURRENT_GRPC_VERSION -def protobufVersion = '3.21.5' +def grpcVersion = '1.68.0' +def protobufVersion = '3.25.4' def protocVersion = protobufVersion dependencies { @@ -36,7 +36,7 @@ dependencies { implementation "io.grpc:grpc-stub:${grpcVersion}" implementation "com.google.protobuf:protobuf-java-util:${protobufVersion}" implementation "javax.annotation:javax.annotation-api:1.3.2" - testImplementation 'org.junit.jupiter:junit-jupiter:5.6.0' + testImplementation 'org.junit.jupiter:junit-jupiter' } protobuf { diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle index af9ac9198a..d219c5dc03 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-http/build.gradle @@ -20,8 +20,8 @@ dependencies { implementation "io.cloudevents:cloudevents-core" implementation "com.google.guava:guava" implementation "io.cloudevents:cloudevents-json-jackson" - implementation ("io.grpc:grpc-protobuf:1.64.0") { + implementation ("io.grpc:grpc-protobuf:1.68.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } - implementation("com.google.protobuf:protobuf-java:3.21.5") + implementation("com.google.protobuf:protobuf-java:3.25.4") } diff --git a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle index 6dbb13157b..3f15d199ff 100644 --- a/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle +++ b/eventmesh-protocol-plugin/eventmesh-protocol-meshmessage/build.gradle @@ -18,10 +18,10 @@ dependencies { implementation 
project(":eventmesh-protocol-plugin:eventmesh-protocol-api") implementation "io.cloudevents:cloudevents-core" - implementation ("io.grpc:grpc-protobuf:1.64.0") { + implementation ("io.grpc:grpc-protobuf:1.68.0") { exclude group: "com.google.protobuf", module: "protobuf-java" } - implementation("com.google.protobuf:protobuf-java:3.21.5") + implementation("com.google.protobuf:protobuf-java:3.25.4") implementation "io.cloudevents:cloudevents-protobuf" testImplementation project(":eventmesh-protocol-plugin:eventmesh-protocol-api") diff --git a/eventmesh-registry/.gitignore b/eventmesh-registry/.gitignore new file mode 100644 index 0000000000..b63da4551b --- /dev/null +++ b/eventmesh-registry/.gitignore @@ -0,0 +1,42 @@ +.gradle +build/ +!gradle/wrapper/gradle-wrapper.jar +!**/src/main/**/build/ +!**/src/test/**/build/ + +### IntelliJ IDEA ### +.idea/modules.xml +.idea/jarRepositories.xml +.idea/compiler.xml +.idea/libraries/ +*.iws +*.iml +*.ipr +out/ +!**/src/main/**/out/ +!**/src/test/**/out/ + +### Eclipse ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache +bin/ +!**/src/main/**/bin/ +!**/src/test/**/bin/ + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ + +### VS Code ### +.vscode/ + +### Mac OS ### +.DS_Store \ No newline at end of file diff --git a/eventmesh-registry/build.gradle b/eventmesh-registry/build.gradle new file mode 100644 index 0000000000..d973dcedae --- /dev/null +++ b/eventmesh-registry/build.gradle @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ \ No newline at end of file diff --git a/eventmesh-registry/eventmesh-registry-api/build.gradle b/eventmesh-registry/eventmesh-registry-api/build.gradle new file mode 100644 index 0000000000..c0546b6169 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/build.gradle @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +dependencies { + implementation project(":eventmesh-spi") + implementation project(":eventmesh-common") + implementation "com.alibaba.nacos:nacos-client" + + compileOnly 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' +} \ No newline at end of file diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/NotifyEvent.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/NotifyEvent.java new file mode 100644 index 0000000000..fdef6a3285 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/NotifyEvent.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry; + +import java.util.List; + +import lombok.Getter; + +public class NotifyEvent { + + public NotifyEvent() { + + } + + public NotifyEvent(List instances) { + this(instances, false); + } + + public NotifyEvent(List instances, boolean isIncrement) { + this.isIncrement = isIncrement; + this.instances = instances; + } + + + // means whether it is an increment data + @Getter + private boolean isIncrement; + + @Getter + private List instances; +} diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/QueryInstances.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/QueryInstances.java new file mode 100644 index 0000000000..c8c7d61f4d --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/QueryInstances.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.registry; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Data; + +@Data +public class QueryInstances { + + private String serviceName; + private boolean health; + private Map extFields = new HashMap<>(); +} diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegisterServerInfo.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegisterServerInfo.java new file mode 100644 index 0000000000..0bf411c037 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegisterServerInfo.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry; + +import java.util.HashMap; +import java.util.Map; + +import lombok.Getter; +import lombok.Setter; +import lombok.ToString; + +@ToString +public class RegisterServerInfo { + + // different implementations will have different formats + @Getter + @Setter + private String serviceName; + + @Getter + @Setter + private String address; + + @Getter + @Setter + private boolean health; + @Getter + private Map metadata = new HashMap<>(); + @Getter + private Map extFields = new HashMap<>(); + + public void setMetadata(Map metadata) { + if (metadata == null) { + this.metadata.clear(); + return; + } + + this.metadata = metadata; + } + + public void addMetadata(String key, String value) { + this.metadata.put(key, value); + } + + public void setExtFields(Map extFields) { + if (extFields == null) { + this.extFields.clear(); + return; + } + + this.extFields = extFields; + } + + public void addExtFields(String key, Object value) { + this.extFields.put(key, value); + } +} diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryFactory.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryFactory.java new file mode 100644 index 0000000000..d757781c2b --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryFactory.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry; + +import org.apache.eventmesh.spi.EventMeshExtensionFactory; + +import java.util.HashMap; +import java.util.Map; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class RegistryFactory { + + private static final Map META_CACHE = new HashMap<>(16); + + public static RegistryService getInstance(String registryPluginType) { + return META_CACHE.computeIfAbsent(registryPluginType, RegistryFactory::registryBuilder); + } + + private static RegistryService registryBuilder(String registryPluginType) { + RegistryService registryServiceExt = EventMeshExtensionFactory.getExtension(RegistryService.class, registryPluginType); + if (registryServiceExt == null) { + String errorMsg = "can't load the registry plugin, please check."; + log.error(errorMsg); + throw new RuntimeException(errorMsg); + } + log.info("build registry plugin [{}] by type [{}] success", registryServiceExt.getClass().getSimpleName(), + registryPluginType); + return registryServiceExt; + } +} diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryListener.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryListener.java new file mode 100644 index 0000000000..81445fbe20 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryListener.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry; + +/** + * RegistryListener + */ +public interface RegistryListener { + + void onChange(NotifyEvent event) throws Exception; +} diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryService.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryService.java new file mode 100644 index 0000000000..63243cd339 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/RegistryService.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry; + + +import org.apache.eventmesh.registry.exception.RegistryException; +import org.apache.eventmesh.spi.EventMeshExtensionType; +import org.apache.eventmesh.spi.EventMeshSPI; + +import java.util.List; + +/** + * RegistryService + */ +@EventMeshSPI(eventMeshExtensionType = EventMeshExtensionType.REGISTRY) +public interface RegistryService { + void init() throws RegistryException; + + void shutdown() throws RegistryException; + + void subscribe(RegistryListener registryListener, String serviceName); + + void unsubscribe(RegistryListener registryListener, String serviceName); + + List selectInstances(QueryInstances serverInfo); + + boolean register(RegisterServerInfo registerInfo) throws RegistryException; + + boolean unRegister(RegisterServerInfo registerInfo) throws RegistryException; +} diff --git a/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/exception/RegistryException.java b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/exception/RegistryException.java new file mode 100644 index 0000000000..1aa61bd246 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-api/src/main/java/org/apache/eventmesh/registry/exception/RegistryException.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry.exception; + +public class RegistryException extends RuntimeException { + public RegistryException(String message) { + super(message); + } + + public RegistryException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/eventmesh-registry/eventmesh-registry-nacos/build.gradle b/eventmesh-registry/eventmesh-registry-nacos/build.gradle new file mode 100644 index 0000000000..d371f23812 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-nacos/build.gradle @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
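The new eventmesh-registry module exposes registration and discovery behind the RegistryService SPI above. A minimal end-to-end sketch, assuming the nacos plugin name and an illustrative group@@service key format; RegistryException is unchecked, so no explicit handling is required here:

    RegistryService registry = RegistryFactory.getInstance("nacos");
    registry.init();

    RegisterServerInfo self = new RegisterServerInfo();
    self.setServiceName("DEFAULT_GROUP@@eventmesh-admin"); // illustrative key
    self.setAddress("127.0.0.1:8081");
    self.setHealth(true);
    registry.register(self);

    QueryInstances query = new QueryInstances();
    query.setServiceName("DEFAULT_GROUP@@eventmesh-admin");
    query.setHealth(true);
    List<RegisterServerInfo> instances = registry.selectInstances(query);

    // RegistryListener is a single-method interface, so a lambda works here:
    registry.subscribe(event -> event.getInstances()
        .forEach(i -> log.info("instance {} healthy={}", i.getAddress(), i.isHealth())),
        "DEFAULT_GROUP@@eventmesh-admin");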
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +dependencies { + implementation "com.alibaba.nacos:nacos-client" + implementation project(":eventmesh-registry:eventmesh-registry-api") + implementation project(":eventmesh-common") + + compileOnly 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' +} \ No newline at end of file diff --git a/eventmesh-registry/eventmesh-registry-nacos/gradle.properties b/eventmesh-registry/eventmesh-registry-nacos/gradle.properties new file mode 100644 index 0000000000..cf067e20bf --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-nacos/gradle.properties @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +pluginType=registryCenter +pluginName=nacos \ No newline at end of file diff --git a/eventmesh-registry/eventmesh-registry-nacos/src/main/java/org/apache/eventmesh/registry/nacos/NacosDiscoveryService.java b/eventmesh-registry/eventmesh-registry-nacos/src/main/java/org/apache/eventmesh/registry/nacos/NacosDiscoveryService.java new file mode 100644 index 0000000000..54d9d8b9d3 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-nacos/src/main/java/org/apache/eventmesh/registry/nacos/NacosDiscoveryService.java @@ -0,0 +1,312 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.registry.nacos; + +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.registry.NotifyEvent; +import org.apache.eventmesh.registry.QueryInstances; +import org.apache.eventmesh.registry.RegisterServerInfo; +import org.apache.eventmesh.registry.RegistryListener; +import org.apache.eventmesh.registry.RegistryService; +import org.apache.eventmesh.registry.exception.RegistryException; + +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.Executor; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; + +import com.alibaba.nacos.api.NacosFactory; +import com.alibaba.nacos.api.PropertyKeyConst; +import com.alibaba.nacos.api.exception.NacosException; +import com.alibaba.nacos.api.naming.NamingService; +import com.alibaba.nacos.api.naming.listener.AbstractEventListener; +import com.alibaba.nacos.api.naming.listener.Event; +import com.alibaba.nacos.api.naming.listener.EventListener; +import com.alibaba.nacos.api.naming.listener.NamingEvent; +import com.alibaba.nacos.api.naming.pojo.Instance; +import com.alibaba.nacos.api.naming.pojo.ServiceInfo; +import com.alibaba.nacos.api.naming.utils.NamingUtils; +import com.alibaba.nacos.client.naming.utils.UtilAndComs; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class NacosDiscoveryService implements RegistryService { + + private final AtomicBoolean initFlag = new AtomicBoolean(false); + + private NacosRegistryConfiguration nacosConf; + + private NamingService namingService; + + private final Map> listeners = new HashMap<>(); + + private static final Executor notifyExecutor = new ThreadPoolExecutor(1, 1, 60L, TimeUnit.SECONDS, + new LinkedBlockingQueue<>(20), r -> { + Thread t = new Thread(r); + t.setName("org.apache.eventmesh.registry.nacos.executor"); + t.setDaemon(true); + return t; + }, new ThreadPoolExecutor.DiscardOldestPolicy() + ); + + private final Lock lock = new ReentrantLock(); + + + @Override + public void init() throws RegistryException { + if (!initFlag.compareAndSet(false, true)) { + return; + } + nacosConf = ConfigService.getInstance().buildConfigInstance(NacosRegistryConfiguration.class); + if (nacosConf == null) { + log.info("nacos registry configuration is null"); + } + Properties properties = buildProperties(); + // registry + try { + this.namingService = NacosFactory.createNamingService(properties); + } catch (NacosException e) { + log.error("[NacosRegistryService][start] error", e); + throw new RegistryException(e.getMessage()); + } + } + + private Properties buildProperties() { + Properties properties = new Properties(); + if (nacosConf == null) { + return properties; + } + properties.setProperty(PropertyKeyConst.SERVER_ADDR, nacosConf.getRegistryAddr()); + properties.setProperty(PropertyKeyConst.USERNAME, nacosConf.getEventMeshRegistryPluginUsername()); + properties.setProperty(PropertyKeyConst.PASSWORD, nacosConf.getEventMeshRegistryPluginPassword()); + + String endpoint = nacosConf.getEndpoint(); + if (Objects.nonNull(endpoint) && endpoint.contains(":")) { + 
+            int index = endpoint.indexOf(":");
+            properties.put(PropertyKeyConst.ENDPOINT, endpoint.substring(0, index));
+            properties.put(PropertyKeyConst.ENDPOINT_PORT, endpoint.substring(index + 1));
+        } else {
+            Optional.ofNullable(endpoint).ifPresent(value -> properties.put(PropertyKeyConst.ENDPOINT, endpoint));
+            String endpointPort = nacosConf.getEndpointPort();
+            Optional.ofNullable(endpointPort).ifPresent(value -> properties.put(PropertyKeyConst.ENDPOINT_PORT,
+                endpointPort));
+        }
+        String accessKey = nacosConf.getAccessKey();
+        Optional.ofNullable(accessKey).ifPresent(value -> properties.put(PropertyKeyConst.ACCESS_KEY, accessKey));
+        String secretKey = nacosConf.getSecretKey();
+        Optional.ofNullable(secretKey).ifPresent(value -> properties.put(PropertyKeyConst.SECRET_KEY, secretKey));
+        String clusterName = nacosConf.getClusterName();
+        Optional.ofNullable(clusterName).ifPresent(value -> properties.put(PropertyKeyConst.CLUSTER_NAME, clusterName));
+        String logFileName = nacosConf.getLogFileName();
+        Optional.ofNullable(logFileName).ifPresent(value -> properties.put(UtilAndComs.NACOS_NAMING_LOG_NAME,
+            logFileName));
+        String logLevel = nacosConf.getLogLevel();
+        Optional.ofNullable(logLevel).ifPresent(value -> properties.put(UtilAndComs.NACOS_NAMING_LOG_LEVEL, logLevel));
+        Integer pollingThreadCount = nacosConf.getPollingThreadCount();
+        Optional.ofNullable(pollingThreadCount).ifPresent(value -> properties.put(PropertyKeyConst.NAMING_POLLING_THREAD_COUNT, pollingThreadCount));
+        String namespace = nacosConf.getNamespace();
+        Optional.ofNullable(namespace).ifPresent(value -> properties.put(PropertyKeyConst.NAMESPACE, namespace));
+        return properties;
+    }
+
+    @Override
+    public void shutdown() throws RegistryException {
+        if (this.namingService != null) {
+            try {
+                namingService.shutDown();
+            } catch (NacosException e) {
+                log.warn("shutdown nacos naming service failed", e);
+            }
+        }
+    }
+
+    @Override
+    public void subscribe(RegistryListener listener, String serviceName) {
+        lock.lock();
+        try {
+            ServiceInfo serviceInfo = ServiceInfo.fromKey(serviceName);
+            Map<RegistryListener, EventListener> eventListenerMap = listeners.computeIfAbsent(serviceName,
+                k -> new HashMap<>());
+            if (eventListenerMap.containsKey(listener)) {
+                log.warn("the same listener has already subscribed to service {}", serviceName);
+                return;
+            }
+            EventListener eventListener = new AbstractEventListener() {
+                @Override
+                public Executor getExecutor() {
+                    return notifyExecutor;
+                }
+
+                @Override
+                public void onEvent(Event event) {
+                    if (!(event instanceof NamingEvent)) {
+                        log.warn("received notify event type is not as expected");
+                        return;
+                    }
+                    try {
+                        NamingEvent namingEvent = (NamingEvent) event;
+                        List<Instance> instances = namingEvent.getInstances();
+                        List<RegisterServerInfo> list = new ArrayList<>();
+                        if (instances != null) {
+                            for (Instance instance : instances) {
+                                RegisterServerInfo info = new RegisterServerInfo();
+                                info.setAddress(instance.getIp() + ":" + instance.getPort());
+                                info.setMetadata(instance.getMetadata());
+                                info.setHealth(instance.isHealthy());
+                                info.setServiceName(
+                                    ServiceInfo.getKey(NamingUtils.getGroupedName(namingEvent.getServiceName(),
+                                        namingEvent.getGroupName()),
+                                        namingEvent.getClusters()));
+                                list.add(info);
+                            }
+                        }
+                        listener.onChange(new NotifyEvent(list));
+                    } catch (Exception e) {
+                        log.warn("failed to notify listener of service {} change", serviceName, e);
+                    }
+                }
+            };
+            List<String> clusters;
+            if (serviceInfo.getClusters() == null || serviceInfo.getClusters().isEmpty()) {
+                clusters = new ArrayList<>();
+            } else {
+                clusters =
Arrays.stream(serviceInfo.getClusters().split(",")).collect(Collectors.toList()); + } + namingService.subscribe(serviceInfo.getName(), serviceInfo.getGroupName(), clusters, eventListener); + eventListenerMap.put(listener, eventListener); + } catch (Exception e) { + log.error("subscribe service name {} fail", serviceName, e); + } finally { + lock.unlock(); + } + } + + @Override + public void unsubscribe(RegistryListener registryListener, String serviceName) { + lock.lock(); + try { + ServiceInfo serviceInfo = ServiceInfo.fromKey(serviceName); + Map map = listeners.get(serviceName); + if (map == null) { + return; + } + List clusters; + if (serviceInfo.getClusters() == null || serviceInfo.getClusters().isEmpty()) { + clusters = new ArrayList<>(); + } else { + clusters = Arrays.stream(serviceInfo.getClusters().split(",")).collect(Collectors.toList()); + } + EventListener eventListener = map.get(registryListener); + namingService.unsubscribe(serviceInfo.getName(), serviceInfo.getGroupName(), clusters, eventListener); + map.remove(registryListener); + } catch (Exception e) { + log.error("unsubscribe service name {} fail", serviceName, e); + } finally { + lock.unlock(); + } + } + + @Override + public List selectInstances(QueryInstances queryInstances) { + ArrayList list = new ArrayList<>(); + try { + ServiceInfo serviceInfo = ServiceInfo.fromKey(queryInstances.getServiceName()); + ArrayList clusters = new ArrayList<>(); + if (StringUtils.isNotBlank(serviceInfo.getClusters())) { + clusters.addAll(Arrays.asList(serviceInfo.getClusters().split(","))); + } + List instances = namingService.selectInstances(serviceInfo.getName(), + serviceInfo.getGroupName(), clusters, + queryInstances.isHealth()); + if (instances != null) { + instances.forEach(x -> { + RegisterServerInfo instanceInfo = new RegisterServerInfo(); + instanceInfo.setMetadata(x.getMetadata()); + instanceInfo.setHealth(x.isHealthy()); + instanceInfo.setAddress(x.getIp() + ":" + x.getPort()); + instanceInfo.setServiceName( + ServiceInfo.getKey(NamingUtils.getGroupedName(x.getServiceName(), + serviceInfo.getGroupName()), x.getClusterName())); + list.add(instanceInfo); + }); + } + return list; + } catch (Exception e) { + log.error("select instance by query {} from nacos fail", queryInstances, e); + return list; + } + } + + @Override + public boolean register(RegisterServerInfo eventMeshRegisterInfo) throws RegistryException { + try { + String[] ipPort = eventMeshRegisterInfo.getAddress().split(":"); + if (ipPort.length < 2) { + return false; + } + ServiceInfo serviceInfo = ServiceInfo.fromKey(eventMeshRegisterInfo.getServiceName()); + Instance instance = new Instance(); + instance.setClusterName(serviceInfo.getClusters()); + instance.setEnabled(true); + instance.setEphemeral(true); + instance.setHealthy(eventMeshRegisterInfo.isHealth()); + instance.setWeight(1.0); + instance.setIp(ipPort[0]); + instance.setPort(Integer.parseInt(ipPort[1])); + instance.setMetadata(eventMeshRegisterInfo.getMetadata()); + namingService.registerInstance(serviceInfo.getName(), serviceInfo.getGroupName(), instance); + return true; + } catch (Exception e) { + log.error("register instance service {} fail", eventMeshRegisterInfo, e); + return false; + } + } + + @Override + public boolean unRegister(RegisterServerInfo eventMeshRegisterInfo) throws RegistryException { + try { + String[] ipPort = eventMeshRegisterInfo.getAddress().split(":"); + if (ipPort.length < 2) { + return false; + } + ServiceInfo serviceInfo = 
ServiceInfo.fromKey(eventMeshRegisterInfo.getServiceName()); + namingService.deregisterInstance(serviceInfo.getName(), serviceInfo.getGroupName(), ipPort[0], + Integer.parseInt(ipPort[1]), + serviceInfo.getClusters()); + return true; + } catch (Exception e) { + log.error("unregister instance service {} fail", eventMeshRegisterInfo, e); + return false; + } + } +} diff --git a/eventmesh-registry/eventmesh-registry-nacos/src/main/java/org/apache/eventmesh/registry/nacos/NacosRegistryConfiguration.java b/eventmesh-registry/eventmesh-registry-nacos/src/main/java/org/apache/eventmesh/registry/nacos/NacosRegistryConfiguration.java new file mode 100644 index 0000000000..7c908c9424 --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-nacos/src/main/java/org/apache/eventmesh/registry/nacos/NacosRegistryConfiguration.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.registry.nacos; + +import org.apache.eventmesh.common.config.CommonConfiguration; +import org.apache.eventmesh.common.config.Config; +import org.apache.eventmesh.common.config.ConfigField; + +import com.alibaba.nacos.api.PropertyKeyConst; +import com.alibaba.nacos.client.naming.utils.UtilAndComs; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@Config(prefix = "eventMesh.registry.nacos") +public class NacosRegistryConfiguration extends CommonConfiguration { + + @ConfigField(field = PropertyKeyConst.ENDPOINT) + private String endpoint; + + @ConfigField(field = PropertyKeyConst.ENDPOINT_PORT) + private String endpointPort; + + @ConfigField(field = PropertyKeyConst.ACCESS_KEY) + private String accessKey; + + @ConfigField(field = PropertyKeyConst.SECRET_KEY) + private String secretKey; + + @ConfigField(field = PropertyKeyConst.CLUSTER_NAME) + private String clusterName; + + @ConfigField(field = PropertyKeyConst.NAMESPACE) + private String namespace; + + @ConfigField(field = PropertyKeyConst.NAMING_POLLING_THREAD_COUNT) + private Integer pollingThreadCount = Runtime.getRuntime().availableProcessors() / 2 + 1; + + @ConfigField(field = UtilAndComs.NACOS_NAMING_LOG_NAME) + private String logFileName; + + @ConfigField(field = UtilAndComs.NACOS_NAMING_LOG_LEVEL) + private String logLevel; + +} diff --git a/eventmesh-registry/eventmesh-registry-nacos/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.registry.RegistryService b/eventmesh-registry/eventmesh-registry-nacos/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.registry.RegistryService new file mode 100644 index 0000000000..3301d56e5e --- /dev/null +++ b/eventmesh-registry/eventmesh-registry-nacos/src/main/resources/META-INF/eventmesh/org.apache.eventmesh.registry.RegistryService @@ -0,0 +1,16 @@ +# Licensed to the Apache Software 
Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +nacos=org.apache.eventmesh.registry.nacos.NacosDiscoveryService \ No newline at end of file diff --git a/eventmesh-runtime-v2/bin/start-v2.sh b/eventmesh-runtime-v2/bin/start-v2.sh new file mode 100644 index 0000000000..fc67c29d3e --- /dev/null +++ b/eventmesh-runtime-v2/bin/start-v2.sh @@ -0,0 +1,200 @@ +#!/bin/bash +# +# Licensed to Apache Software Foundation (ASF) under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Apache Software Foundation (ASF) licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +#=========================================================================================== +# Java Environment Setting +#=========================================================================================== +set -e +# Server configuration may be inconsistent, add these configurations to avoid garbled code problems +export LANG=en_US.UTF-8 +export LC_CTYPE=en_US.UTF-8 +export LC_ALL=en_US.UTF-8 + +TMP_JAVA_HOME="/customize/your/java/home/here" + +# Detect operating system. +OS=$(uname) + +function is_java8_or_11 { + local _java="$1" + [[ -x "$_java" ]] || return 1 + [[ "$("$_java" -version 2>&1)" =~ 'java version "1.8' || "$("$_java" -version 2>&1)" =~ 'openjdk version "1.8' || "$("$_java" -version 2>&1)" =~ 'java version "11' || "$("$_java" -version 2>&1)" =~ 'openjdk version "11' ]] || return 2 + return 0 +} + +function extract_java_version { + local _java="$1" + local version=$("$_java" -version 2>&1 | awk -F '"' '/version/ {print $2}' | awk -F '.' '{if ($1 == 1 && $2 == 8) print "8"; else if ($1 == 11) print "11"; else print "unknown"}') + echo "$version" +} + +# 0(not running), 1(is running) +#function is_proxyRunning { +# local _pid="$1" +# local pid=`ps ax | grep -i 'org.apache.eventmesh.runtime.boot.EventMeshStartup' |grep java | grep -v grep | awk '{print $1}'|grep $_pid` +# if [ -z "$pid" ] ; then +# return 0 +# else +# return 1 +# fi +#} + +function get_pid { + local ppid="" + if [ -f ${EVENTMESH_HOME}/bin/pid.file ]; then + ppid=$(cat ${EVENTMESH_HOME}/bin/pid.file) + # If the process does not exist, it indicates that the previous process terminated abnormally. + if [ ! 
-d /proc/$ppid ]; then + # Remove the residual file. + rm ${EVENTMESH_HOME}/bin/pid.file + echo -e "ERROR\t EventMesh process had already terminated unexpectedly before, please check log output." + ppid="" + fi + else + if [[ $OS =~ Msys ]]; then + # There is a Bug on Msys that may not be able to kill the identified process + ppid=`jps -v | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | grep java | grep -v grep | awk -F ' ' {'print $1'}` + elif [[ $OS =~ Darwin ]]; then + # Known problem: grep Java may not be able to accurately identify Java processes + ppid=$(/bin/ps -o user,pid,command | grep "java" | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | grep -Ev "^root" |awk -F ' ' {'print $2'}) + else + if [ $DOCKER ]; then + # No need to exclude root user in Docker containers. + ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_HOME | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | awk -F ' ' {'print $2'}) + else + # It is required to identify the process as accurately as possible on Linux. + ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_HOME | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | grep -Ev "^root" | awk -F ' ' {'print $2'}) + fi + fi + fi + echo "$ppid"; +} + +#=========================================================================================== +# Locate Java Executable +#=========================================================================================== + +if [[ -d "$TMP_JAVA_HOME" ]] && is_java8_or_11 "$TMP_JAVA_HOME/bin/java"; then + JAVA="$TMP_JAVA_HOME/bin/java" + JAVA_VERSION=$(extract_java_version "$TMP_JAVA_HOME/bin/java") +elif [[ -d "$JAVA_HOME" ]] && is_java8_or_11 "$JAVA_HOME/bin/java"; then + JAVA="$JAVA_HOME/bin/java" + JAVA_VERSION=$(extract_java_version "$JAVA_HOME/bin/java") +elif is_java8_or_11 "$(which java)"; then + JAVA="$(which java)" + JAVA_VERSION=$(extract_java_version "$(which java)") +else + echo -e "ERROR\t Java 8 or 11 not found, operation abort." + exit 9; +fi + +echo "EventMesh using Java version: $JAVA_VERSION, path: $JAVA" + +EVENTMESH_HOME=$(cd "$(dirname "$0")/.." && pwd) +export EVENTMESH_HOME + +EVENTMESH_LOG_HOME="${EVENTMESH_HOME}/logs" +export EVENTMESH_LOG_HOME + +echo -e "EVENTMESH_HOME : ${EVENTMESH_HOME}\nEVENTMESH_LOG_HOME : ${EVENTMESH_LOG_HOME}" + +function make_logs_dir { + if [ ! -e "${EVENTMESH_LOG_HOME}" ]; then mkdir -p "${EVENTMESH_LOG_HOME}"; fi +} + +error_exit () +{ + echo -e "ERROR\t $1 !!" 
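+    # print the error message and terminate the startup script with a non-zero exit code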
+ exit 1 +} + +export JAVA_HOME + +#=========================================================================================== +# JVM Configuration +#=========================================================================================== +#if [ $1 = "prd" -o $1 = "benchmark" ]; then JAVA_OPT="${JAVA_OPT} -server -Xms2048M -Xmx4096M -Xmn2048m -XX:SurvivorRatio=4" +#elif [ $1 = "sit" ]; then JAVA_OPT="${JAVA_OPT} -server -Xms256M -Xmx512M -Xmn256m -XX:SurvivorRatio=4" +#elif [ $1 = "dev" ]; then JAVA_OPT="${JAVA_OPT} -server -Xms128M -Xmx256M -Xmn128m -XX:SurvivorRatio=4" +#fi + +GC_LOG_FILE="${EVENTMESH_LOG_HOME}/eventmesh_gc_%p.log" + +#JAVA_OPT="${JAVA_OPT} -server -Xms2048M -Xmx4096M -Xmn2048m -XX:SurvivorRatio=4" +JAVA_OPT=`cat ${EVENTMESH_HOME}/conf/server.env | grep APP_START_JVM_OPTION::: | awk -F ':::' {'print $2'}` +JAVA_OPT="${JAVA_OPT} -XX:+UseG1GC -XX:G1HeapRegionSize=16m -XX:G1ReservePercent=25 -XX:InitiatingHeapOccupancyPercent=30 -XX:SoftRefLRUPolicyMSPerMB=0 -XX:SurvivorRatio=8 -XX:MaxGCPauseMillis=50" +JAVA_OPT="${JAVA_OPT} -verbose:gc" +if [[ "$JAVA_VERSION" == "8" ]]; then + # Set JAVA_OPT for Java 8 + JAVA_OPT="${JAVA_OPT} -Xloggc:${GC_LOG_FILE} -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=30m" + JAVA_OPT="${JAVA_OPT} -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCApplicationStoppedTime -XX:+PrintAdaptiveSizePolicy" +elif [[ "$JAVA_VERSION" == "11" ]]; then + # Set JAVA_OPT for Java 11 + XLOG_PARAM="time,level,tags:filecount=5,filesize=30m" + JAVA_OPT="${JAVA_OPT} -Xlog:gc*:${GC_LOG_FILE}:${XLOG_PARAM}" + JAVA_OPT="${JAVA_OPT} -Xlog:safepoint:${GC_LOG_FILE}:${XLOG_PARAM} -Xlog:ergo*=debug:${GC_LOG_FILE}:${XLOG_PARAM}" +fi +JAVA_OPT="${JAVA_OPT} -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${EVENTMESH_LOG_HOME} -XX:ErrorFile=${EVENTMESH_LOG_HOME}/hs_err_%p.log" +JAVA_OPT="${JAVA_OPT} -XX:-OmitStackTraceInFastThrow" +JAVA_OPT="${JAVA_OPT} -XX:+AlwaysPreTouch" +JAVA_OPT="${JAVA_OPT} -XX:MaxDirectMemorySize=8G" +JAVA_OPT="${JAVA_OPT} -XX:-UseLargePages -XX:-UseBiasedLocking" +JAVA_OPT="${JAVA_OPT} -Dio.netty.leakDetectionLevel=advanced" +JAVA_OPT="${JAVA_OPT} -Dio.netty.allocator.type=pooled" +JAVA_OPT="${JAVA_OPT} -Djava.security.egd=file:/dev/./urandom" +JAVA_OPT="${JAVA_OPT} -Dlog4j.configurationFile=${EVENTMESH_HOME}/conf/log4j2.xml" +JAVA_OPT="${JAVA_OPT} -Deventmesh.log.home=${EVENTMESH_LOG_HOME}" +JAVA_OPT="${JAVA_OPT} -DconfPath=${EVENTMESH_HOME}/conf" +JAVA_OPT="${JAVA_OPT} -Dlog4j2.AsyncQueueFullPolicy=Discard" +JAVA_OPT="${JAVA_OPT} -Drocketmq.client.logUseSlf4j=true" +JAVA_OPT="${JAVA_OPT} -DeventMeshPluginDir=${EVENTMESH_HOME}/plugin" + +#if [ -f "pid.file" ]; then +# pid=`cat pid.file` +# if ! is_proxyRunning "$pid"; then +# echo "proxy is running already" +# exit 9; +# else +# echo "err pid$pid, rm pid.file" +# rm pid.file +# fi +#fi + +pid=$(get_pid) +if [[ $pid == "ERROR"* ]]; then + echo -e "${pid}" + exit 9 +fi +if [ -n "$pid" ]; then + echo -e "ERROR\t The server is already running (pid=$pid), there is no need to execute start.sh again." 
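+    # refuse to start a second instance; the running process found above must be stopped first (e.g. via stop-v2.sh)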
+ exit 9 +fi + +make_logs_dir + +echo "Using Java version: $JAVA_VERSION, path: $JAVA" >> ${EVENTMESH_LOG_HOME}/eventmesh.out + +EVENTMESH_MAIN=org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter +if [ $DOCKER ]; then + $JAVA $JAVA_OPT -classpath ${EVENTMESH_HOME}/conf:${EVENTMESH_HOME}/apps/*:${EVENTMESH_HOME}/lib/* $EVENTMESH_MAIN >> ${EVENTMESH_LOG_HOME}/eventmesh.out +else + $JAVA $JAVA_OPT -classpath ${EVENTMESH_HOME}/conf:${EVENTMESH_HOME}/apps/*:${EVENTMESH_HOME}/lib/* $EVENTMESH_MAIN >> ${EVENTMESH_LOG_HOME}/eventmesh.out 2>&1 & +echo $!>${EVENTMESH_HOME}/bin/pid.file +fi +exit 0 diff --git a/eventmesh-runtime-v2/bin/stop-v2.sh b/eventmesh-runtime-v2/bin/stop-v2.sh new file mode 100644 index 0000000000..177ae1e129 --- /dev/null +++ b/eventmesh-runtime-v2/bin/stop-v2.sh @@ -0,0 +1,88 @@ +#!/bin/bash +# +# Licensed to Apache Software Foundation (ASF) under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Apache Software Foundation (ASF) licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Detect operating system +OS=$(uname) + +EVENTMESH_HOME=`cd $(dirname $0)/.. && pwd` + +export EVENTMESH_HOME + +function get_pid { + local ppid="" + if [ -f ${EVENTMESH_HOME}/bin/pid.file ]; then + ppid=$(cat ${EVENTMESH_HOME}/bin/pid.file) + # If the process does not exist, it indicates that the previous process terminated abnormally. + if [ ! -d /proc/$ppid ]; then + # Remove the residual file and return an error status. + rm ${EVENTMESH_HOME}/bin/pid.file + echo -e "ERROR\t EventMesh process had already terminated unexpectedly before, please check log output." + ppid="" + fi + else + if [[ $OS =~ Msys ]]; then + # There is a Bug on Msys that may not be able to kill the identified process + ppid=`jps -v | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | grep java | grep -v grep | awk -F ' ' {'print $1'}` + elif [[ $OS =~ Darwin ]]; then + # Known problem: grep Java may not be able to accurately identify Java processes + ppid=$(/bin/ps -o user,pid,command | grep "java" | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | grep -Ev "^root" |awk -F ' ' {'print $2'}) + else + # It is required to identify the process as accurately as possible on Linux + ppid=$(ps -C java -o user,pid,command --cols 99999 | grep -w $EVENTMESH_HOME | grep -i "org.apache.eventmesh.runtime.boot.RuntimeInstanceStarter" | grep -Ev "^root" |awk -F ' ' {'print $2'}) + fi + fi + echo "$ppid"; +} + +pid=$(get_pid) +if [[ $pid == "ERROR"* ]]; then + echo -e "${pid}" + exit 9 +fi +if [ -z "$pid" ];then + echo -e "ERROR\t No EventMesh server running." + exit 9 +fi + +kill ${pid} +echo "Send shutdown request to EventMesh(${pid}) OK" + +[[ $OS =~ Msys ]] && PS_PARAM=" -W " +stop_timeout=60 +for no in $(seq 1 $stop_timeout); do + if ps $PS_PARAM -p "$pid" 2>&1 > /dev/null; then + if [ $no -lt $stop_timeout ]; then + echo "[$no] server shutting down ..." 
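+            # poll once per second; if the process is still alive after $stop_timeout checks it is killed with -9 below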
+ sleep 1 + continue + fi + + echo "shutdown server timeout, kill process: $pid" + kill -9 $pid; sleep 1; break; + echo "`date +'%Y-%m-%-d %H:%M:%S'` , pid : [$pid] , error message : abnormal shutdown which can not be closed within 60s" > ../logs/shutdown.error + else + echo "shutdown server ok!"; break; + fi +done + +if [ -f "pid.file" ]; then + rm pid.file +fi + + diff --git a/eventmesh-runtime-v2/build.gradle b/eventmesh-runtime-v2/build.gradle new file mode 100644 index 0000000000..74b9759b10 --- /dev/null +++ b/eventmesh-runtime-v2/build.gradle @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'java' +} + +group 'org.apache.eventmesh' +version '1.10.0-release' + +repositories { + mavenCentral() +} + +dependencies { + compileOnly 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' + + api project (":eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api") + api project (":eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-admin") + implementation project(":eventmesh-openconnect:eventmesh-openconnect-java") + implementation project(":eventmesh-common") + implementation project(":eventmesh-connectors:eventmesh-connector-canal") + implementation project(":eventmesh-connectors:eventmesh-connector-http") + implementation project(":eventmesh-function:eventmesh-function-api") + implementation project(":eventmesh-function:eventmesh-function-filter") + implementation project(":eventmesh-function:eventmesh-function-transformer") + implementation project(":eventmesh-meta:eventmesh-meta-api") + implementation project(":eventmesh-meta:eventmesh-meta-nacos") + implementation project(":eventmesh-registry:eventmesh-registry-api") + implementation project(":eventmesh-registry:eventmesh-registry-nacos") + implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") + implementation project(":eventmesh-storage-plugin:eventmesh-storage-standalone") + + implementation "io.grpc:grpc-core" + implementation "io.grpc:grpc-protobuf" + implementation "io.grpc:grpc-stub" + implementation "io.grpc:grpc-netty" + implementation "io.grpc:grpc-netty-shaded" +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/Runtime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/Runtime.java new file mode 100644 index 0000000000..608ef96da7 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/Runtime.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime; + +/** + * Runtime + */ +public interface Runtime { + + void init() throws Exception; + + void start() throws Exception; + + void stop() throws Exception; + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeFactory.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeFactory.java new file mode 100644 index 0000000000..ed273030d9 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeFactory.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime; + +/** + * RuntimeFactory + */ +public interface RuntimeFactory extends AutoCloseable { + + void init() throws Exception; + + Runtime createRuntime(RuntimeInstanceConfig runtimeInstanceConfig); + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java new file mode 100644 index 0000000000..caa5330fe3 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/RuntimeInstanceConfig.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.runtime; + +import org.apache.eventmesh.common.config.Config; +import org.apache.eventmesh.common.enums.ComponentType; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@Config(path = "classPath://runtime.yaml") +public class RuntimeInstanceConfig { + + private boolean registryEnabled; + + private String registryServerAddr; + + private String registryPluginType; + + private String storagePluginType; + + private String adminServiceName; + + private String adminServiceAddr; + + private ComponentType componentType; + + private String runtimeInstanceId; + + private String runtimeInstanceName; + + private String runtimeInstanceDesc; + + private String runtimeInstanceVersion; + + private String runtimeInstanceConfig; + + private String runtimeInstanceStatus; + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java new file mode 100644 index 0000000000..beb1d1eedc --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstance.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.runtime.boot; + +import org.apache.eventmesh.registry.QueryInstances; +import org.apache.eventmesh.registry.RegisterServerInfo; +import org.apache.eventmesh.registry.RegistryFactory; +import org.apache.eventmesh.registry.RegistryService; +import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeFactory; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; +import org.apache.eventmesh.runtime.connector.ConnectorRuntimeFactory; +import org.apache.eventmesh.runtime.function.FunctionRuntimeFactory; +import org.apache.eventmesh.runtime.mesh.MeshRuntimeFactory; + +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class RuntimeInstance { + + private String adminServiceAddr; + + private Map adminServerInfoMap = new HashMap<>(); + + private RegistryService registryService; + + private Runtime runtime; + + private RuntimeFactory runtimeFactory; + + private final RuntimeInstanceConfig runtimeInstanceConfig; + + private volatile boolean isStarted = false; + + public RuntimeInstance(RuntimeInstanceConfig runtimeInstanceConfig) { + this.runtimeInstanceConfig = runtimeInstanceConfig; + if (runtimeInstanceConfig.isRegistryEnabled()) { + this.registryService = RegistryFactory.getInstance(runtimeInstanceConfig.getRegistryPluginType()); + } + } + + public void init() throws Exception { + if (registryService != null) { + registryService.init(); + QueryInstances queryInstances = new QueryInstances(); + queryInstances.setServiceName(runtimeInstanceConfig.getAdminServiceName()); + queryInstances.setHealth(true); + List adminServerRegisterInfoList = registryService.selectInstances(queryInstances); + if (!adminServerRegisterInfoList.isEmpty()) { + adminServiceAddr = getRandomAdminServerAddr(adminServerRegisterInfoList); + } else { + throw new RuntimeException("admin server address is empty, please check"); + } + // use registry adminServiceAddr value replace config + runtimeInstanceConfig.setAdminServiceAddr(adminServiceAddr); + } else { + adminServiceAddr = runtimeInstanceConfig.getAdminServiceAddr(); + } + + runtimeFactory = initRuntimeFactory(runtimeInstanceConfig); + runtime = runtimeFactory.createRuntime(runtimeInstanceConfig); + runtime.init(); + } + + public void start() throws Exception { + if (StringUtils.isBlank(adminServiceAddr)) { + throw new RuntimeException("admin server address is empty, please check"); + } else { + if (registryService != null) { + registryService.subscribe((event) -> { + log.info("runtime receive registry event: {}", event); + List registerServerInfoList = event.getInstances(); + Map registerServerInfoMap = new HashMap<>(); + for (RegisterServerInfo registerServerInfo : registerServerInfoList) { + registerServerInfoMap.put(registerServerInfo.getAddress(), registerServerInfo); + } + if (!registerServerInfoMap.isEmpty()) { + adminServerInfoMap = registerServerInfoMap; + updateAdminServerAddr(); + } + }, runtimeInstanceConfig.getAdminServiceName()); + } + runtime.start(); + isStarted = true; + } + } + + public void shutdown() throws Exception { + runtime.stop(); + } + + private void updateAdminServerAddr() throws Exception { + if (isStarted) { + if (!adminServerInfoMap.containsKey(adminServiceAddr)) { + adminServiceAddr = getRandomAdminServerAddr(adminServerInfoMap); + log.info("admin server address changed to: {}", 
adminServiceAddr); + shutdown(); + start(); + } + } else { + adminServiceAddr = getRandomAdminServerAddr(adminServerInfoMap); + } + } + + private String getRandomAdminServerAddr(Map adminServerInfoMap) { + ArrayList addresses = new ArrayList<>(adminServerInfoMap.keySet()); + Random random = new Random(); + int randomIndex = random.nextInt(addresses.size()); + return addresses.get(randomIndex); + } + + private String getRandomAdminServerAddr(List adminServerRegisterInfoList) { + Random random = new Random(); + int randomIndex = random.nextInt(adminServerRegisterInfoList.size()); + return adminServerRegisterInfoList.get(randomIndex).getAddress(); + } + + private RuntimeFactory initRuntimeFactory(RuntimeInstanceConfig runtimeInstanceConfig) { + switch (runtimeInstanceConfig.getComponentType()) { + case CONNECTOR: + return new ConnectorRuntimeFactory(); + case FUNCTION: + return new FunctionRuntimeFactory(); + case MESH: + return new MeshRuntimeFactory(); + default: + throw new RuntimeException("unsupported runtime type: " + runtimeInstanceConfig.getComponentType()); + } + } + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstanceStarter.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstanceStarter.java new file mode 100644 index 0000000000..42745c8dd7 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/boot/RuntimeInstanceStarter.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.runtime.boot; + +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; +import org.apache.eventmesh.runtime.util.BannerUtil; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class RuntimeInstanceStarter { + + public static void main(String[] args) { + try { + RuntimeInstanceConfig runtimeInstanceConfig = ConfigService.getInstance().buildConfigInstance(RuntimeInstanceConfig.class); + RuntimeInstance runtimeInstance = new RuntimeInstance(runtimeInstanceConfig); + BannerUtil.generateBanner(); + runtimeInstance.init(); + runtimeInstance.start(); + + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + try { + log.info("runtime shutting down hook begin."); + long start = System.currentTimeMillis(); + runtimeInstance.shutdown(); + long end = System.currentTimeMillis(); + + log.info("runtime shutdown cost {}ms", end - start); + } catch (Exception e) { + log.error("exception when shutdown {}", e.getMessage(), e); + } + })); + } catch (Throwable e) { + log.error("runtime start fail {}.", e.getMessage(), e); + System.exit(-1); + } + + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java new file mode 100644 index 0000000000..3d3c864b58 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntime.java @@ -0,0 +1,596 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.runtime.connector; + +import org.apache.eventmesh.api.consumer.Consumer; +import org.apache.eventmesh.api.factory.StoragePluginFactory; +import org.apache.eventmesh.api.producer.Producer; +import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.common.config.connector.SinkConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.config.connector.offset.OffsetStorageConfig; +import org.apache.eventmesh.common.enums.ConnectorStage; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceBlockingStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.JobState; +import org.apache.eventmesh.common.remote.request.FetchJobRequest; +import org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest; +import org.apache.eventmesh.common.remote.request.ReportJobRequest; +import org.apache.eventmesh.common.remote.request.ReportVerifyRequest; +import org.apache.eventmesh.common.remote.response.FetchJobResponse; +import org.apache.eventmesh.common.utils.IPUtils; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.factory.ConnectorPluginFactory; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendExceptionContext; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendMessageCallback; +import org.apache.eventmesh.openconnect.offsetmgmt.api.callback.SendResult; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.RecordOffsetManagement; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.DefaultOffsetManagementServiceImpl; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetManagementService; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetStorageReaderImpl; +import org.apache.eventmesh.openconnect.offsetmgmt.api.storage.OffsetStorageWriterImpl; +import org.apache.eventmesh.openconnect.util.ConfigUtil; +import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; +import org.apache.eventmesh.spi.EventMeshExtensionFactory; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Random; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import 
java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.stub.StreamObserver; + +import com.google.protobuf.Any; +import com.google.protobuf.UnsafeByteOperations; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ConnectorRuntime implements Runtime { + + private RuntimeInstanceConfig runtimeInstanceConfig; + + private ConnectorRuntimeConfig connectorRuntimeConfig; + + private ManagedChannel channel; + + private AdminServiceStub adminServiceStub; + + private AdminServiceBlockingStub adminServiceBlockingStub; + + StreamObserver responseObserver; + + StreamObserver requestObserver; + + private Source sourceConnector; + + private Sink sinkConnector; + + private OffsetStorageWriterImpl offsetStorageWriter; + + private OffsetStorageReaderImpl offsetStorageReader; + + private OffsetManagementService offsetManagementService; + + private RecordOffsetManagement offsetManagement; + + private volatile RecordOffsetManagement.CommittableOffsets committableOffsets; + + private Producer producer; + + private Consumer consumer; + + private final ExecutorService sourceService = ThreadPoolFactory.createSingleExecutor("eventMesh-sourceService"); + + private final ExecutorService sinkService = ThreadPoolFactory.createSingleExecutor("eventMesh-sinkService"); + + private final ScheduledExecutorService heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(); + + private final ExecutorService reportVerifyExecutor = Executors.newSingleThreadExecutor(); + + private final BlockingQueue queue; + + private volatile boolean isRunning = false; + + private volatile boolean isFailed = false; + + public static final String CALLBACK_EXTENSION = "callBackExtension"; + + private String adminServerAddr; + + + public ConnectorRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { + this.runtimeInstanceConfig = runtimeInstanceConfig; + this.queue = new LinkedBlockingQueue<>(1000); + } + + @Override + public void init() throws Exception { + + initAdminService(); + + initStorageService(); + + initConnectorService(); + } + + private void initAdminService() { + adminServerAddr = getRandomAdminServerAddr(runtimeInstanceConfig.getAdminServiceAddr()); + // create gRPC channel + channel = ManagedChannelBuilder.forTarget(adminServerAddr).usePlaintext().build(); + + adminServiceStub = AdminServiceGrpc.newStub(channel).withWaitForReady(); + + adminServiceBlockingStub = AdminServiceGrpc.newBlockingStub(channel).withWaitForReady(); + + responseObserver = new StreamObserver() { + @Override + public void onNext(Payload response) { + log.info("runtime receive message: {} ", response); + } + + @Override + public void onError(Throwable t) { + log.error("runtime receive error message: {}", t.getMessage()); + } + + @Override + public void onCompleted() { + log.info("runtime finished receive message and completed"); + } + }; + + requestObserver = adminServiceStub.invokeBiStream(responseObserver); + } + + private String getRandomAdminServerAddr(String adminServerAddrList) { + String[] addresses = adminServerAddrList.split(";"); + if (addresses.length == 0) { + throw new IllegalArgumentException("Admin server address list is empty"); + } + Random random = new Random(); + int randomIndex = random.nextInt(addresses.length); + return addresses[randomIndex]; + } + + private void 
initStorageService() { + // TODO: init producer & consumer + producer = StoragePluginFactory.getMeshMQProducer(runtimeInstanceConfig.getStoragePluginType()); + + consumer = StoragePluginFactory.getMeshMQPushConsumer(runtimeInstanceConfig.getStoragePluginType()); + + } + + private void initConnectorService() throws Exception { + + connectorRuntimeConfig = ConfigService.getInstance().buildConfigInstance(ConnectorRuntimeConfig.class); + + FetchJobResponse jobResponse = fetchJobConfig(); + + if (jobResponse == null) { + isFailed = true; + stop(); + throw new RuntimeException("fetch job config fail"); + } + + connectorRuntimeConfig.setSourceConnectorType(jobResponse.getTransportType().getSrc().getName()); + connectorRuntimeConfig.setSourceConnectorDesc(jobResponse.getConnectorConfig().getSourceConnectorDesc()); + connectorRuntimeConfig.setSourceConnectorConfig(jobResponse.getConnectorConfig().getSourceConnectorConfig()); + + connectorRuntimeConfig.setSinkConnectorType(jobResponse.getTransportType().getDst().getName()); + connectorRuntimeConfig.setSinkConnectorDesc(jobResponse.getConnectorConfig().getSinkConnectorDesc()); + connectorRuntimeConfig.setSinkConnectorConfig(jobResponse.getConnectorConfig().getSinkConnectorConfig()); + + // spi load offsetMgmtService + this.offsetManagement = new RecordOffsetManagement(); + this.committableOffsets = RecordOffsetManagement.CommittableOffsets.EMPTY; + OffsetStorageConfig offsetStorageConfig = new OffsetStorageConfig(); + offsetStorageConfig.setOffsetStorageAddr(connectorRuntimeConfig.getRuntimeConfig().get("offsetStorageAddr").toString()); + offsetStorageConfig.setOffsetStorageType(connectorRuntimeConfig.getRuntimeConfig().get("offsetStoragePluginType").toString()); + offsetStorageConfig.setDataSourceType(jobResponse.getTransportType().getSrc()); + offsetStorageConfig.setDataSinkType(jobResponse.getTransportType().getDst()); + Map offsetStorageExtensions = new HashMap<>(); + offsetStorageExtensions.put("jobId", connectorRuntimeConfig.getJobID()); + offsetStorageConfig.setExtensions(offsetStorageExtensions); + + this.offsetManagementService = Optional.ofNullable(offsetStorageConfig).map(OffsetStorageConfig::getOffsetStorageType) + .map(storageType -> EventMeshExtensionFactory.getExtension(OffsetManagementService.class, storageType)) + .orElse(new DefaultOffsetManagementServiceImpl()); + this.offsetManagementService.initialize(offsetStorageConfig); + this.offsetStorageWriter = new OffsetStorageWriterImpl(offsetManagementService); + this.offsetStorageReader = new OffsetStorageReaderImpl(offsetManagementService); + + ConnectorCreateService sourceConnectorCreateService = + ConnectorPluginFactory.createConnector(connectorRuntimeConfig.getSourceConnectorType() + "-Source"); + sourceConnector = (Source) sourceConnectorCreateService.create(); + + SourceConfig sourceConfig = (SourceConfig) ConfigUtil.parse(connectorRuntimeConfig.getSourceConnectorConfig(), sourceConnector.configClass()); + SourceConnectorContext sourceConnectorContext = new SourceConnectorContext(); + sourceConnectorContext.setSourceConfig(sourceConfig); + sourceConnectorContext.setRuntimeConfig(connectorRuntimeConfig.getRuntimeConfig()); + sourceConnectorContext.setJobType(jobResponse.getType()); + sourceConnectorContext.setOffsetStorageReader(offsetStorageReader); + if (CollectionUtils.isNotEmpty(jobResponse.getPosition())) { + sourceConnectorContext.setRecordPositionList(jobResponse.getPosition()); + } + sourceConnector.init(sourceConnectorContext); + + ConnectorCreateService 
sinkConnectorCreateService = + ConnectorPluginFactory.createConnector(connectorRuntimeConfig.getSinkConnectorType() + "-Sink"); + sinkConnector = (Sink) sinkConnectorCreateService.create(); + + SinkConfig sinkConfig = (SinkConfig) ConfigUtil.parse(connectorRuntimeConfig.getSinkConnectorConfig(), sinkConnector.configClass()); + SinkConnectorContext sinkConnectorContext = new SinkConnectorContext(); + sinkConnectorContext.setSinkConfig(sinkConfig); + sinkConnectorContext.setRuntimeConfig(connectorRuntimeConfig.getRuntimeConfig()); + sinkConnectorContext.setJobType(jobResponse.getType()); + sinkConnector.init(sinkConnectorContext); + + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.INIT); + + } + + private FetchJobResponse fetchJobConfig() { + String jobId = connectorRuntimeConfig.getJobID(); + FetchJobRequest jobRequest = new FetchJobRequest(); + jobRequest.setJobID(jobId); + + Metadata metadata = Metadata.newBuilder().setType(FetchJobRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(jobRequest)))).build()) + .build(); + Payload response = adminServiceBlockingStub.invoke(request); + if (response.getMetadata().getType().equals(FetchJobResponse.class.getSimpleName())) { + return JsonUtils.parseObject(response.getBody().getValue().toStringUtf8(), FetchJobResponse.class); + } + return null; + } + + @Override + public void start() throws Exception { + + heartBeatExecutor.scheduleAtFixedRate(() -> { + + ReportHeartBeatRequest heartBeat = new ReportHeartBeatRequest(); + heartBeat.setAddress(IPUtils.getLocalAddress()); + heartBeat.setReportedTimeStamp(String.valueOf(System.currentTimeMillis())); + heartBeat.setJobID(connectorRuntimeConfig.getJobID()); + + Metadata metadata = Metadata.newBuilder().setType(ReportHeartBeatRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(heartBeat)))).build()) + .build(); + + requestObserver.onNext(request); + }, 5, 5, TimeUnit.SECONDS); + + // start offsetMgmtService + offsetManagementService.start(); + isRunning = true; + // start sinkService + sinkService.execute(() -> { + try { + startSinkConnector(); + } catch (Exception e) { + isFailed = true; + log.error("sink connector [{}] start fail", sinkConnector.name(), e); + try { + this.stop(); + } catch (Exception ex) { + log.error("Failed to stop after exception", ex); + } + throw new RuntimeException(e); + } + }); + // start + sourceService.execute(() -> { + try { + startSourceConnector(); + } catch (Exception e) { + isFailed = true; + log.error("source connector [{}] start fail", sourceConnector.name(), e); + try { + this.stop(); + } catch (Exception ex) { + log.error("Failed to stop after exception", ex); + } + throw new RuntimeException(e); + } + }); + + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.RUNNING); + } + + @Override + public void stop() throws Exception { + log.info("ConnectorRuntime start stop"); + isRunning = false; + if (isFailed) { + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.FAIL); + } else { + reportJobRequest(connectorRuntimeConfig.getJobID(), JobState.COMPLETE); + } + sourceConnector.stop(); + sinkConnector.stop(); + sourceService.shutdown(); + sinkService.shutdown(); + heartBeatExecutor.shutdown(); + 
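        // stop the local executors first, then complete the admin-server gRPC stream and shut down the channel below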
reportVerifyExecutor.shutdown(); + requestObserver.onCompleted(); + if (channel != null && !channel.isShutdown()) { + channel.shutdown(); + } + } + + private void startSourceConnector() throws Exception { + sourceConnector.start(); + while (isRunning) { + List connectorRecordList = sourceConnector.poll(); + // TODO: use producer pub record to storage replace below + if (connectorRecordList != null && !connectorRecordList.isEmpty()) { + for (ConnectRecord record : connectorRecordList) { + // check recordUniqueId + if (record.getExtensions() == null || !record.getExtensions().containsKey("recordUniqueId")) { + record.addExtension("recordUniqueId", record.getRecordId()); + } + + queue.put(record); + + // if enabled incremental data reporting consistency check + if (connectorRuntimeConfig.enableIncrementalDataConsistencyCheck) { + reportVerifyRequest(record, connectorRuntimeConfig, ConnectorStage.SOURCE); + } + + // set a callback for this record + // if used the memory storage callback will be triggered after sink put success + record.setCallback(new SendMessageCallback() { + @Override + public void onSuccess(SendResult result) { + log.debug("send record to sink callback success, record: {}", record); + // commit record + sourceConnector.commit(record); + if (record.getPosition() != null) { + Optional submittedRecordPosition = prepareToUpdateRecordOffset(record); + submittedRecordPosition.ifPresent(RecordOffsetManagement.SubmittedPosition::ack); + log.debug("start wait all messages to commit"); + offsetManagement.awaitAllMessages(5000, TimeUnit.MILLISECONDS); + // update & commit offset + updateCommittableOffsets(); + commitOffsets(); + } + Optional callback = + Optional.ofNullable(record.getExtensionObj(CALLBACK_EXTENSION)).map(v -> (SendMessageCallback) v); + callback.ifPresent(cb -> cb.onSuccess(convertToSendResult(record))); + } + + @Override + public void onException(SendExceptionContext sendExceptionContext) { + isFailed = true; + // handle exception + sourceConnector.onException(record); + log.error("send record to sink callback exception, process will shut down, record: {}", record, + sendExceptionContext.getCause()); + try { + stop(); + } catch (Exception e) { + log.error("Failed to stop after exception", e); + } + } + }); + } + } + } + } + + private SendResult convertToSendResult(ConnectRecord record) { + SendResult result = new SendResult(); + result.setMessageId(record.getRecordId()); + if (StringUtils.isNotEmpty(record.getExtension("topic"))) { + result.setTopic(record.getExtension("topic")); + } + return result; + } + + private void reportVerifyRequest(ConnectRecord record, ConnectorRuntimeConfig connectorRuntimeConfig, ConnectorStage connectorStage) { + reportVerifyExecutor.submit(() -> { + try { + // use record data + recordUniqueId for md5 + String md5Str = md5(record.getData().toString() + record.getExtension("recordUniqueId")); + ReportVerifyRequest reportVerifyRequest = new ReportVerifyRequest(); + reportVerifyRequest.setTaskID(connectorRuntimeConfig.getTaskID()); + reportVerifyRequest.setJobID(connectorRuntimeConfig.getJobID()); + reportVerifyRequest.setRecordID(record.getRecordId()); + reportVerifyRequest.setRecordSig(md5Str); + reportVerifyRequest.setConnectorName( + IPUtils.getLocalAddress() + "_" + connectorRuntimeConfig.getJobID() + "_" + connectorRuntimeConfig.getRegion()); + reportVerifyRequest.setConnectorStage(connectorStage.name()); + reportVerifyRequest.setPosition(JsonUtils.toJSONString(record.getPosition())); + + Metadata metadata = 
Metadata.newBuilder().setType(ReportVerifyRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody( + Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportVerifyRequest)))) + .build()) + .build(); + + requestObserver.onNext(request); + } catch (Exception e) { + log.error("Failed to report verify request", e); + } + }); + } + + private void reportJobRequest(String jobId, JobState jobState) throws InterruptedException { + ReportJobRequest reportJobRequest = new ReportJobRequest(); + reportJobRequest.setJobID(jobId); + reportJobRequest.setState(jobState); + Metadata metadata = Metadata.newBuilder() + .setType(ReportJobRequest.class.getSimpleName()) + .build(); + Payload payload = Payload.newBuilder() + .setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportJobRequest)))) + .build()) + .build(); + requestObserver.onNext(payload); + } + + private String md5(String input) { + try { + MessageDigest md = MessageDigest.getInstance("MD5"); + byte[] messageDigest = md.digest(input.getBytes()); + StringBuilder sb = new StringBuilder(); + for (byte b : messageDigest) { + sb.append(String.format("%02x", b)); + } + return sb.toString(); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + } + + public Optional prepareToUpdateRecordOffset(ConnectRecord record) { + return Optional.of(this.offsetManagement.submitRecord(record.getPosition())); + } + + public void updateCommittableOffsets() { + RecordOffsetManagement.CommittableOffsets newOffsets = offsetManagement.committableOffsets(); + synchronized (this) { + this.committableOffsets = this.committableOffsets.updatedWith(newOffsets); + } + } + + public boolean commitOffsets() { + log.info("Start Committing offsets"); + + long timeout = System.currentTimeMillis() + 5000L; + + RecordOffsetManagement.CommittableOffsets offsetsToCommit; + synchronized (this) { + offsetsToCommit = this.committableOffsets; + this.committableOffsets = RecordOffsetManagement.CommittableOffsets.EMPTY; + } + + if (committableOffsets.isEmpty()) { + log.debug( + "Either no records were produced since the last offset commit, " + + "or every record has been filtered out by a transformation or dropped due to transformation or conversion errors."); + // We continue with the offset commit process here instead of simply returning immediately + // in order to invoke SourceTask::commit and record metrics for a successful offset commit + } else { + log.info("{} Committing offsets for {} acknowledged messages", this, committableOffsets.numCommittableMessages()); + if (committableOffsets.hasPending()) { + log.debug( + "{} There are currently {} pending messages spread across {} source partitions whose offsets will not be committed." 
+ + " The source partition with the most pending messages is {}, with {} pending messages", + this, + committableOffsets.numUncommittableMessages(), committableOffsets.numDeques(), committableOffsets.largestDequePartition(), + committableOffsets.largestDequeSize()); + } else { + log.debug( + "{} There are currently no pending messages for this offset commit; " + + "all messages dispatched to the task's producer since the last commit have been acknowledged", + this); + } + } + + // write offset to memory + offsetsToCommit.offsets().forEach(offsetStorageWriter::writeOffset); + + // begin flush + if (!offsetStorageWriter.beginFlush()) { + return true; + } + + // using offsetManagementService to persist offset + Future flushFuture = offsetStorageWriter.doFlush(); + try { + flushFuture.get(Math.max(timeout - System.currentTimeMillis(), 0), TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + log.warn("{} Flush of offsets interrupted, cancelling", this); + offsetStorageWriter.cancelFlush(); + return false; + } catch (ExecutionException e) { + log.error("{} Flush of offsets threw an unexpected exception: ", this, e); + offsetStorageWriter.cancelFlush(); + return false; + } catch (TimeoutException e) { + log.error("{} Timed out waiting to flush offsets to storage; will try again on next flush interval with latest offsets", this); + offsetStorageWriter.cancelFlush(); + return false; + } + return true; + } + + private void startSinkConnector() throws Exception { + sinkConnector.start(); + while (isRunning) { + // TODO: use consumer sub from storage to replace below + ConnectRecord connectRecord = null; + try { + connectRecord = queue.poll(5, TimeUnit.SECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + log.error("poll connect record error", e); + } + if (connectRecord == null) { + continue; + } + List connectRecordList = new ArrayList<>(); + connectRecordList.add(connectRecord); + sinkConnector.put(connectRecordList); + // if enabled incremental data reporting consistency check + if (connectorRuntimeConfig.enableIncrementalDataConsistencyCheck) { + reportVerifyRequest(connectRecord, connectorRuntimeConfig, ConnectorStage.SINK); + } + } + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntimeConfig.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntimeConfig.java new file mode 100644 index 0000000000..ab6fc3aaf5 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntimeConfig.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
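Note on the incremental data consistency check above: both the SOURCE and SINK stages report an MD5 signature derived from the record data plus its recordUniqueId extension, so the admin server can pair the two reports for the same record. A minimal sketch of that computation, mirroring the private md5 helper in this class (like the patch, it relies on the platform default charset of String.getBytes()); the helper name is illustrative and not part of this patch:

    // illustrative only; uses java.security.MessageDigest and the openconnect ConnectRecord API
    static String recordSignature(ConnectRecord record) throws NoSuchAlgorithmException {
        String input = record.getData().toString() + record.getExtension("recordUniqueId");
        StringBuilder sb = new StringBuilder();
        for (byte b : MessageDigest.getInstance("MD5").digest(input.getBytes())) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }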
+ */ + +package org.apache.eventmesh.runtime.connector; + +import org.apache.eventmesh.common.config.Config; + +import java.util.Map; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@Config(path = "classPath://connector.yaml") +public class ConnectorRuntimeConfig { + + private String connectorRuntimeInstanceId; + + private String taskID; + + private String jobID; + + private String region; + + private Map runtimeConfig; + + private String sourceConnectorType; + + private String sourceConnectorDesc; + + private Map sourceConnectorConfig; + + private String sinkConnectorType; + + private String sinkConnectorDesc; + + private Map sinkConnectorConfig; + + public boolean enableIncrementalDataConsistencyCheck = true; + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntimeFactory.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntimeFactory.java new file mode 100644 index 0000000000..d1ec2ff4e9 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/connector/ConnectorRuntimeFactory.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.connector; + +import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeFactory; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; + +public class ConnectorRuntimeFactory implements RuntimeFactory { + + @Override + public void init() throws Exception { + + } + + @Override + public Runtime createRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { + return new ConnectorRuntime(runtimeInstanceConfig); + } + + @Override + public void close() throws Exception { + + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java new file mode 100644 index 0000000000..4a68001909 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntime.java @@ -0,0 +1,503 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
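ConnectorRuntimeConfig above is bound to connector.yaml via @Config(path = "classPath://connector.yaml"). Presumably it is materialized the same way FunctionRuntime builds its own config below, through ConfigService; a sketch under that assumption:

    ConnectorRuntimeConfig connectorRuntimeConfig =
        ConfigService.getInstance().buildConfigInstance(ConnectorRuntimeConfig.class);
    String jobId = connectorRuntimeConfig.getJobID();   // e.g. the jobID value from connector.yaml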
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.function; + +import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.common.config.connector.SinkConfig; +import org.apache.eventmesh.common.config.connector.SourceConfig; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceBlockingStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.AdminServiceGrpc.AdminServiceStub; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Metadata; +import org.apache.eventmesh.common.protocol.grpc.adminserver.Payload; +import org.apache.eventmesh.common.remote.JobState; +import org.apache.eventmesh.common.remote.exception.ErrorCode; +import org.apache.eventmesh.common.remote.job.JobType; +import org.apache.eventmesh.common.remote.request.FetchJobRequest; +import org.apache.eventmesh.common.remote.request.ReportHeartBeatRequest; +import org.apache.eventmesh.common.remote.request.ReportJobRequest; +import org.apache.eventmesh.common.remote.response.FetchJobResponse; +import org.apache.eventmesh.common.utils.IPUtils; +import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.function.api.AbstractEventMeshFunctionChain; +import org.apache.eventmesh.function.api.EventMeshFunction; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder; +import org.apache.eventmesh.function.transformer.Transformer; +import org.apache.eventmesh.function.transformer.TransformerBuilder; +import org.apache.eventmesh.function.transformer.TransformerType; +import org.apache.eventmesh.openconnect.api.ConnectorCreateService; +import org.apache.eventmesh.openconnect.api.connector.SinkConnectorContext; +import org.apache.eventmesh.openconnect.api.connector.SourceConnectorContext; +import org.apache.eventmesh.openconnect.api.factory.ConnectorPluginFactory; +import org.apache.eventmesh.openconnect.api.sink.Sink; +import org.apache.eventmesh.openconnect.api.source.Source; +import org.apache.eventmesh.openconnect.offsetmgmt.api.data.ConnectRecord; +import org.apache.eventmesh.openconnect.util.ConfigUtil; +import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; + +import org.apache.commons.lang3.StringUtils; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Random; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.stub.StreamObserver; + +import com.google.protobuf.Any; +import com.google.protobuf.UnsafeByteOperations; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class FunctionRuntime implements Runtime { + + private 
final RuntimeInstanceConfig runtimeInstanceConfig; + + private ManagedChannel channel; + + private AdminServiceStub adminServiceStub; + + private AdminServiceBlockingStub adminServiceBlockingStub; + + StreamObserver responseObserver; + + StreamObserver requestObserver; + + private final LinkedBlockingQueue queue; + + private FunctionRuntimeConfig functionRuntimeConfig; + + private AbstractEventMeshFunctionChain functionChain; + + private Sink sinkConnector; + + private Source sourceConnector; + + private final ExecutorService sourceService = ThreadPoolFactory.createSingleExecutor("eventMesh-sourceService"); + + private final ExecutorService sinkService = ThreadPoolFactory.createSingleExecutor("eventMesh-sinkService"); + + private final ScheduledExecutorService heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(); + + private volatile boolean isRunning = false; + + private volatile boolean isFailed = false; + + private String adminServerAddr; + + + public FunctionRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { + this.runtimeInstanceConfig = runtimeInstanceConfig; + this.queue = new LinkedBlockingQueue<>(1000); + } + + + @Override + public void init() throws Exception { + // load function runtime config from local file + this.functionRuntimeConfig = ConfigService.getInstance().buildConfigInstance(FunctionRuntimeConfig.class); + + // init admin service + initAdminService(); + + // get remote config from admin service and update local config + getAndUpdateRemoteConfig(); + + // init connector service + initConnectorService(); + + // report status to admin server + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.INIT); + } + + private void initAdminService() { + adminServerAddr = getRandomAdminServerAddr(runtimeInstanceConfig.getAdminServiceAddr()); + // create gRPC channel + channel = ManagedChannelBuilder.forTarget(adminServerAddr).usePlaintext().build(); + + adminServiceStub = AdminServiceGrpc.newStub(channel).withWaitForReady(); + + adminServiceBlockingStub = AdminServiceGrpc.newBlockingStub(channel).withWaitForReady(); + + responseObserver = new StreamObserver() { + @Override + public void onNext(Payload response) { + log.info("runtime receive message: {} ", response); + } + + @Override + public void onError(Throwable t) { + log.error("runtime receive error message: {}", t.getMessage()); + } + + @Override + public void onCompleted() { + log.info("runtime finished receive message and completed"); + } + }; + + requestObserver = adminServiceStub.invokeBiStream(responseObserver); + } + + private String getRandomAdminServerAddr(String adminServerAddrList) { + String[] addresses = adminServerAddrList.split(";"); + if (addresses.length == 0) { + throw new IllegalArgumentException("Admin server address list is empty"); + } + Random random = new Random(); + int randomIndex = random.nextInt(addresses.length); + return addresses[randomIndex]; + } + + private void getAndUpdateRemoteConfig() { + String jobId = functionRuntimeConfig.getJobID(); + FetchJobRequest jobRequest = new FetchJobRequest(); + jobRequest.setJobID(jobId); + + Metadata metadata = Metadata.newBuilder().setType(FetchJobRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(jobRequest)))).build()) + .build(); + Payload response = adminServiceBlockingStub.invoke(request); + FetchJobResponse jobResponse = null; + if 
(response.getMetadata().getType().equals(FetchJobResponse.class.getSimpleName())) { + jobResponse = JsonUtils.parseObject(response.getBody().getValue().toStringUtf8(), FetchJobResponse.class); + } + + if (jobResponse == null || jobResponse.getErrorCode() != ErrorCode.SUCCESS) { + if (jobResponse != null) { + log.error("Failed to get remote config from admin server. ErrorCode: {}, Response: {}", + jobResponse.getErrorCode(), jobResponse); + } else { + log.error("Failed to get remote config from admin server. "); + } + isFailed = true; + try { + stop(); + } catch (Exception e) { + log.error("Failed to stop after exception", e); + } + throw new RuntimeException("Failed to get remote config from admin server."); + } + + // update local config + // source + functionRuntimeConfig.setSourceConnectorType(jobResponse.getTransportType().getSrc().getName()); + functionRuntimeConfig.setSourceConnectorDesc(jobResponse.getConnectorConfig().getSourceConnectorDesc()); + functionRuntimeConfig.setSourceConnectorConfig(jobResponse.getConnectorConfig().getSourceConnectorConfig()); + + // sink + functionRuntimeConfig.setSinkConnectorType(jobResponse.getTransportType().getDst().getName()); + functionRuntimeConfig.setSinkConnectorDesc(jobResponse.getConnectorConfig().getSinkConnectorDesc()); + functionRuntimeConfig.setSinkConnectorConfig(jobResponse.getConnectorConfig().getSinkConnectorConfig()); + + // TODO: update functionConfigs + + } + + + private void initConnectorService() throws Exception { + final JobType jobType = (JobType) functionRuntimeConfig.getRuntimeConfig().get("jobType"); + + // create sink connector + ConnectorCreateService sinkConnectorCreateService = + ConnectorPluginFactory.createConnector(functionRuntimeConfig.getSinkConnectorType() + "-Sink"); + this.sinkConnector = (Sink) sinkConnectorCreateService.create(); + + // parse sink config and init sink connector + SinkConfig sinkConfig = (SinkConfig) ConfigUtil.parse(functionRuntimeConfig.getSinkConnectorConfig(), sinkConnector.configClass()); + SinkConnectorContext sinkConnectorContext = new SinkConnectorContext(); + sinkConnectorContext.setSinkConfig(sinkConfig); + sinkConnectorContext.setRuntimeConfig(functionRuntimeConfig.getRuntimeConfig()); + sinkConnectorContext.setJobType(jobType); + sinkConnector.init(sinkConnectorContext); + + // create source connector + ConnectorCreateService sourceConnectorCreateService = + ConnectorPluginFactory.createConnector(functionRuntimeConfig.getSourceConnectorType() + "-Source"); + this.sourceConnector = (Source) sourceConnectorCreateService.create(); + + // parse source config and init source connector + SourceConfig sourceConfig = (SourceConfig) ConfigUtil.parse(functionRuntimeConfig.getSourceConnectorConfig(), sourceConnector.configClass()); + SourceConnectorContext sourceConnectorContext = new SourceConnectorContext(); + sourceConnectorContext.setSourceConfig(sourceConfig); + sourceConnectorContext.setRuntimeConfig(functionRuntimeConfig.getRuntimeConfig()); + sourceConnectorContext.setJobType(jobType); + + sourceConnector.init(sourceConnectorContext); + } + + private void reportJobRequest(String jobId, JobState jobState) { + ReportJobRequest reportJobRequest = new ReportJobRequest(); + reportJobRequest.setJobID(jobId); + reportJobRequest.setState(jobState); + Metadata metadata = Metadata.newBuilder() + .setType(ReportJobRequest.class.getSimpleName()) + .build(); + Payload payload = Payload.newBuilder() + .setMetadata(metadata) + 
.setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(reportJobRequest)))) + .build()) + .build(); + requestObserver.onNext(payload); + } + + + @Override + public void start() throws Exception { + this.isRunning = true; + + // build function chain + this.functionChain = buildFunctionChain(functionRuntimeConfig.getFunctionConfigs()); + + // start heart beat + this.heartBeatExecutor.scheduleAtFixedRate(() -> { + + ReportHeartBeatRequest heartBeat = new ReportHeartBeatRequest(); + heartBeat.setAddress(IPUtils.getLocalAddress()); + heartBeat.setReportedTimeStamp(String.valueOf(System.currentTimeMillis())); + heartBeat.setJobID(functionRuntimeConfig.getJobID()); + + Metadata metadata = Metadata.newBuilder().setType(ReportHeartBeatRequest.class.getSimpleName()).build(); + + Payload request = Payload.newBuilder().setMetadata(metadata) + .setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(Objects.requireNonNull(JsonUtils.toJSONBytes(heartBeat)))).build()) + .build(); + + requestObserver.onNext(request); + }, 5, 5, TimeUnit.SECONDS); + + // start sink service + this.sinkService.execute(() -> { + try { + startSinkConnector(); + } catch (Exception e) { + isFailed = true; + log.error("Sink Connector [{}] failed to start.", sinkConnector.name(), e); + try { + this.stop(); + } catch (Exception ex) { + log.error("Failed to stop after exception", ex); + } + throw new RuntimeException(e); + } + }); + + // start source service + this.sourceService.execute(() -> { + try { + startSourceConnector(); + } catch (Exception e) { + isFailed = true; + log.error("Source Connector [{}] failed to start.", sourceConnector.name(), e); + try { + this.stop(); + } catch (Exception ex) { + log.error("Failed to stop after exception", ex); + } + throw new RuntimeException(e); + } + }); + + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.RUNNING); + } + + private StringEventMeshFunctionChain buildFunctionChain(List> functionConfigs) { + StringEventMeshFunctionChain functionChain = new StringEventMeshFunctionChain(); + + // build function chain + for (Map functionConfig : functionConfigs) { + String functionType = String.valueOf(functionConfig.getOrDefault("functionType", "")); + if (StringUtils.isEmpty(functionType)) { + throw new IllegalArgumentException("'functionType' is required for function"); + } + + // build function based on functionType + EventMeshFunction function; + switch (functionType) { + case "filter": + function = buildFilter(functionConfig); + break; + case "transformer": + function = buildTransformer(functionConfig); + break; + default: + throw new IllegalArgumentException( + "Invalid functionType: '" + functionType + "'. 
Supported functionType: 'filter', 'transformer'"); + } + + // add function to functionChain + functionChain.addLast(function); + } + + return functionChain; + } + + + @SuppressWarnings("unchecked") + private Pattern buildFilter(Map functionConfig) { + // get condition from attributes + Object condition = functionConfig.get("condition"); + if (condition == null) { + throw new IllegalArgumentException("'condition' is required for filter function"); + } + if (condition instanceof String) { + return PatternBuilder.build(String.valueOf(condition)); + } else if (condition instanceof Map) { + return PatternBuilder.build((Map) condition); + } else { + throw new IllegalArgumentException("Invalid condition"); + } + } + + private Transformer buildTransformer(Map functionConfig) { + // get transformerType from attributes + String transformerTypeStr = String.valueOf(functionConfig.getOrDefault("transformerType", "")).toLowerCase(); + TransformerType transformerType = TransformerType.getItem(transformerTypeStr); + if (transformerType == null) { + throw new IllegalArgumentException( + "Invalid transformerType: '" + transformerTypeStr + + "'. Supported transformerType: 'constant', 'template', 'original' (case insensitive)"); + } + + // build transformer + Transformer transformer = null; + + switch (transformerType) { + case CONSTANT: + // check value + String content = String.valueOf(functionConfig.getOrDefault("content", "")); + if (StringUtils.isEmpty(content)) { + throw new IllegalArgumentException("'content' is required for constant transformer"); + } + transformer = TransformerBuilder.buildConstantTransformer(content); + break; + case TEMPLATE: + // check value and template + Object valueMap = functionConfig.get("valueMap"); + String template = String.valueOf(functionConfig.getOrDefault("template", "")); + if (valueMap == null || StringUtils.isEmpty(template)) { + throw new IllegalArgumentException("'valueMap' and 'template' are required for template transformer"); + } + transformer = TransformerBuilder.buildTemplateTransFormer(valueMap, template); + break; + case ORIGINAL: + // ORIGINAL transformer does not need any parameter + break; + default: + throw new IllegalArgumentException( + "Invalid transformerType: '" + transformerType + "', supported transformerType: 'CONSTANT', 'TEMPLATE', 'ORIGINAL'"); + } + + return transformer; + } + + + private void startSinkConnector() throws Exception { + // start sink connector + this.sinkConnector.start(); + + // try to get data from queue and send it. + while (this.isRunning) { + ConnectRecord connectRecord = null; + try { + connectRecord = queue.poll(5, TimeUnit.SECONDS); + } catch (InterruptedException e) { + log.error("Failed to poll data from queue.", e); + Thread.currentThread().interrupt(); + } + + // send data if not null + if (connectRecord != null) { + sinkConnector.put(Collections.singletonList(connectRecord)); + } + } + } + + private void startSourceConnector() throws Exception { + // start source connector + this.sourceConnector.start(); + + // try to get data from source connector and handle it. + while (this.isRunning) { + List connectorRecordList = sourceConnector.poll(); + + // handle data + if (connectorRecordList != null && !connectorRecordList.isEmpty()) { + for (ConnectRecord connectRecord : connectorRecordList) { + if (connectRecord == null || connectRecord.getData() == null) { + // If data is null, just put it into queue. 
+ this.queue.put(connectRecord); + } else { + // Apply function chain to data + String data = functionChain.apply((String) connectRecord.getData()); + if (data != null) { + if (log.isDebugEnabled()) { + log.debug("Function chain applied. Original data: {}, Transformed data: {}", connectRecord.getData(), data); + } + connectRecord.setData(data); + this.queue.put(connectRecord); + } else if (log.isDebugEnabled()) { + log.debug("Data filtered out by function chain. Original data: {}", connectRecord.getData()); + } + } + } + } + } + } + + + @Override + public void stop() throws Exception { + log.info("FunctionRuntime is stopping..."); + + isRunning = false; + + if (isFailed) { + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.FAIL); + } else { + reportJobRequest(functionRuntimeConfig.getJobID(), JobState.COMPLETE); + } + + sinkConnector.stop(); + sourceConnector.stop(); + sinkService.shutdown(); + sourceService.shutdown(); + heartBeatExecutor.shutdown(); + + requestObserver.onCompleted(); + if (channel != null && !channel.isShutdown()) { + channel.shutdown(); + } + + log.info("FunctionRuntime stopped."); + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java new file mode 100644 index 0000000000..4d57c83e82 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeConfig.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
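For reference, a functionConfigs entry consumed by buildFunctionChain above could be assembled as follows. The keys mirror those read by buildFilter and buildTransformer; the condition value is only a placeholder whose real schema is defined by the eventmesh-function-filter PatternBuilder (assumes java.util imports):

    Map<String, Object> filter = new HashMap<>();
    filter.put("functionType", "filter");
    filter.put("condition", "<pattern understood by PatternBuilder>"); // placeholder value

    Map<String, Object> transformer = new HashMap<>();
    transformer.put("functionType", "transformer");
    transformer.put("transformerType", "constant");
    transformer.put("content", "{\"status\":\"ok\"}");

    List<Map<String, Object>> functionConfigs = Arrays.asList(filter, transformer);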
+ */ + +package org.apache.eventmesh.runtime.function; + +import org.apache.eventmesh.common.config.Config; + +import java.util.List; +import java.util.Map; + + +import lombok.Data; + +@Data +@Config(path = "classPath://function.yaml") +public class FunctionRuntimeConfig { + + private String functionRuntimeInstanceId; + + private String taskID; + + private String jobID; + + private String region; + + private Map<String, Object> runtimeConfig; + + private String sourceConnectorType; + + private String sourceConnectorDesc; + + private Map<String, Object> sourceConnectorConfig; + + private String sinkConnectorType; + + private String sinkConnectorDesc; + + private Map<String, Object> sinkConnectorConfig; + + private List<Map<String, Object>> functionConfigs; + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java new file mode 100644 index 0000000000..40346e272f --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/FunctionRuntimeFactory.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.function; + +import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeFactory; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; + +public class FunctionRuntimeFactory implements RuntimeFactory { + + @Override + public void init() throws Exception { + + } + + @Override + public Runtime createRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { + return new FunctionRuntime(runtimeInstanceConfig); + } + + @Override + public void close() throws Exception { + + } + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java new file mode 100644 index 0000000000..0035999ecb --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/function/StringEventMeshFunctionChain.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.function; + +import org.apache.eventmesh.function.api.AbstractEventMeshFunctionChain; +import org.apache.eventmesh.function.api.EventMeshFunction; + +/** + * String function chain: applies each function in the chain, in order, to a String payload (the data of a ConnectRecord). + */ +public class StringEventMeshFunctionChain extends AbstractEventMeshFunctionChain { + + @Override + public String apply(String content) { + for (EventMeshFunction function : functions) { + if (content == null) { + break; + } + content = function.apply(content); + } + return content; + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/ConnectorManager.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/ConnectorManager.java new file mode 100644 index 0000000000..2354a350db --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/ConnectorManager.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.manager; + +public class ConnectorManager { +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/FunctionManager.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/FunctionManager.java new file mode 100644 index 0000000000..8c88be9986 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/FunctionManager.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
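A minimal usage sketch for StringEventMeshFunctionChain above, using only APIs that appear in this patch (addLast and TransformerBuilder.buildConstantTransformer); the payload values are illustrative:

    StringEventMeshFunctionChain chain = new StringEventMeshFunctionChain();
    chain.addLast(TransformerBuilder.buildConstantTransformer("{\"status\":\"ok\"}"));
    String transformed = chain.apply("{\"source\":\"demo\"}");
    // expected: the constant content; a null result would mean the event was filtered out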
+ */ + +package org.apache.eventmesh.runtime.manager; + +public class FunctionManager { +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/MeshManager.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/MeshManager.java new file mode 100644 index 0000000000..cc67b9fb40 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/manager/MeshManager.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.manager; + +public class MeshManager { +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntime.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntime.java new file mode 100644 index 0000000000..eb186c7658 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntime.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.mesh; + +import org.apache.eventmesh.runtime.Runtime; + +public class MeshRuntime implements Runtime { + + @Override + public void init() throws Exception { + + } + + @Override + public void start() throws Exception { + + } + + @Override + public void stop() throws Exception { + + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntimeConfig.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntimeConfig.java new file mode 100644 index 0000000000..cd21eb1a11 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntimeConfig.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.mesh; + +public class MeshRuntimeConfig { +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntimeFactory.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntimeFactory.java new file mode 100644 index 0000000000..32a3f2e38e --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/mesh/MeshRuntimeFactory.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.mesh; + +import org.apache.eventmesh.runtime.Runtime; +import org.apache.eventmesh.runtime.RuntimeFactory; +import org.apache.eventmesh.runtime.RuntimeInstanceConfig; + +public class MeshRuntimeFactory implements RuntimeFactory { + + @Override + public void init() throws Exception { + + } + + @Override + public Runtime createRuntime(RuntimeInstanceConfig runtimeInstanceConfig) { + return null; + } + + @Override + public void close() throws Exception { + + } + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/meta/MetaStorage.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/meta/MetaStorage.java new file mode 100644 index 0000000000..41da6994f7 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/meta/MetaStorage.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.eventmesh.runtime.meta; + +import org.apache.eventmesh.api.exception.MetaException; +import org.apache.eventmesh.api.meta.MetaService; +import org.apache.eventmesh.api.meta.MetaServiceListener; +import org.apache.eventmesh.api.meta.bo.EventMeshAppSubTopicInfo; +import org.apache.eventmesh.api.meta.bo.EventMeshServicePubTopicInfo; +import org.apache.eventmesh.api.meta.dto.EventMeshDataInfo; +import org.apache.eventmesh.api.meta.dto.EventMeshRegisterInfo; +import org.apache.eventmesh.api.meta.dto.EventMeshUnRegisterInfo; +import org.apache.eventmesh.spi.EventMeshExtensionFactory; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class MetaStorage { + + private static final Map META_CACHE = new HashMap<>(16); + + private MetaService metaService; + + private final AtomicBoolean inited = new AtomicBoolean(false); + + private final AtomicBoolean started = new AtomicBoolean(false); + + private final AtomicBoolean shutdown = new AtomicBoolean(false); + + private MetaStorage() { + + } + + public static MetaStorage getInstance(String metaPluginType) { + return META_CACHE.computeIfAbsent(metaPluginType, MetaStorage::metaStorageBuilder); + } + + private static MetaStorage metaStorageBuilder(String metaPluginType) { + MetaService metaServiceExt = EventMeshExtensionFactory.getExtension(MetaService.class, metaPluginType); + if (metaServiceExt == null) { + String errorMsg = "can't load the metaService plugin, please check."; + log.error(errorMsg); + throw new RuntimeException(errorMsg); + } + MetaStorage metaStorage = new MetaStorage(); + metaStorage.metaService = metaServiceExt; + + return metaStorage; + } + + public void init() throws MetaException { + if (!inited.compareAndSet(false, true)) { + return; + } + metaService.init(); + } + + public void start() throws MetaException { + if (!started.compareAndSet(false, true)) { + return; + } + metaService.start(); + } + + public void shutdown() throws MetaException { + inited.compareAndSet(true, false); + started.compareAndSet(true, false); + if (!shutdown.compareAndSet(false, true)) { + return; + } + synchronized (this) { + metaService.shutdown(); + } + } + + public List findEventMeshInfoByCluster(String clusterName) throws MetaException { + return metaService.findEventMeshInfoByCluster(clusterName); + } + + public List findAllEventMeshInfo() throws MetaException { + return metaService.findAllEventMeshInfo(); + } + + public Map> findEventMeshClientDistributionData(String clusterName, String group, String purpose) + throws MetaException { + return metaService.findEventMeshClientDistributionData(clusterName, group, purpose); + } + + public void registerMetadata(Map metadata) { + metaService.registerMetadata(metadata); + } + + public void updateMetaData(Map metadata) { + metaService.updateMetaData(metadata); + } + + public boolean register(EventMeshRegisterInfo eventMeshRegisterInfo) throws MetaException { + return metaService.register(eventMeshRegisterInfo); + } + + public boolean unRegister(EventMeshUnRegisterInfo eventMeshUnRegisterInfo) throws MetaException { + return metaService.unRegister(eventMeshUnRegisterInfo); + } + + public List findEventMeshServicePubTopicInfos() throws Exception { + return metaService.findEventMeshServicePubTopicInfos(); + } + + public EventMeshAppSubTopicInfo findEventMeshAppSubTopicInfo(String group) throws Exception { + return 
metaService.findEventMeshAppSubTopicInfoByGroup(group); + } + + public Map getMetaData(String key, boolean fuzzyEnabled) { + return metaService.getMetaData(key, fuzzyEnabled); + } + + public void getMetaDataWithListener(MetaServiceListener metaServiceListener, String key) throws Exception { + metaService.getMetaDataWithListener(metaServiceListener, key); + } + + public AtomicBoolean getInited() { + return inited; + } + + public AtomicBoolean getStarted() { + return started; + } +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/util/BannerUtil.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/util/BannerUtil.java new file mode 100644 index 0000000000..2569494189 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/util/BannerUtil.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.util; + +import lombok.extern.slf4j.Slf4j; + +/** + * EventMesh banner util + */ +@Slf4j +public class BannerUtil { + + private static final String LOGO = + " EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEME EMEMEMEME EMEMEMEME " + System.lineSeparator() + + " EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEME EMEMEMEMEMEMEMEME EMEMEMEMEMEMEMEMEM " + System.lineSeparator() + + " EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEM EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEME " + System.lineSeparator() + + "EMEMEMEMEMEM EMEMEMEMEM EMEMEMEMEMEMEMEME EMEMEMEMEME" + System.lineSeparator() + + "EMEMEMEME EMEMEMEMEM EMEMEMEMEMEME EMEMEMEME" + System.lineSeparator() + + "EMEMEME EMEMEMEMEM EMEME EMEMEMEM" + System.lineSeparator() + + "EMEMEME EMEMEMEMEM EMEMEME" + System.lineSeparator() + + "EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEM EMEMEMEMEM EMEMEME" + System.lineSeparator() + + "EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEM EMEMEMEMEM EMEMEME" + System.lineSeparator() + + "EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEM EMEMEMEMEM EMEMEME" + System.lineSeparator() + + "EMEMEME EMEMEMEMEM EMEMEME" + System.lineSeparator() + + "EMEMEME EMEMEMEMEM EMEMEME" + System.lineSeparator() + + "EMEMEMEME EMEMEMEMEM EMEMEMEME" + System.lineSeparator() + + "EMEMEMEMEMEM EMEMEMEMEM EMEMEMEMEMEM" + System.lineSeparator() + + " EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEME EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEM " + System.lineSeparator() + + " EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEM EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEME " + System.lineSeparator() + + " MEMEMEMEMEMEMEMEMEMEMEMEMEMEME EMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEMEME"; + + private static final String LOGONAME = + " ____ _ __ __ _ " + System.lineSeparator() + + " / ____|_ _____ _ __ | |_| \\/ | ___ ___| |__ " + System.lineSeparator() + + " | __|\\ \\ / / _ | '_ \\| __| |\\/| |/ _ |/ __| '_ \\ " + System.lineSeparator() + + 
" | |___ \\ V / __| | | | |_| | | | __|\\__ \\ | | |" + System.lineSeparator() + + " \\ ____| \\_/ \\___|_| |_|\\__|_| |_|\\___||___/_| |_|"; + + public static void generateBanner() { + String banner = + System.lineSeparator() + + System.lineSeparator() + + LOGO + + System.lineSeparator() + + LOGONAME + + System.lineSeparator(); + if (log.isInfoEnabled()) { + log.info(banner); + } else { + System.out.print(banner); + } + } + +} diff --git a/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/util/RuntimeUtils.java b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/util/RuntimeUtils.java new file mode 100644 index 0000000000..e389357d93 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/java/org/apache/eventmesh/runtime/util/RuntimeUtils.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.util; + +public class RuntimeUtils { +} diff --git a/eventmesh-runtime-v2/src/main/resources/connector.yaml b/eventmesh-runtime-v2/src/main/resources/connector.yaml new file mode 100644 index 0000000000..3e407fa3e9 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/resources/connector.yaml @@ -0,0 +1,23 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +taskID: 9c18a0d2-7a61-482c-8275-34f8c2786cea +jobID: a01fd5e1-d295-4b89-99bc-0ae23eb85acf +region: region1 +runtimeConfig: # this used for connector runtime config + offsetStoragePluginType: admin + offsetStorageAddr: "127.0.0.1:8081;127.0.0.1:8081" \ No newline at end of file diff --git a/eventmesh-runtime-v2/src/main/resources/function.yaml b/eventmesh-runtime-v2/src/main/resources/function.yaml new file mode 100644 index 0000000000..eae2b063ec --- /dev/null +++ b/eventmesh-runtime-v2/src/main/resources/function.yaml @@ -0,0 +1,21 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +taskID: c6233632-ab9a-4aba-904f-9d22fba6aa74 +jobID: 8190fe5b-1f9b-4815-8983-2467e76edbf0 +region: region1 + diff --git a/eventmesh-runtime-v2/src/main/resources/runtime.yaml b/eventmesh-runtime-v2/src/main/resources/runtime.yaml new file mode 100644 index 0000000000..9ac36f27b0 --- /dev/null +++ b/eventmesh-runtime-v2/src/main/resources/runtime.yaml @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +componentType: CONNECTOR +registryEnabled: false +registryServerAddr: 127.0.0.1:8085 +registryPluginType: nacos +storagePluginType: memory +adminServiceName: eventmesh-admin +adminServiceAddr: "127.0.0.1:8081;127.0.0.1:8081" diff --git a/eventmesh-runtime/build.gradle b/eventmesh-runtime/build.gradle index 95924faad4..b016e18bfe 100644 --- a/eventmesh-runtime/build.gradle +++ b/eventmesh-runtime/build.gradle @@ -36,8 +36,10 @@ dependencies { implementation "commons-validator:commons-validator" implementation project(":eventmesh-common") - implementation project(":eventmesh-filter") implementation project(":eventmesh-spi") + implementation project(":eventmesh-function:eventmesh-function-api") + implementation project(":eventmesh-function:eventmesh-function-filter") + implementation project(":eventmesh-function:eventmesh-function-transformer") implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") implementation project(":eventmesh-storage-plugin:eventmesh-storage-standalone") implementation project(":eventmesh-storage-plugin:eventmesh-storage-rocketmq") @@ -45,7 +47,6 @@ dependencies { implementation project(":eventmesh-security-plugin:eventmesh-security-acl") implementation project(":eventmesh-security-plugin:eventmesh-security-auth-http-basic") implementation project(":eventmesh-security-plugin:eventmesh-security-auth-token") - implementation project(":eventmesh-transformer") implementation project(":eventmesh-meta:eventmesh-meta-api") implementation project(":eventmesh-meta:eventmesh-meta-nacos") implementation project(":eventmesh-protocol-plugin:eventmesh-protocol-api") diff --git a/eventmesh-runtime/conf/admin-server.jks b/eventmesh-runtime/conf/admin-server.jks new file mode 100644 index 0000000000..92deb897a4 Binary files /dev/null and 
b/eventmesh-runtime/conf/admin-server.jks differ diff --git a/eventmesh-runtime/conf/eventmesh.properties b/eventmesh-runtime/conf/eventmesh.properties index 4984181ca8..9c2e5ee0d4 100644 --- a/eventmesh-runtime/conf/eventmesh.properties +++ b/eventmesh-runtime/conf/eventmesh.properties @@ -24,11 +24,9 @@ eventMesh.sysid=0000 eventMesh.server.tcp.port=10000 eventMesh.server.http.port=10105 eventMesh.server.grpc.port=10205 -# HTTP Admin Server eventMesh.server.admin.http.port=10106 -########################## EventMesh TCP Configuration ########################## -eventMesh.server.tcp.enabled=true +########################## EventMesh Network Configuration ########################## eventMesh.server.tcp.readerIdleSeconds=120 eventMesh.server.tcp.writerIdleSeconds=120 eventMesh.server.tcp.allIdleSeconds=120 @@ -64,10 +62,30 @@ eventMesh.server.retry.plugin.type=default eventMesh.server.gracefulShutdown.sleepIntervalInMills=1000 eventMesh.server.rebalanceRedirect.sleepIntervalInMills=200 +# TLS +eventMesh.server.useTls.enabled=false +eventMesh.server.ssl.protocol=TLSv1.1 +eventMesh.server.ssl.cer=sChat2.jks +eventMesh.server.ssl.pass=sNetty + # ip address blacklist eventMesh.server.blacklist.ipv4=0.0.0.0/8,127.0.0.0/8,169.254.0.0/16,255.255.255.255/32 eventMesh.server.blacklist.ipv6=::/128,::1/128,ff00::/8 +########################## EventMesh HTTP Admin Configuration ########################## +# thread pool +eventMesh.server.admin.threads.num=2 + +# TLS +eventMesh.server.admin.useTls.enabled=false +eventMesh.server.admin.ssl.protocol=TLSv1.3 +eventMesh.server.admin.ssl.cer=admin-server.jks +eventMesh.server.admin.ssl.pass=eventmesh-admin-server + +# ip address blacklist +eventMesh.server.admin.blacklist.ipv4=0.0.0.0/8,127.0.0.0/8,169.254.0.0/16,255.255.255.255/32 +eventMesh.server.admin.blacklist.ipv6=::/128,::1/128,ff00::/8 + ########################## EventMesh Plugin Configuration ########################## # storage plugin eventMesh.storage.plugin.type=standalone diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/AdminHandlerManager.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/AdminHandlerManager.java index 1da928dc10..35d01a5e3a 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/AdminHandlerManager.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/AdminHandlerManager.java @@ -24,7 +24,6 @@ import org.apache.eventmesh.runtime.admin.handler.v1.HTTPClientHandler; import org.apache.eventmesh.runtime.admin.handler.v1.InsertWebHookConfigHandler; import org.apache.eventmesh.runtime.admin.handler.v1.MetaHandler; -import org.apache.eventmesh.runtime.admin.handler.v1.MetricsHandler; import org.apache.eventmesh.runtime.admin.handler.v1.QueryRecommendEventMeshHandler; import org.apache.eventmesh.runtime.admin.handler.v1.QueryWebHookConfigByIdHandler; import org.apache.eventmesh.runtime.admin.handler.v1.QueryWebHookConfigByManufacturerHandler; @@ -100,7 +99,6 @@ public void registerHttpHandler() { eventMeshTCPServer.getEventMeshTCPConfiguration(), eventMeshHTTPServer.getEventMeshHttpConfiguration(), eventMeshGrpcServer.getEventMeshGrpcConfiguration())); - initHandler(new MetricsHandler(eventMeshHTTPServer, eventMeshTCPServer)); initHandler(new TopicHandler(eventMeshTCPServer.getEventMeshTCPConfiguration().getEventMeshStoragePluginType())); initHandler(new EventHandler(eventMeshTCPServer.getEventMeshTCPConfiguration().getEventMeshStoragePluginType())); 
initHandler(new MetaHandler(eventMeshMetaStorage)); diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v1/MetricsHandler.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v1/MetricsHandler.java deleted file mode 100644 index a2be02aaf1..0000000000 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v1/MetricsHandler.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.eventmesh.runtime.admin.handler.v1; - -import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.runtime.admin.handler.AbstractHttpHandler; -import org.apache.eventmesh.runtime.admin.response.v1.GetMetricsResponse; -import org.apache.eventmesh.runtime.boot.EventMeshHTTPServer; -import org.apache.eventmesh.runtime.boot.EventMeshTCPServer; -import org.apache.eventmesh.runtime.common.EventMeshHttpHandler; -import org.apache.eventmesh.runtime.metrics.http.HttpMetrics; -import org.apache.eventmesh.runtime.metrics.tcp.TcpMetrics; - -import java.io.IOException; - -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.http.HttpRequest; - -import lombok.extern.slf4j.Slf4j; - -/** - * This class handles the {@code /metrics} endpoint, corresponding to the {@code eventmesh-dashboard} path {@code /metrics}. - *

- * This handler is responsible for retrieving summary information of metrics, including HTTP and TCP metrics. - * - * @see AbstractHttpHandler - */ - -@Slf4j -@EventMeshHttpHandler(path = "/metrics") -public class MetricsHandler extends AbstractHttpHandler { - - private final HttpMetrics httpMetrics; - private final TcpMetrics tcpMetrics; - - /** - * Constructs a new instance with the provided EventMesh server instance. - * - * @param eventMeshHTTPServer the HTTP server instance of EventMesh - * @param eventMeshTcpServer the TCP server instance of EventMesh - */ - public MetricsHandler(EventMeshHTTPServer eventMeshHTTPServer, - EventMeshTCPServer eventMeshTcpServer) { - super(); - this.httpMetrics = eventMeshHTTPServer.getEventMeshHttpMetricsManager().getHttpMetrics(); - this.tcpMetrics = eventMeshTcpServer.getEventMeshTcpMetricsManager().getTcpMetrics(); - } - - @Override - protected void get(HttpRequest httpRequest, ChannelHandlerContext ctx) throws IOException { - GetMetricsResponse getMetricsResponse = new GetMetricsResponse( - httpMetrics.maxHTTPTPS(), - httpMetrics.avgHTTPTPS(), - httpMetrics.maxHTTPCost(), - httpMetrics.avgHTTPCost(), - httpMetrics.avgHTTPBodyDecodeCost(), - httpMetrics.getHttpDiscard(), - httpMetrics.maxSendBatchMsgTPS(), - httpMetrics.avgSendBatchMsgTPS(), - httpMetrics.getSendBatchMsgNumSum(), - httpMetrics.getSendBatchMsgFailNumSum(), - httpMetrics.getSendBatchMsgFailRate(), - httpMetrics.getSendBatchMsgDiscardNumSum(), - httpMetrics.maxSendMsgTPS(), - httpMetrics.avgSendMsgTPS(), - httpMetrics.getSendMsgNumSum(), - httpMetrics.getSendMsgFailNumSum(), - httpMetrics.getSendMsgFailRate(), - httpMetrics.getReplyMsgNumSum(), - httpMetrics.getReplyMsgFailNumSum(), - httpMetrics.maxPushMsgTPS(), - httpMetrics.avgPushMsgTPS(), - httpMetrics.getHttpPushMsgNumSum(), - httpMetrics.getHttpPushFailNumSum(), - httpMetrics.getHttpPushMsgFailRate(), - httpMetrics.maxHTTPPushLatency(), - httpMetrics.avgHTTPPushLatency(), - httpMetrics.getBatchMsgQueueSize(), - httpMetrics.getSendMsgQueueSize(), - httpMetrics.getPushMsgQueueSize(), - httpMetrics.getHttpRetryQueueSize(), - httpMetrics.avgBatchSendMsgCost(), - httpMetrics.avgSendMsgCost(), - httpMetrics.avgReplyMsgCost(), - - tcpMetrics.getRetrySize(), - tcpMetrics.getClient2eventMeshTPS(), - tcpMetrics.getEventMesh2mqTPS(), - tcpMetrics.getMq2eventMeshTPS(), - tcpMetrics.getEventMesh2clientTPS(), - tcpMetrics.getAllTPS(), - tcpMetrics.getAllConnections(), - tcpMetrics.getSubTopicNum() - ); - String result = JsonUtils.toJSONString(getMetricsResponse); - writeJson(ctx, result); - } -} diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v2/ConfigurationHandler.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v2/ConfigurationHandler.java index 1c149ca521..234e0e79cb 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v2/ConfigurationHandler.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/admin/handler/v2/ConfigurationHandler.java @@ -27,6 +27,7 @@ import org.apache.eventmesh.runtime.configuration.EventMeshGrpcConfiguration; import org.apache.eventmesh.runtime.configuration.EventMeshHTTPConfiguration; import org.apache.eventmesh.runtime.configuration.EventMeshTCPConfiguration; +import org.apache.eventmesh.runtime.constants.EventMeshVersion; import org.apache.eventmesh.runtime.util.HttpRequestUtil; import java.lang.reflect.Field; @@ -126,7 +127,7 @@ protected void get(HttpRequest httpRequest, 
ChannelHandlerContext ctx) { eventMeshTCPConfiguration, eventMeshHTTPConfiguration, eventMeshGrpcConfiguration, - "v1.10.0-release" // TODO get version number after merging https://github.com/apache/eventmesh/pull/4055 + EventMeshVersion.getCurrentVersionDesc() ); String json = JSON.toJSONString(Result.success(getConfigurationResponse), filters.toArray(new Filter[0])); writeJson(ctx, json); diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java index 97f05a8764..3fcca51832 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractHTTPServer.java @@ -86,6 +86,8 @@ import io.netty.util.ReferenceCountUtil; import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** @@ -96,6 +98,8 @@ public abstract class AbstractHTTPServer extends AbstractRemotingServer { private final transient EventMeshHTTPConfiguration eventMeshHttpConfiguration; + @Getter + @Setter private EventMeshHttpMetricsManager eventMeshHttpMetricsManager; private static final DefaultHttpDataFactory DEFAULT_HTTP_DATA_FACTORY = new DefaultHttpDataFactory(false); @@ -105,8 +109,14 @@ public abstract class AbstractHTTPServer extends AbstractRemotingServer { } protected final transient AtomicBoolean started = new AtomicBoolean(false); + + @Getter private final transient boolean useTLS; + + @Getter + @Setter private Boolean useTrace = false; // Determine whether trace is enabled + private static final int MAX_CONNECTIONS = 20_000; /** @@ -118,10 +128,13 @@ public abstract class AbstractHTTPServer extends AbstractRemotingServer { private HttpConnectionHandler httpConnectionHandler; private HttpDispatcher httpDispatcher; + @Setter + @Getter private HandlerService handlerService; private final transient ThreadPoolExecutor asyncContextCompleteHandler = ThreadPoolFactory.createThreadPoolExecutor(10, 10, "EventMesh-http-asyncContext"); + @Getter private final HTTPThreadPoolGroup httpThreadPoolGroup; public AbstractHTTPServer(final int port, final boolean useTLS, @@ -523,32 +536,4 @@ protected void initChannel(final SocketChannel channel) { httpDispatcher); } } - - public void setUseTrace(final Boolean useTrace) { - this.useTrace = useTrace; - } - - public Boolean getUseTrace() { - return useTrace; - } - - public EventMeshHttpMetricsManager getEventMeshHttpMetricsManager() { - return eventMeshHttpMetricsManager; - } - - public void setEventMeshHttpMetricsManager(final EventMeshHttpMetricsManager eventMeshHttpMetricsManager) { - this.eventMeshHttpMetricsManager = eventMeshHttpMetricsManager; - } - - public HTTPThreadPoolGroup getHttpThreadPoolGroup() { - return httpThreadPoolGroup; - } - - public HandlerService getHandlerService() { - return handlerService; - } - - public void setHandlerService(HandlerService handlerService) { - this.handlerService = handlerService; - } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractRemotingServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractRemotingServer.java index fb09116620..e02637ec39 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractRemotingServer.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/AbstractRemotingServer.java @@ -22,7 +22,6 @@ 
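The AbstractHTTPServer and AbstractRemotingServer hunks in this patch swap hand-written accessors for Lombok @Getter/@Setter annotations. Assuming standard Lombok behavior, the refactor is behavior-preserving; the tiny sketch below (class name hypothetical) shows the equivalence for one of the converted fields.

import lombok.Getter;
import lombok.Setter;

public class LombokAccessorSketch {

    // Lombok generates public int getPort() and public void setPort(int port),
    // matching the accessors deleted from AbstractRemotingServer by this patch.
    @Getter
    @Setter
    private int port;
}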
import org.apache.eventmesh.common.utils.ThreadUtils; import org.apache.eventmesh.runtime.core.protocol.producer.ProducerManager; -import java.util.Objects; import java.util.concurrent.TimeUnit; import io.netty.channel.EventLoopGroup; @@ -31,22 +30,32 @@ import io.netty.channel.nio.NioEventLoopGroup; import io.netty.util.concurrent.EventExecutorGroup; +import lombok.Getter; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** * The most basic server */ @Slf4j +@Getter public abstract class AbstractRemotingServer implements RemotingServer { private static final int MAX_THREADS = Runtime.getRuntime().availableProcessors(); private static final int DEFAULT_SLEEP_SECONDS = 30; + @Setter private EventLoopGroup bossGroup; + + @Setter private EventLoopGroup ioGroup; + + @Setter private EventExecutorGroup workerGroup; + protected ProducerManager producerManager; + @Setter private int port; protected void buildBossGroup(final String threadPrefix) { @@ -75,10 +84,6 @@ protected void initProducerManager() throws Exception { producerManager.init(); } - public ProducerManager getProducerManager() { - return producerManager; - } - public void init(final String threadPrefix) throws Exception { buildBossGroup(threadPrefix); buildIOGroup(threadPrefix); @@ -94,16 +99,16 @@ public void shutdown() throws Exception { bossGroup.shutdownGracefully(); log.info("shutdown bossGroup"); } - if (Objects.isNull(producerManager)) { + if (producerManager != null) { producerManager.shutdown(); } + ThreadUtils.randomPause(TimeUnit.SECONDS.toMillis(DEFAULT_SLEEP_SECONDS)); if (ioGroup != null) { ioGroup.shutdownGracefully(); log.info("shutdown ioGroup"); } - if (workerGroup != null) { workerGroup.shutdownGracefully(); @@ -114,36 +119,4 @@ public void shutdown() throws Exception { protected boolean useEpoll() { return SystemUtils.isLinuxPlatform() && Epoll.isAvailable(); } - - public EventLoopGroup getBossGroup() { - return bossGroup; - } - - public void setBossGroup(final EventLoopGroup bossGroup) { - this.bossGroup = bossGroup; - } - - public EventLoopGroup getIoGroup() { - return ioGroup; - } - - public void setIoGroup(final EventLoopGroup ioGroup) { - this.ioGroup = ioGroup; - } - - public EventExecutorGroup getWorkerGroup() { - return workerGroup; - } - - public void setWorkerGroup(final EventExecutorGroup workerGroup) { - this.workerGroup = workerGroup; - } - - public int getPort() { - return port; - } - - public void setPort(final int port) { - this.port = port; - } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminBootstrap.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminBootstrap.java index e125475ec8..b170afd3ab 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminBootstrap.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminBootstrap.java @@ -17,23 +17,38 @@ package org.apache.eventmesh.runtime.boot; +import static org.apache.eventmesh.common.Constants.ADMIN; + +import org.apache.eventmesh.common.config.ConfigService; +import org.apache.eventmesh.common.utils.ConfigurationContextUtil; +import org.apache.eventmesh.runtime.configuration.EventMeshAdminConfiguration; + +import lombok.Getter; + public class EventMeshAdminBootstrap implements EventMeshBootstrap { + @Getter private EventMeshAdminServer eventMeshAdminServer; - private EventMeshServer eventMeshServer; + private final EventMeshAdminConfiguration eventMeshAdminConfiguration; + + private 
final EventMeshServer eventMeshServer; public EventMeshAdminBootstrap(EventMeshServer eventMeshServer) { this.eventMeshServer = eventMeshServer; + + ConfigService configService = ConfigService.getInstance(); + this.eventMeshAdminConfiguration = configService.buildConfigInstance(EventMeshAdminConfiguration.class); + + ConfigurationContextUtil.putIfAbsent(ADMIN, eventMeshAdminConfiguration); } @Override public void init() throws Exception { if (eventMeshServer != null) { - eventMeshAdminServer = new EventMeshAdminServer(eventMeshServer); + eventMeshAdminServer = new EventMeshAdminServer(eventMeshServer, eventMeshAdminConfiguration); eventMeshAdminServer.init(); } - } @Override @@ -41,7 +56,6 @@ public void start() throws Exception { if (eventMeshAdminServer != null) { eventMeshAdminServer.start(); } - } @Override diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminServer.java index 4abba74a54..5e98fc690b 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminServer.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshAdminServer.java @@ -17,13 +17,22 @@ package org.apache.eventmesh.runtime.boot; +import org.apache.eventmesh.common.ThreadPoolFactory; +import org.apache.eventmesh.common.protocol.http.common.RequestCode; import org.apache.eventmesh.runtime.admin.handler.AdminHandlerManager; import org.apache.eventmesh.runtime.admin.handler.HttpHandler; +import org.apache.eventmesh.runtime.configuration.EventMeshAdminConfiguration; +import org.apache.eventmesh.runtime.core.protocol.http.processor.AdminMetricsProcessor; import org.apache.eventmesh.runtime.util.HttpResponseUtils; import java.net.URI; import java.util.Objects; import java.util.Optional; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLEngine; import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.ChannelFutureListener; @@ -41,27 +50,37 @@ import io.netty.handler.codec.http.HttpRequestDecoder; import io.netty.handler.codec.http.HttpResponseEncoder; import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.ssl.SslHandler; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; @Slf4j public class EventMeshAdminServer extends AbstractHTTPServer { + private final EventMeshAdminConfiguration eventMeshAdminConfiguration; + private HttpConnectionHandler httpConnectionHandler = new HttpConnectionHandler(); private AdminHandlerManager adminHandlerManager; - public EventMeshAdminServer(EventMeshServer eventMeshServer) { - super(eventMeshServer.getEventMeshTCPServer().getEventMeshTCPConfiguration().getEventMeshServerAdminPort(), false, - eventMeshServer.getEventMeshHTTPServer().getEventMeshHttpConfiguration()); + @Getter + private ThreadPoolExecutor adminMetricsExecutor; + + public EventMeshAdminServer(final EventMeshServer eventMeshServer, final EventMeshAdminConfiguration eventMeshAdminConfiguration) { + super(eventMeshAdminConfiguration.getEventMeshServerAdminPort(), + eventMeshAdminConfiguration.isEventMeshServerUseTls(), + eventMeshAdminConfiguration); + this.eventMeshAdminConfiguration = eventMeshAdminConfiguration; adminHandlerManager = new AdminHandlerManager(eventMeshServer); } - @Override public void init() throws Exception { super.init("eventMesh-admin"); + initThreadPool(); 
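With the constructor change above, the admin server is driven by its own EventMeshAdminConfiguration instead of borrowing the TCP and HTTP configurations. A wiring sketch follows, using only calls that appear in this patch (ConfigService.buildConfigInstance, the new two-argument constructor, init, start); the wrapper class and method names are assumptions for illustration.

import org.apache.eventmesh.common.config.ConfigService;
import org.apache.eventmesh.runtime.boot.EventMeshAdminServer;
import org.apache.eventmesh.runtime.boot.EventMeshServer;
import org.apache.eventmesh.runtime.configuration.EventMeshAdminConfiguration;

public class AdminServerWiringSketch {

    // eventMeshServer is assumed to be an already-constructed EventMeshServer instance.
    static EventMeshAdminServer startAdminServer(EventMeshServer eventMeshServer) throws Exception {
        // Build the dedicated admin configuration (eventMesh.server.admin.* keys).
        ConfigService configService = ConfigService.getInstance();
        EventMeshAdminConfiguration adminConfiguration =
            configService.buildConfigInstance(EventMeshAdminConfiguration.class);

        // The admin server now takes its own configuration rather than the TCP/HTTP ones.
        EventMeshAdminServer adminServer = new EventMeshAdminServer(eventMeshServer, adminConfiguration);
        adminServer.init();   // registers admin handlers, the metrics thread pool, and request processors
        adminServer.start();  // listens on eventMesh.server.admin.http.port (10106 by default)
        return adminServer;
    }
}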
adminHandlerManager.registerHttpHandler(); + registerAdminRequestProcessor(); } @Override @@ -71,8 +90,9 @@ public void start() throws Exception { try { bootstrap.group(this.getBossGroup(), this.getIoGroup()) .channel(useEpoll() ? EpollServerSocketChannel.class : NioServerSocketChannel.class) - .childHandler(new AdminServerInitializer()) - .childOption(ChannelOption.SO_KEEPALIVE, Boolean.TRUE); + .childHandler(new AdminServerInitializer( + this.isUseTLS() ? SSLContextFactory.getSslContext(eventMeshAdminConfiguration) : null, this.isUseTLS())) + .childOption(ChannelOption.AUTO_CLOSE, Boolean.TRUE); log.info("AdminHttpServer[port={}] started.", this.getPort()); @@ -95,29 +115,38 @@ public void start() throws Exception { started.compareAndSet(false, true); } - public void parseHttpRequest(ChannelHandlerContext ctx, HttpRequest httpRequest) { - String uriStr = httpRequest.uri(); - URI uri = URI.create(uriStr); - Optional httpHandlerOpt = adminHandlerManager.getHttpHandler(uri.getPath()); - if (httpHandlerOpt.isPresent()) { - try { - httpHandlerOpt.get().handle(httpRequest, ctx); - } catch (Exception e) { - log.error("admin server channelRead error", e); - ctx.writeAndFlush(HttpResponseUtils.buildHttpResponse(Objects.requireNonNull(e.getMessage()), ctx, - HttpHeaderValues.APPLICATION_JSON, HttpResponseStatus.INTERNAL_SERVER_ERROR)).addListener(ChannelFutureListener.CLOSE); - } - } else { - ctx.writeAndFlush(HttpResponseUtils.createNotFound()).addListener(ChannelFutureListener.CLOSE); - } + private void initThreadPool() { + adminMetricsExecutor = ThreadPoolFactory.createThreadPoolExecutor( + eventMeshAdminConfiguration.getEventMeshServerAdminThreadNum(), + eventMeshAdminConfiguration.getEventMeshServerAdminThreadNum(), + new LinkedBlockingQueue<>(50), "eventMesh-admin-metrics", true); + } + + private void registerAdminRequestProcessor() { + final AdminMetricsProcessor adminMetricsProcessor = new AdminMetricsProcessor(this); + registerProcessor(RequestCode.ADMIN_METRICS.getRequestCode(), adminMetricsProcessor); } private class AdminServerInitializer extends ChannelInitializer { + private final transient SSLContext sslContext; + private final transient boolean useTLS; + + public AdminServerInitializer(final SSLContext sslContext, final boolean useTLS) { + this.sslContext = sslContext; + this.useTLS = useTLS; + } + @Override protected void initChannel(final SocketChannel channel) { final ChannelPipeline pipeline = channel.pipeline(); + if (sslContext != null && useTLS) { + final SSLEngine sslEngine = sslContext.createSSLEngine(); + sslEngine.setUseClientMode(false); + pipeline.addFirst(getWorkerGroup(), "ssl", new SslHandler(sslEngine)); + } + pipeline.addLast(getWorkerGroup(), new HttpRequestDecoder(), new HttpResponseEncoder(), @@ -131,5 +160,22 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpRequest msg) { } }); } + + private void parseHttpRequest(ChannelHandlerContext ctx, HttpRequest httpRequest) { + String uriStr = httpRequest.uri(); + URI uri = URI.create(uriStr); + Optional httpHandlerOpt = adminHandlerManager.getHttpHandler(uri.getPath()); + if (httpHandlerOpt.isPresent()) { + try { + httpHandlerOpt.get().handle(httpRequest, ctx); + } catch (Exception e) { + log.error("admin server channelRead error", e); + ctx.writeAndFlush(HttpResponseUtils.buildHttpResponse(Objects.requireNonNull(e.getMessage()), ctx, + HttpHeaderValues.APPLICATION_JSON, HttpResponseStatus.INTERNAL_SERVER_ERROR)).addListener(ChannelFutureListener.CLOSE); + } + } else { + 
ctx.writeAndFlush(HttpResponseUtils.createNotFound()).addListener(ChannelFutureListener.CLOSE); + } + } } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshGrpcBootstrap.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshGrpcBootstrap.java index dc218084b1..b6e493c1bc 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshGrpcBootstrap.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshGrpcBootstrap.java @@ -23,10 +23,13 @@ import org.apache.eventmesh.common.utils.ConfigurationContextUtil; import org.apache.eventmesh.runtime.configuration.EventMeshGrpcConfiguration; +import lombok.Getter; + public class EventMeshGrpcBootstrap implements EventMeshBootstrap { private final EventMeshGrpcConfiguration eventMeshGrpcConfiguration; + @Getter private EventMeshGrpcServer eventMeshGrpcServer; private final EventMeshServer eventMeshServer; @@ -62,12 +65,4 @@ public void shutdown() throws Exception { eventMeshGrpcServer.shutdown(); } } - - public EventMeshGrpcServer getEventMeshGrpcServer() { - return eventMeshGrpcServer; - } - - public void setEventMeshGrpcServer(EventMeshGrpcServer eventMeshGrpcServer) { - this.eventMeshGrpcServer = eventMeshGrpcServer; - } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHTTPServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHTTPServer.java index cc47cc9727..1089a1cafb 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHTTPServer.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHTTPServer.java @@ -31,7 +31,6 @@ import org.apache.eventmesh.runtime.constants.EventMeshConstants; import org.apache.eventmesh.runtime.core.consumer.SubscriptionManager; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerManager; -import org.apache.eventmesh.runtime.core.protocol.http.processor.AdminMetricsProcessor; import org.apache.eventmesh.runtime.core.protocol.http.processor.BatchSendMessageProcessor; import org.apache.eventmesh.runtime.core.protocol.http.processor.BatchSendMessageV2Processor; import org.apache.eventmesh.runtime.core.protocol.http.processor.CreateTopicProcessor; @@ -68,6 +67,7 @@ import com.google.common.eventbus.EventBus; import com.google.common.util.concurrent.RateLimiter; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -75,6 +75,7 @@ * Add multiple managers to the underlying server */ @Slf4j +@Getter public class EventMeshHTTPServer extends AbstractHTTPServer { private final EventMeshServer eventMeshServer; @@ -95,7 +96,6 @@ public class EventMeshHTTPServer extends AbstractHTTPServer { private transient RateLimiter batchRateLimiter; public EventMeshHTTPServer(final EventMeshServer eventMeshServer, final EventMeshHTTPConfiguration eventMeshHttpConfiguration) { - super(eventMeshHttpConfiguration.getHttpServerPort(), eventMeshHttpConfiguration.isEventMeshServerUseTls(), eventMeshHttpConfiguration); @@ -103,7 +103,6 @@ public EventMeshHTTPServer(final EventMeshServer eventMeshServer, final EventMes this.eventMeshHttpConfiguration = eventMeshHttpConfiguration; this.metaStorage = eventMeshServer.getMetaStorage(); this.acl = eventMeshServer.getAcl(); - } public void init() throws Exception { @@ -251,9 +250,6 @@ private void registerHTTPRequestProcessor() throws Exception { final SendAsyncRemoteEventProcessor sendAsyncRemoteEventProcessor = new 
SendAsyncRemoteEventProcessor(this); this.getHandlerService().register(sendAsyncRemoteEventProcessor); - final AdminMetricsProcessor adminMetricsProcessor = new AdminMetricsProcessor(this); - registerProcessor(RequestCode.ADMIN_METRICS.getRequestCode(), adminMetricsProcessor); - final HeartBeatProcessor heartProcessor = new HeartBeatProcessor(this); registerProcessor(RequestCode.HEARTBEAT.getRequestCode(), heartProcessor); @@ -299,62 +295,4 @@ private void registerWebhook() throws Exception { this.getHandlerService().register(webHookProcessor, super.getHttpThreadPoolGroup().getWebhookExecutor()); } - - public SubscriptionManager getSubscriptionManager() { - return subscriptionManager; - } - - public ConsumerManager getConsumerManager() { - return consumerManager; - } - - public ProducerManager getProducerManager() { - return producerManager; - } - - public EventMeshHTTPConfiguration getEventMeshHttpConfiguration() { - return eventMeshHttpConfiguration; - } - - public EventBus getEventBus() { - return eventBus; - } - - public HttpRetryer getHttpRetryer() { - return httpRetryer; - } - - public Acl getAcl() { - return acl; - } - - public EventMeshServer getEventMeshServer() { - return eventMeshServer; - } - - public RateLimiter getMsgRateLimiter() { - return msgRateLimiter; - } - - public RateLimiter getBatchRateLimiter() { - return batchRateLimiter; - } - - public FilterEngine getFilterEngine() { - return filterEngine; - } - - public TransformerEngine getTransformerEngine() { - return transformerEngine; - } - - public MetaStorage getMetaStorage() { - return metaStorage; - } - - public HTTPClientPool getHttpClientPool() { - return httpClientPool; - } - - } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHttpBootstrap.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHttpBootstrap.java index 87ce3c23a4..df3b227e42 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHttpBootstrap.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshHttpBootstrap.java @@ -23,12 +23,15 @@ import org.apache.eventmesh.common.utils.ConfigurationContextUtil; import org.apache.eventmesh.runtime.configuration.EventMeshHTTPConfiguration; -public class EventMeshHttpBootstrap implements EventMeshBootstrap { +import lombok.Getter; - private final EventMeshHTTPConfiguration eventMeshHttpConfiguration; +public class EventMeshHttpBootstrap implements EventMeshBootstrap { + @Getter public EventMeshHTTPServer eventMeshHttpServer; + private final EventMeshHTTPConfiguration eventMeshHttpConfiguration; + private final EventMeshServer eventMeshServer; public EventMeshHttpBootstrap(final EventMeshServer eventMeshServer) { @@ -64,12 +67,4 @@ public void shutdown() throws Exception { eventMeshHttpServer.shutdown(); } } - - public EventMeshHTTPServer getEventMeshHttpServer() { - return eventMeshHttpServer; - } - - public void setEventMeshHttpServer(EventMeshHTTPServer eventMeshHttpServer) { - this.eventMeshHttpServer = eventMeshHttpServer; - } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java index b3250795d6..d61580b9c8 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshServer.java @@ -43,23 +43,32 @@ import 
java.util.concurrent.CopyOnWriteArrayList; import java.util.stream.Collectors; +import lombok.Getter; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; @Slf4j public class EventMeshServer { + @Getter private final Acl acl; + @Getter + @Setter private MetaStorage metaStorage; + @Getter private static Trace trace; private final StorageResource storageResource; + @Getter private ServiceState serviceState; + @Getter private ProducerTopicManager producerTopicManager; + @Getter private final CommonConfiguration configuration; // private transient ClientManageController clientManageController; @@ -70,13 +79,17 @@ public class EventMeshServer { private static final ConfigService configService = ConfigService.getInstance(); - private EventMeshAdminBootstrap adminBootstrap; - + @Getter private EventMeshTCPServer eventMeshTCPServer = null; + @Getter + private EventMeshHTTPServer eventMeshHTTPServer = null; + + @Getter private EventMeshGrpcServer eventMeshGrpcServer = null; - private EventMeshHTTPServer eventMeshHTTPServer = null; + @Getter + private EventMeshAdminServer eventMeshAdminServer = null; private EventMeshMetricsManager eventMeshMetricsManager; @@ -105,8 +118,7 @@ public EventMeshServer() { case GRPC: BOOTSTRAP_LIST.add(new EventMeshGrpcBootstrap(this)); break; - default: - // nothing to do + default: // nothing to do } } @@ -115,6 +127,9 @@ public EventMeshServer() { BOOTSTRAP_LIST.add(new EventMeshTcpBootstrap(this)); } + // HTTP Admin Server always enabled + BOOTSTRAP_LIST.add(new EventMeshAdminBootstrap(this)); + List metricsPluginTypes = configuration.getEventMeshMetricsPluginType(); if (CollectionUtils.isNotEmpty(metricsPluginTypes)) { List metricsRegistries = metricsPluginTypes.stream().map(metric -> MetricsPluginFactory.getMetricsRegistry(metric)) @@ -147,6 +162,9 @@ public void init() throws Exception { if (eventMeshBootstrap instanceof EventMeshGrpcBootstrap) { eventMeshGrpcServer = ((EventMeshGrpcBootstrap) eventMeshBootstrap).getEventMeshGrpcServer(); } + if (eventMeshBootstrap instanceof EventMeshAdminBootstrap) { + eventMeshAdminServer = ((EventMeshAdminBootstrap) eventMeshBootstrap).getEventMeshAdminServer(); + } } if (Objects.nonNull(eventMeshTCPServer)) { @@ -168,12 +186,6 @@ public void init() throws Exception { eventMeshMetricsManager.init(); } - - if (Objects.nonNull(eventMeshTCPServer) && Objects.nonNull(eventMeshHTTPServer) && Objects.nonNull(eventMeshGrpcServer)) { - adminBootstrap = new EventMeshAdminBootstrap(this); - adminBootstrap.init(); - } - producerTopicManager = new ProducerTopicManager(this); producerTopicManager.init(); @@ -203,13 +215,10 @@ public void start() throws Exception { eventMeshBootstrap.start(); } - if (Objects.nonNull(adminBootstrap)) { - adminBootstrap.start(); - } producerTopicManager.start(); + serviceState = ServiceState.RUNNING; log.info(SERVER_STATE_MSG, serviceState); - } public void shutdown() throws Exception { @@ -235,48 +244,8 @@ public void shutdown() throws Exception { } producerTopicManager.shutdown(); ConfigurationContextUtil.clear(); - serviceState = ServiceState.STOPPED; + serviceState = ServiceState.STOPPED; log.info(SERVER_STATE_MSG, serviceState); } - - public static Trace getTrace() { - return trace; - } - - public ServiceState getServiceState() { - return serviceState; - } - - public MetaStorage getMetaStorage() { - return metaStorage; - } - - public void setMetaStorage(final MetaStorage metaStorage) { - this.metaStorage = metaStorage; - } - - public Acl getAcl() { - return acl; - } - - public ProducerTopicManager 
getProducerTopicManager() { - return producerTopicManager; - } - - public CommonConfiguration getConfiguration() { - return configuration; - } - - public EventMeshTCPServer getEventMeshTCPServer() { - return eventMeshTCPServer; - } - - public EventMeshGrpcServer getEventMeshGrpcServer() { - return eventMeshGrpcServer; - } - - public EventMeshHTTPServer getEventMeshHTTPServer() { - return eventMeshHTTPServer; - } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshTcpBootstrap.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshTcpBootstrap.java index 9cd665d28a..e098b203c4 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshTcpBootstrap.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/EventMeshTcpBootstrap.java @@ -23,8 +23,11 @@ import org.apache.eventmesh.common.utils.ConfigurationContextUtil; import org.apache.eventmesh.runtime.configuration.EventMeshTCPConfiguration; +import lombok.Getter; + public class EventMeshTcpBootstrap implements EventMeshBootstrap { + @Getter private EventMeshTCPServer eventMeshTcpServer; private final EventMeshTCPConfiguration eventMeshTcpConfiguration; @@ -63,13 +66,4 @@ public void shutdown() throws Exception { eventMeshTcpServer.shutdown(); } } - - public EventMeshTCPServer getEventMeshTcpServer() { - return eventMeshTcpServer; - } - - public void setEventMeshTcpServer(EventMeshTCPServer eventMeshTcpServer) { - this.eventMeshTcpServer = eventMeshTcpServer; - } - } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java index bf6eb9dadc..14677dc690 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/FilterEngine.java @@ -19,8 +19,8 @@ import org.apache.eventmesh.api.meta.MetaServiceListener; import org.apache.eventmesh.common.utils.JsonUtils; -import org.apache.eventmesh.filter.pattern.Pattern; -import org.apache.eventmesh.filter.patternbuild.PatternBuilder; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.patternbuild.PatternBuilder; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerGroupManager; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerManager; import org.apache.eventmesh.runtime.core.protocol.producer.EventMeshProducer; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/HTTPThreadPoolGroup.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/HTTPThreadPoolGroup.java index bf6c740b56..aa11788501 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/HTTPThreadPoolGroup.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/HTTPThreadPoolGroup.java @@ -23,17 +23,25 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; +import lombok.Getter; + public class HTTPThreadPoolGroup implements ThreadPoolGroup { private final EventMeshHTTPConfiguration eventMeshHttpConfiguration; + @Getter private ThreadPoolExecutor batchMsgExecutor; + @Getter private ThreadPoolExecutor sendMsgExecutor; + @Getter private ThreadPoolExecutor remoteMsgExecutor; + @Getter private ThreadPoolExecutor replyMsgExecutor; + @Getter private ThreadPoolExecutor pushMsgExecutor; + 
@Getter private ThreadPoolExecutor clientManageExecutor; - private ThreadPoolExecutor runtimeAdminExecutor; + @Getter private ThreadPoolExecutor webhookExecutor; public HTTPThreadPoolGroup(EventMeshHTTPConfiguration eventMeshHttpConfiguration) { @@ -73,12 +81,6 @@ public void initThreadPool() { new LinkedBlockingQueue<>(eventMeshHttpConfiguration.getEventMeshServerClientManageBlockQSize()), "eventMesh-clientManage", true); - // The runtimeAdminExecutor here is for the runtime.admin package. - runtimeAdminExecutor = ThreadPoolFactory.createThreadPoolExecutor( - eventMeshHttpConfiguration.getEventMeshServerAdminThreadNum(), - eventMeshHttpConfiguration.getEventMeshServerAdminThreadNum(), - new LinkedBlockingQueue<>(50), "eventMesh-runtime-admin", true); - replyMsgExecutor = ThreadPoolFactory.createThreadPoolExecutor( eventMeshHttpConfiguration.getEventMeshServerReplyMsgThreadNum(), eventMeshHttpConfiguration.getEventMeshServerReplyMsgThreadNum(), @@ -95,9 +97,6 @@ public void shutdownThreadPool() { if (batchMsgExecutor != null) { batchMsgExecutor.shutdown(); } - if (runtimeAdminExecutor != null) { - runtimeAdminExecutor.shutdown(); - } if (clientManageExecutor != null) { clientManageExecutor.shutdown(); } @@ -114,36 +113,4 @@ public void shutdownThreadPool() { replyMsgExecutor.shutdown(); } } - - public ThreadPoolExecutor getBatchMsgExecutor() { - return batchMsgExecutor; - } - - public ThreadPoolExecutor getSendMsgExecutor() { - return sendMsgExecutor; - } - - public ThreadPoolExecutor getRemoteMsgExecutor() { - return remoteMsgExecutor; - } - - public ThreadPoolExecutor getReplyMsgExecutor() { - return replyMsgExecutor; - } - - public ThreadPoolExecutor getPushMsgExecutor() { - return pushMsgExecutor; - } - - public ThreadPoolExecutor getClientManageExecutor() { - return clientManageExecutor; - } - - public ThreadPoolExecutor getRuntimeAdminExecutor() { - return runtimeAdminExecutor; - } - - public ThreadPoolExecutor getWebhookExecutor() { - return webhookExecutor; - } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java index 0f48220a4d..a0736b430a 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/SSLContextFactory.java @@ -17,6 +17,7 @@ package org.apache.eventmesh.runtime.boot; +import org.apache.eventmesh.runtime.configuration.EventMeshAdminConfiguration; import org.apache.eventmesh.runtime.configuration.EventMeshHTTPConfiguration; import org.apache.eventmesh.runtime.constants.EventMeshConstants; @@ -40,23 +41,19 @@ public class SSLContextFactory { - private static String protocol = "TLSv1.1"; - - private static String fileName; - - private static String password; - + /** + * {@link EventMeshAdminConfiguration} will be parsed into {@link EventMeshHTTPConfiguration}. 
+ */ public static SSLContext getSslContext(final EventMeshHTTPConfiguration eventMeshHttpConfiguration) - throws NoSuchAlgorithmException, KeyStoreException, CertificateException, IOException, - UnrecoverableKeyException, KeyManagementException { + throws NoSuchAlgorithmException, KeyStoreException, CertificateException, IOException, UnrecoverableKeyException, KeyManagementException { + + String protocol = eventMeshHttpConfiguration.getEventMeshServerSSLProtocol(); + String fileName = eventMeshHttpConfiguration.getEventMeshServerSSLCer(); + String password = eventMeshHttpConfiguration.getEventMeshServerSSLPass(); SSLContext sslContext; - try (InputStream inputStream = Files.newInputStream(Paths.get(EventMeshConstants.EVENTMESH_CONF_HOME - + File.separator - + fileName), StandardOpenOption.READ)) { - protocol = eventMeshHttpConfiguration.getEventMeshServerSSLProtocol(); - fileName = eventMeshHttpConfiguration.getEventMeshServerSSLCer(); - password = eventMeshHttpConfiguration.getEventMeshServerSSLPass(); + try (InputStream inputStream = Files.newInputStream(Paths.get(EventMeshConstants.EVENTMESH_CONF_HOME + File.separator + fileName), + StandardOpenOption.READ)) { char[] filePass = StringUtils.isNotBlank(password) ? password.toCharArray() : new char[0]; final KeyStore keyStore = KeyStore.getInstance("JKS"); diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java index 551bcb2799..1d2f8ca30c 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/boot/TransformerEngine.java @@ -19,14 +19,14 @@ import org.apache.eventmesh.api.meta.MetaServiceListener; import org.apache.eventmesh.common.utils.JsonUtils; +import org.apache.eventmesh.function.transformer.Transformer; +import org.apache.eventmesh.function.transformer.TransformerBuilder; +import org.apache.eventmesh.function.transformer.TransformerParam; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerGroupManager; import org.apache.eventmesh.runtime.core.protocol.http.consumer.ConsumerManager; import org.apache.eventmesh.runtime.core.protocol.producer.EventMeshProducer; import org.apache.eventmesh.runtime.core.protocol.producer.ProducerManager; import org.apache.eventmesh.runtime.meta.MetaStorage; -import org.apache.eventmesh.transformer.Transformer; -import org.apache.eventmesh.transformer.TransformerBuilder; -import org.apache.eventmesh.transformer.TransformerParam; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshAdminConfiguration.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshAdminConfiguration.java new file mode 100644 index 0000000000..dff80eaaa2 --- /dev/null +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshAdminConfiguration.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.configuration; + +import org.apache.eventmesh.common.config.Config; +import org.apache.eventmesh.common.config.ConfigField; + +import java.util.Collections; +import java.util.List; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; + +import inet.ipaddr.IPAddress; + +@Data +@EqualsAndHashCode(callSuper = true) +@NoArgsConstructor +@Config(prefix = "eventMesh.server") +public class EventMeshAdminConfiguration extends EventMeshHTTPConfiguration { + + @ConfigField(field = "admin.http.port") + private int eventMeshServerAdminPort = 10106; + + @ConfigField(field = "admin.threads.num") + private int eventMeshServerAdminThreadNum = 2; + + @ConfigField(field = "admin.useTls.enabled") + private boolean eventMeshServerUseTls = false; + + @ConfigField(field = "admin.ssl.protocol") + private String eventMeshServerSSLProtocol = "TLSv1.3"; + + @ConfigField(field = "admin.ssl.cer") + private String eventMeshServerSSLCer = "admin-server.jks"; + + @ConfigField(field = "admin.ssl.pass") + private String eventMeshServerSSLPass = "eventmesh-admin-server"; + + @ConfigField(field = "admin.blacklist.ipv4") + private List eventMeshIpv4BlackList = Collections.emptyList(); // TODO implement after merging #4835 + + @ConfigField(field = "admin.blacklist.ipv6") + private List eventMeshIpv6BlackList = Collections.emptyList(); +} diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfiguration.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfiguration.java index cf08f27553..924a07ab01 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfiguration.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfiguration.java @@ -59,9 +59,6 @@ public class EventMeshGrpcConfiguration extends CommonConfiguration { @ConfigField(field = "metaStorage.threads.num") private int eventMeshServerMetaStorageThreadNum = 10; - @ConfigField(field = "admin.threads.num") - private int eventMeshServerAdminThreadNum = 2; - @ConfigField(field = "retry.threads.num") private int eventMeshServerRetryThreadNum = 2; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfiguration.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfiguration.java index b0fa18df4c..287c222245 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfiguration.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfiguration.java @@ -63,9 +63,6 @@ public class EventMeshHTTPConfiguration extends CommonConfiguration { @ConfigField(field = "metaStorage.threads.num") private int eventMeshServerMetaStorageThreadNum = 10; - @ConfigField(field = "admin.threads.num") - private int eventMeshServerAdminThreadNum = 2; - @ConfigField(field = "retry.threads.num") private int eventMeshServerRetryThreadNum = 
2; @@ -131,5 +128,4 @@ public class EventMeshHTTPConfiguration extends CommonConfiguration { @ConfigField(field = "blacklist.ipv6") private List eventMeshIpv6BlackList = Collections.emptyList(); - } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfiguration.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfiguration.java index 891f986167..907d80f686 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfiguration.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfiguration.java @@ -109,9 +109,6 @@ public class EventMeshTCPConfiguration extends CommonConfiguration { @ConfigField(field = "tcp.RebalanceIntervalInMills") private Integer eventMeshTcpRebalanceIntervalInMills = 30 * 1000; - @ConfigField(field = "admin.http.port") - private int eventMeshServerAdminPort = 10106; - @ConfigField(field = "tcp.sendBack.enabled") private boolean eventMeshTcpSendBackEnabled = Boolean.TRUE; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/AdminMetricsProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/AdminMetricsProcessor.java index b3fbf0d6a8..9a8b369341 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/AdminMetricsProcessor.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/AdminMetricsProcessor.java @@ -18,7 +18,7 @@ package org.apache.eventmesh.runtime.core.protocol.http.processor; import org.apache.eventmesh.common.protocol.http.HttpCommand; -import org.apache.eventmesh.runtime.boot.EventMeshHTTPServer; +import org.apache.eventmesh.runtime.boot.EventMeshAdminServer; import org.apache.eventmesh.runtime.core.protocol.http.async.AsyncContext; import java.util.concurrent.Executor; @@ -31,7 +31,7 @@ @RequiredArgsConstructor public class AdminMetricsProcessor extends AbstractHttpRequestProcessor { - private final EventMeshHTTPServer eventMeshHTTPServer; + private final EventMeshAdminServer eventMeshAdminServer; @Override public void processRequest(ChannelHandlerContext ctx, AsyncContext asyncContext) throws Exception { @@ -39,6 +39,6 @@ public void processRequest(ChannelHandlerContext ctx, AsyncContext @Override public Executor executor() { - return eventMeshHTTPServer.getHttpThreadPoolGroup().getRuntimeAdminExecutor(); + return eventMeshAdminServer.getAdminMetricsExecutor(); } } diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java index b30238a28c..0e41d827ab 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/processor/SendAsyncEventProcessor.java @@ -31,7 +31,8 @@ import org.apache.eventmesh.common.utils.IPUtils; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.common.utils.RandomStringUtils; -import org.apache.eventmesh.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.transformer.Transformer; import 
org.apache.eventmesh.protocol.api.ProtocolAdaptor; import org.apache.eventmesh.protocol.api.ProtocolPluginFactory; import org.apache.eventmesh.runtime.acl.Acl; @@ -44,7 +45,6 @@ import org.apache.eventmesh.runtime.util.EventMeshUtil; import org.apache.eventmesh.runtime.util.RemotingHelper; import org.apache.eventmesh.trace.api.common.EventMeshTraceConstants; -import org.apache.eventmesh.transformer.Transformer; import org.apache.commons.lang3.StringUtils; diff --git a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java index be95971536..69506ede8a 100644 --- a/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java +++ b/eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/core/protocol/http/push/AsyncHTTPPushRequest.java @@ -30,14 +30,14 @@ import org.apache.eventmesh.common.utils.IPUtils; import org.apache.eventmesh.common.utils.JsonUtils; import org.apache.eventmesh.common.utils.RandomStringUtils; -import org.apache.eventmesh.filter.pattern.Pattern; +import org.apache.eventmesh.function.filter.pattern.Pattern; +import org.apache.eventmesh.function.transformer.Transformer; import org.apache.eventmesh.protocol.api.ProtocolAdaptor; import org.apache.eventmesh.protocol.api.ProtocolPluginFactory; import org.apache.eventmesh.runtime.constants.EventMeshConstants; import org.apache.eventmesh.runtime.core.protocol.http.consumer.HandleMsgContext; import org.apache.eventmesh.runtime.util.EventMeshUtil; import org.apache.eventmesh.runtime.util.WebhookUtil; -import org.apache.eventmesh.transformer.Transformer; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateFormatUtils; diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/boot/EventMeshServerTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/boot/EventMeshServerTest.java index c546d38e3b..83baf20fb9 100644 --- a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/boot/EventMeshServerTest.java +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/boot/EventMeshServerTest.java @@ -77,7 +77,6 @@ private void assertTCPConfig(EventMeshTCPConfiguration config) { Assertions.assertEquals(15816, config.getEventMeshTcpMsgRetryAsyncDelayInMills()); Assertions.assertEquals(16816, config.getEventMeshTcpMsgRetryQueueSize()); Assertions.assertEquals(Integer.valueOf(17816), config.getEventMeshTcpRebalanceIntervalInMills()); - Assertions.assertEquals(18816, config.getEventMeshServerAdminPort()); Assertions.assertEquals(Boolean.TRUE, config.isEventMeshTcpSendBackEnabled()); Assertions.assertEquals(3, config.getEventMeshTcpSendBackMaxTimes()); Assertions.assertEquals(21816, config.getEventMeshTcpPushFailIsolateTimeInMills()); diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfigurationTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfigurationTest.java index 6b206167dc..4dc59a2000 100644 --- a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfigurationTest.java +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshGrpcConfigurationTest.java @@ -50,7 +50,6 @@ private void assertGrpcConfig(EventMeshGrpcConfiguration config) { Assertions.assertEquals(5816, 
config.getEventMeshServerReplyMsgThreadNum()); Assertions.assertEquals(6816, config.getEventMeshServerSubscribeMsgThreadNum()); Assertions.assertEquals(7816, config.getEventMeshServerMetaStorageThreadNum()); - Assertions.assertEquals(8816, config.getEventMeshServerAdminThreadNum()); Assertions.assertEquals(9816, config.getEventMeshServerRetryThreadNum()); Assertions.assertEquals(11816, config.getEventMeshServerPullMetaStorageInterval()); Assertions.assertEquals(12816, config.getEventMeshServerAsyncAccumulationThreshold()); diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfigurationTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfigurationTest.java index d522ff5519..9e99bc511d 100644 --- a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfigurationTest.java +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshHTTPConfigurationTest.java @@ -53,7 +53,6 @@ private void assertHTTPConfig(EventMeshHTTPConfiguration config) throws AddressS Assertions.assertEquals(5816, config.getEventMeshServerReplyMsgThreadNum()); Assertions.assertEquals(6816, config.getEventMeshServerClientManageThreadNum()); Assertions.assertEquals(7816, config.getEventMeshServerMetaStorageThreadNum()); - Assertions.assertEquals(8816, config.getEventMeshServerAdminThreadNum()); Assertions.assertEquals(9816, config.getEventMeshServerRetryThreadNum()); Assertions.assertEquals(11816, config.getEventMeshServerPullMetaStorageInterval()); diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfigurationTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfigurationTest.java index 1501cf1b5d..2aee84ef72 100644 --- a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfigurationTest.java +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/configuration/EventMeshTCPConfigurationTest.java @@ -58,7 +58,6 @@ private void assertTCPConfig(EventMeshTCPConfiguration config) { Assertions.assertEquals(15816, config.getEventMeshTcpMsgRetryAsyncDelayInMills()); Assertions.assertEquals(16816, config.getEventMeshTcpMsgRetryQueueSize()); Assertions.assertEquals(Integer.valueOf(17816), config.getEventMeshTcpRebalanceIntervalInMills()); - Assertions.assertEquals(18816, config.getEventMeshServerAdminPort()); Assertions.assertEquals(Boolean.TRUE, config.isEventMeshTcpSendBackEnabled()); Assertions.assertEquals(3, config.getEventMeshTcpSendBackMaxTimes()); Assertions.assertEquals(21816, config.getEventMeshTcpPushFailIsolateTimeInMills()); diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/EventMeshCloudEventWriterTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/EventMeshCloudEventWriterTest.java new file mode 100644 index 0000000000..e7f5cfcebe --- /dev/null +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/EventMeshCloudEventWriterTest.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.util; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class EventMeshCloudEventWriterTest { + + @Test + public void testURIAsValueWithContextAttribute() throws URISyntaxException { + String key = "testKey"; + EventMeshCloudEventWriter eventMeshCloudEventWriter = new EventMeshCloudEventWriter(); + + eventMeshCloudEventWriter.withContextAttribute(key, new URI("file://foo/bars")); + + Map extensionMap = eventMeshCloudEventWriter.getExtensionMap(); + Assertions.assertEquals(extensionMap.get(key), "file://foo/bars"); + } + + @Test + public void testOffsetDateTimeAsValueWithContextAttribute() { + String key = "testKey"; + EventMeshCloudEventWriter eventMeshCloudEventWriter = new EventMeshCloudEventWriter(); + + eventMeshCloudEventWriter.withContextAttribute(key, OffsetDateTime.of(LocalDateTime.of( + LocalDate.of(2023, 6, 17), LocalTime.MIDNIGHT), ZoneOffset.ofTotalSeconds(32400))); + + Map extensionMap = eventMeshCloudEventWriter.getExtensionMap(); + Assertions.assertEquals(extensionMap.get(key), "2023-06-17T00:00:00+09:00"); + } + + @Test + public void testIntegerAsValueWithContextAttribute() { + String key = "testKey"; + EventMeshCloudEventWriter eventMeshCloudEventWriter = new EventMeshCloudEventWriter(); + + eventMeshCloudEventWriter.withContextAttribute(key, 123); + + Map extensionMap = eventMeshCloudEventWriter.getExtensionMap(); + Assertions.assertEquals(extensionMap.get(key), "123"); + } + + @Test + public void testBooleanAsValueWithContextAttribute() { + String key = "testKey"; + EventMeshCloudEventWriter eventMeshCloudEventWriter = new EventMeshCloudEventWriter(); + + eventMeshCloudEventWriter.withContextAttribute(key, Boolean.FALSE); + + Map extensionMap = eventMeshCloudEventWriter.getExtensionMap(); + Assertions.assertEquals(extensionMap.get(key), "false"); + } + + @Test + public void testByteArrayAsValueWithContextAttribute() { + String key = "testKey"; + EventMeshCloudEventWriter eventMeshCloudEventWriter = new EventMeshCloudEventWriter(); + + eventMeshCloudEventWriter.withContextAttribute(key, "bytesArray".getBytes(StandardCharsets.UTF_8)); + + Map extensionMap = eventMeshCloudEventWriter.getExtensionMap(); + String base64EncodedValue = "Ynl0ZXNBcnJheQ=="; + Assertions.assertEquals(extensionMap.get(key), base64EncodedValue); + } + +} diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/HttpRequestUtilTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/HttpRequestUtilTest.java new file mode 100644 index 0000000000..3d4c868dad --- /dev/null +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/HttpRequestUtilTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.util; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import io.netty.buffer.Unpooled; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpVersion; + +public class HttpRequestUtilTest { + + @Test + public void testShouldParseHttpGETRequestBody() throws IOException { + HttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/some-path?q1=xyz"); + Map expected = new HashMap<>(); + expected.put("q1", "xyz"); + Assertions.assertEquals(expected, HttpRequestUtil.parseHttpRequestBody(httpRequest)); + } + + @Test + public void testShouldParseHttpPOSTRequestBody() throws IOException { + HttpRequest httpRequest = new DefaultFullHttpRequest( + HttpVersion.HTTP_1_1, HttpMethod.POST, + "/some-path", + Unpooled.copiedBuffer(("q1=xyz").getBytes()) + ); + Map expected = new HashMap<>(); + expected.put("q1", "xyz"); + Assertions.assertEquals(expected, HttpRequestUtil.parseHttpRequestBody(httpRequest)); + } + + @Test + public void testQueryStringToMap() { + Map expected = new HashMap<>(); + expected.put("q1", "xyz"); + expected.put("q2", "abc"); + Assertions.assertEquals(expected, HttpRequestUtil.queryStringToMap("q1=xyz&q2=abc")); + } + + @Test + public void testGetQueryParam() { + HttpRequest httpRequest = new DefaultFullHttpRequest( + HttpVersion.HTTP_1_1, + HttpMethod.GET, + "/some-path?q1=xyz" + ); + Assertions.assertEquals("xyz", HttpRequestUtil.getQueryParam(httpRequest, "q1", "")); + } + + @Test + public void testGetBodyParam() throws IOException { + HttpRequest httpRequest = new DefaultFullHttpRequest( + HttpVersion.HTTP_1_1, + HttpMethod.POST, + "/some-path", + Unpooled.copiedBuffer(("q1=xyz").getBytes()) + ); + Assertions.assertEquals("xyz", HttpRequestUtil.getBodyParam(httpRequest, "q1")); + } + +} diff --git a/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/TraceUtilsTest.java b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/TraceUtilsTest.java new file mode 100644 index 0000000000..34989b6442 --- /dev/null +++ b/eventmesh-runtime/src/test/java/org/apache/eventmesh/runtime/util/TraceUtilsTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
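HttpRequestUtil's internals are not part of this patch, so for reference only: the query-string side of what HttpRequestUtilTest exercises can be reproduced with Netty's own QueryStringDecoder. This is a minimal sketch, not the util's actual implementation; the class name is illustrative and the POST form-body path is not covered here.

    import io.netty.handler.codec.http.QueryStringDecoder;

    import java.util.List;
    import java.util.Map;

    public class QueryStringDecoderSketch {
        public static void main(String[] args) {
            // Parameters taken from a full request URI, as in testShouldParseHttpGETRequestBody.
            Map<String, List<String>> fromUri =
                new QueryStringDecoder("/some-path?q1=xyz").parameters();
            System.out.println(fromUri.get("q1"));   // [xyz]

            // A bare query string, as in testQueryStringToMap (hasPath = false).
            Map<String, List<String>> fromQuery =
                new QueryStringDecoder("q1=xyz&q2=abc", false).parameters();
            System.out.println(fromQuery.get("q2")); // [abc]
        }
    }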
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.runtime.util; + +import org.apache.eventmesh.runtime.boot.EventMeshServer; +import org.apache.eventmesh.runtime.mock.MockCloudEvent; +import org.apache.eventmesh.runtime.trace.Trace; + +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.MockedStatic; +import org.mockito.Mockito; + +import io.cloudevents.SpecVersion; +import io.opentelemetry.api.trace.Span; + +public class TraceUtilsTest { + @Test + public void testShouldPrepareClientSpan() throws Exception { + Map cloudEventExtensionMap = EventMeshUtil.getCloudEventExtensionMap(SpecVersion.V1.toString(), new MockCloudEvent()); + try (MockedStatic dummyStatic = Mockito.mockStatic(EventMeshServer.class)) { + Trace trace = Trace.getInstance("zipkin", true); + trace.init(); + dummyStatic.when(EventMeshServer::getTrace).thenReturn(trace); + Span testClientSpan = TraceUtils.prepareClientSpan( + cloudEventExtensionMap, + "test client span", + false + ); + Assertions.assertNotNull(testClientSpan); + } + } + + @Test + public void testShouldPrepareServerSpan() throws Exception { + Map cloudEventExtensionMap = EventMeshUtil.getCloudEventExtensionMap(SpecVersion.V1.toString(), new MockCloudEvent()); + try (MockedStatic dummyStatic = Mockito.mockStatic(EventMeshServer.class)) { + Trace trace = Trace.getInstance("zipkin", true); + trace.init(); + dummyStatic.when(EventMeshServer::getTrace).thenReturn(trace); + TraceUtils.prepareClientSpan( + cloudEventExtensionMap, + "test client span", + false + ); + Span testServerSpan = TraceUtils.prepareServerSpan( + cloudEventExtensionMap, + "test server span", + false + ); + Assertions.assertNotNull(testServerSpan); + } + } + + @Test + public void testShouldFinishSpan() throws Exception { + MockCloudEvent cloudEvent = new MockCloudEvent(); + Map cloudEventExtensionMap = EventMeshUtil.getCloudEventExtensionMap(SpecVersion.V1.toString(), cloudEvent); + try (MockedStatic dummyStatic = Mockito.mockStatic(EventMeshServer.class)) { + Trace trace = Trace.getInstance("zipkin", true); + trace.init(); + dummyStatic.when(EventMeshServer::getTrace).thenReturn(trace); + Span testClientSpan = TraceUtils.prepareClientSpan( + cloudEventExtensionMap, + "test client span", + false + ); + + TraceUtils.finishSpan(testClientSpan, cloudEvent); + Assertions.assertFalse(testClientSpan.isRecording()); + } + } +} diff --git a/eventmesh-runtime/src/test/resources/configuration.properties b/eventmesh-runtime/src/test/resources/configuration.properties index 70ff82e05c..836fc9c981 100644 --- a/eventmesh-runtime/src/test/resources/configuration.properties +++ b/eventmesh-runtime/src/test/resources/configuration.properties @@ -78,7 +78,6 @@ eventMesh.server.tcp.writerIdleSeconds=2816 eventMesh.server.tcp.readerIdleSeconds=3816 eventMesh.server.tcp.msgReqnumPerSecond=4816 eventMesh.server.tcp.clientMaxNum=5816 -eventMesh.server.tcp.enabled=true eventMesh.server.global.scheduler=6816 eventMesh.server.tcp.taskHandleExecutorPoolSize=7816 eventMesh.server.tcp.msgDownStreamExecutorPoolSize=8816 @@ -89,7 +88,6 @@ 
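The new TraceUtilsTest depends on Mockito's static mocking being scoped to a try-with-resources block, so the stubbed EventMeshServer.getTrace() only applies inside the try. A minimal sketch of that pattern, assuming the inline mock maker is available (the default in Mockito 5) and using a hypothetical Holder class in place of EventMeshServer:

    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;
    import org.mockito.MockedStatic;
    import org.mockito.Mockito;

    public class MockedStaticPatternTest {

        // Hypothetical stand-in for EventMeshServer in the tests above.
        static class Holder {
            static String value() {
                return "real";
            }
        }

        @Test
        void staticStubIsScopedToTheTryBlock() {
            try (MockedStatic<Holder> mocked = Mockito.mockStatic(Holder.class)) {
                mocked.when(Holder::value).thenReturn("stubbed");
                Assertions.assertEquals("stubbed", Holder.value());
            }
            // Outside the try-with-resources the real implementation is restored.
            Assertions.assertEquals("real", Holder.value());
        }
    }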
eventMesh.server.retry.sync.pushRetryDelayInMills=14816 eventMesh.server.retry.async.pushRetryDelayInMills=15816 eventMesh.server.retry.pushRetryQueueSize=16816 eventMesh.server.tcp.RebalanceIntervalInMills=17816 -eventMesh.server.admin.http.port=18816 eventMesh.server.tcp.sendBack.enabled=true eventMesh.server.tcp.pushFailIsolateTimeInMills=21816 eventMesh.server.gracefulShutdown.sleepIntervalInMills=22816 diff --git a/eventmesh-sdks/eventmesh-sdk-java/build.gradle b/eventmesh-sdks/eventmesh-sdk-java/build.gradle index d18fba37ea..be55c650a3 100644 --- a/eventmesh-sdks/eventmesh-sdk-java/build.gradle +++ b/eventmesh-sdks/eventmesh-sdk-java/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def grpcVersion = '1.64.0' +def grpcVersion = '1.68.0' dependencies { api(project(":eventmesh-common")) { @@ -54,7 +54,7 @@ dependencies { implementation "io.grpc:grpc-protobuf:${grpcVersion}" implementation "io.grpc:grpc-stub:${grpcVersion}" - implementation "com.google.protobuf:protobuf-java-util:3.21.5" + implementation "com.google.protobuf:protobuf-java-util:3.25.4" compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' diff --git a/eventmesh-security-plugin/eventmesh-security-auth-token/src/main/java/org/apache/eventmesh/auth/token/impl/auth/AuthTokenUtils.java b/eventmesh-security-plugin/eventmesh-security-auth-token/src/main/java/org/apache/eventmesh/auth/token/impl/auth/AuthTokenUtils.java index 41ceca5490..16005649f4 100644 --- a/eventmesh-security-plugin/eventmesh-security-auth-token/src/main/java/org/apache/eventmesh/auth/token/impl/auth/AuthTokenUtils.java +++ b/eventmesh-security-plugin/eventmesh-security-auth-token/src/main/java/org/apache/eventmesh/auth/token/impl/auth/AuthTokenUtils.java @@ -33,6 +33,7 @@ import java.security.NoSuchAlgorithmException; import java.security.spec.InvalidKeySpecException; import java.security.spec.X509EncodedKeySpec; +import java.util.Objects; import java.util.Set; import io.jsonwebtoken.Claims; @@ -51,41 +52,8 @@ public static void authTokenByPublicKey(AclProperties aclProperties) { throw new AclException("group:" + aclProperties.getExtendedField("group ") + " has no auth to access the topic:" + aclProperties.getTopic()); } - String publicKeyUrl = null; - token = token.replace("Bearer ", ""); - for (String key : ConfigurationContextUtil.KEYS) { - CommonConfiguration commonConfiguration = ConfigurationContextUtil.get(key); - if (commonConfiguration == null) { - continue; - } - if (StringUtils.isBlank(commonConfiguration.getEventMeshSecurityPublickey())) { - throw new AclException("publicKeyUrl cannot be null"); - } - publicKeyUrl = commonConfiguration.getEventMeshSecurityPublickey(); - } - byte[] validationKeyBytes = new byte[0]; - try { - validationKeyBytes = Files.readAllBytes(Paths.get(publicKeyUrl)); - X509EncodedKeySpec spec = new X509EncodedKeySpec(validationKeyBytes); - KeyFactory kf = KeyFactory.getInstance("RSA"); - Key validationKey = kf.generatePublic(spec); - JwtParser signedParser = Jwts.parserBuilder().setSigningKey(validationKey).build(); - Jwt signJwt = signedParser.parseClaimsJws(token); - String sub = signJwt.getBody().get("sub", String.class); - if (!sub.contains(aclProperties.getExtendedField("group").toString()) && !sub.contains("pulsar-admin")) { - throw new AclException("group:" + aclProperties.getExtendedField("group ") + " has no auth to access eventMesh:" - + aclProperties.getTopic()); - } - } catch (IOException e) { - throw new AclException("public key read error!", e); - } catch 
(NoSuchAlgorithmException e) { - throw new AclException("no such RSA algorithm!", e); - } catch (InvalidKeySpecException e) { - throw new AclException("invalid public key spec!", e); - } catch (JwtException e) { - throw new AclException("invalid token!", e); - } - + String publicKeyUrl = getPublicKeyUrl(); + validateToken(token, publicKeyUrl, aclProperties); } else { throw new AclException("invalid token!"); } @@ -94,40 +62,7 @@ public static void authTokenByPublicKey(AclProperties aclProperties) { public static void helloTaskAuthTokenByPublicKey(AclProperties aclProperties) { String token = aclProperties.getToken(); if (StringUtils.isNotBlank(token)) { - String publicKeyUrl = null; - token = token.replace("Bearer ", ""); - for (String key : ConfigurationContextUtil.KEYS) { - CommonConfiguration commonConfiguration = ConfigurationContextUtil.get(key); - if (commonConfiguration == null) { - continue; - } - if (StringUtils.isBlank(commonConfiguration.getEventMeshSecurityPublickey())) { - throw new AclException("publicKeyUrl cannot be null"); - } - publicKeyUrl = commonConfiguration.getEventMeshSecurityPublickey(); - } - byte[] validationKeyBytes = new byte[0]; - try { - validationKeyBytes = Files.readAllBytes(Paths.get(publicKeyUrl)); - X509EncodedKeySpec spec = new X509EncodedKeySpec(validationKeyBytes); - KeyFactory kf = KeyFactory.getInstance("RSA"); - Key validationKey = kf.generatePublic(spec); - JwtParser signedParser = Jwts.parserBuilder().setSigningKey(validationKey).build(); - Jwt signJwt = signedParser.parseClaimsJws(token); - String sub = signJwt.getBody().get("sub", String.class); - if (!sub.contains(aclProperties.getExtendedField("group").toString()) && !sub.contains("pulsar-admin")) { - throw new AclException("group:" + aclProperties.getExtendedField("group ") + " has no auth to access eventMesh:" - + aclProperties.getTopic()); - } - } catch (IOException e) { - throw new AclException("public key read error!", e); - } catch (NoSuchAlgorithmException e) { - throw new AclException("no such RSA algorithm!", e); - } catch (InvalidKeySpecException e) { - throw new AclException("invalid public key spec!", e); - } catch (JwtException e) { - throw new AclException("invalid token!", e); - } + validateToken(token, getPublicKeyUrl(), aclProperties); } else { throw new AclException("invalid token!"); } @@ -148,4 +83,45 @@ public static boolean authAccess(AclProperties aclProperties) { return groupTopics.contains(topic); } + private static String getPublicKeyUrl() { + String publicKeyUrl = null; + for (String key : ConfigurationContextUtil.KEYS) { + CommonConfiguration commonConfiguration = ConfigurationContextUtil.get(key); + if (null == commonConfiguration) { + continue; + } + if (StringUtils.isBlank(commonConfiguration.getEventMeshSecurityPublickey())) { + throw new AclException("publicKeyUrl cannot be null"); + } + publicKeyUrl = commonConfiguration.getEventMeshSecurityPublickey(); + } + return publicKeyUrl; + } + + private static void validateToken(String token, String publicKeyUrl, AclProperties aclProperties) { + String sub; + token = token.replace("Bearer ", ""); + byte[] validationKeyBytes; + try { + validationKeyBytes = Files.readAllBytes(Paths.get(Objects.requireNonNull(publicKeyUrl))); + X509EncodedKeySpec spec = new X509EncodedKeySpec(validationKeyBytes); + KeyFactory kf = KeyFactory.getInstance("RSA"); + Key validationKey = kf.generatePublic(spec); + JwtParser signedParser = Jwts.parserBuilder().setSigningKey(validationKey).build(); + Jwt signJwt = 
signedParser.parseClaimsJws(token); + sub = signJwt.getBody().get("sub", String.class); + if (!sub.contains(aclProperties.getExtendedField("group").toString()) && !sub.contains("pulsar-admin")) { + throw new AclException("group:" + aclProperties.getExtendedField("group ") + " has no auth to access eventMesh:" + + aclProperties.getTopic()); + } + } catch (IOException e) { + throw new AclException("public key read error!", e); + } catch (NoSuchAlgorithmException e) { + throw new AclException("no such RSA algorithm!", e); + } catch (InvalidKeySpecException e) { + throw new AclException("invalid public key spec!", e); + } catch (JwtException e) { + throw new AclException("invalid token!", e); + } + } } diff --git a/eventmesh-spi/src/main/java/org/apache/eventmesh/spi/EventMeshExtensionType.java b/eventmesh-spi/src/main/java/org/apache/eventmesh/spi/EventMeshExtensionType.java index f76379f9e8..8de4e1ecfd 100644 --- a/eventmesh-spi/src/main/java/org/apache/eventmesh/spi/EventMeshExtensionType.java +++ b/eventmesh-spi/src/main/java/org/apache/eventmesh/spi/EventMeshExtensionType.java @@ -26,6 +26,7 @@ public enum EventMeshExtensionType { CONNECTOR("connector"), STORAGE("storage"), META("metaStorage"), + REGISTRY("registryCenter"), SECURITY("security"), PROTOCOL("protocol"), METRICS("metrics"), diff --git a/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle index 0eafe94636..c9064cdef4 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-kafka/build.gradle @@ -22,9 +22,9 @@ dependencies { implementation group: 'io.cloudevents', name: 'cloudevents-kafka', version: '2.5.0' // https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients - implementation 'org.apache.kafka:kafka-clients:3.6.2' + implementation 'org.apache.kafka:kafka-clients:3.8.1' - testImplementation 'org.junit.jupiter:junit-jupiter:5.6.0' + testImplementation 'org.junit.jupiter:junit-jupiter' testImplementation project(":eventmesh-storage-plugin:eventmesh-storage-api") testImplementation project(":eventmesh-common") diff --git a/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle index 8ca1ec8f8a..41eb93965e 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-rabbitmq/build.gradle @@ -19,12 +19,12 @@ dependencies { implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") implementation project(":eventmesh-common") // rabbitmq - implementation 'com.rabbitmq:amqp-client:5.21.0' + implementation 'com.rabbitmq:amqp-client:5.22.0' testImplementation project(":eventmesh-storage-plugin:eventmesh-storage-api") testImplementation project(":eventmesh-common") // rabbitmq - testImplementation 'com.rabbitmq:amqp-client:5.21.0' + testImplementation 'com.rabbitmq:amqp-client:5.22.0' implementation 'io.cloudevents:cloudevents-json-jackson' testImplementation 'io.cloudevents:cloudevents-json-jackson' diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle index bec0767638..4fb1645414 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/build.gradle @@ -20,24 +20,16 @@ dependencies { implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") // redisson - 
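The extracted validateToken method is the standard jjwt public-key flow: read an X.509-encoded RSA key, build a parser around it, and check the sub claim. A standalone sketch of that flow follows; the key path and token literal are placeholders, and in the runtime the path comes from the eventMesh.security.publickey setting via CommonConfiguration.

    import io.jsonwebtoken.Claims;
    import io.jsonwebtoken.Jwts;

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.Key;
    import java.security.KeyFactory;
    import java.security.spec.X509EncodedKeySpec;

    public class JwtPublicKeyCheck {
        public static void main(String[] args) throws Exception {
            String publicKeyPath = "/path/to/public-key.der"; // placeholder, X.509 (DER) encoded
            String token = "eyJ...";                          // placeholder, "Bearer " prefix already stripped

            byte[] keyBytes = Files.readAllBytes(Paths.get(publicKeyPath));
            Key key = KeyFactory.getInstance("RSA")
                .generatePublic(new X509EncodedKeySpec(keyBytes));

            Claims claims = Jwts.parserBuilder()
                .setSigningKey(key)
                .build()
                .parseClaimsJws(token)
                .getBody();
            System.out.println("sub = " + claims.get("sub", String.class));
        }
    }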
implementation('org.redisson:redisson:3.17.3') { - exclude group: 'io.netty', module: 'netty-common' - exclude group: 'io.netty', module: 'netty-buffer' - exclude group: 'io.netty', module: 'netty-codec' - exclude group: 'io.netty', module: 'netty-transport' - exclude group: 'io.netty', module: 'netty-resolver' - exclude group: 'io.netty', module: 'netty-resolver-dns' - exclude group: 'io.netty', module: 'netty-handler' - } + implementation 'org.redisson:redisson:3.38.1' // netty - implementation "io.netty:netty-all" + implementation 'io.netty:netty-all' // auxiliary serialize api 'io.cloudevents:cloudevents-json-jackson' // test dependencies - testImplementation 'ai.grakn:redis-mock:0.1.6' + testImplementation 'com.github.fppt:jedis-mock:1.1.3' testImplementation "org.mockito:mockito-core" compileOnly 'org.projectlombok:lombok' diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/gradle.properties b/eventmesh-storage-plugin/eventmesh-storage-redis/gradle.properties index 899c915a54..6f2a7b3f8b 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/gradle.properties +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/gradle.properties @@ -1,3 +1,4 @@ +# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/client/RedissonClient.java b/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/client/RedissonClient.java index 69fb37b922..9a02064747 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/client/RedissonClient.java +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/client/RedissonClient.java @@ -73,18 +73,15 @@ private static Redisson create(RedisProperties properties) { throw new StorageRuntimeException(message, ie); } + Config config = new Config(); + config.setCodec(CloudEventCodec.getInstance()); + config.setThreads(properties.getRedissonThreads()); + config.setNettyThreads(properties.getRedissonNettyThreads()); + String serverAddress = properties.getServerAddress(); String serverPassword = properties.getServerPassword(); String masterName = properties.getServerMasterName(); - Config config = OBJECT_MAPPER.convertValue(properties.getRedissonProperties(), Config.class); - - if (config == null) { - config = new Config(); - } - - config.setCodec(CloudEventCodec.getInstance()); - switch (serverType) { case SINGLE: config.useSingleServer() diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/config/RedisProperties.java b/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/config/RedisProperties.java index a478e7a70f..b1e9f1a3af 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/config/RedisProperties.java +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/java/org/apache/eventmesh/storage/redis/config/RedisProperties.java @@ -20,8 +20,6 @@ import org.apache.eventmesh.common.config.Config; import org.apache.eventmesh.common.config.ConfigField; -import java.util.Properties; - import lombok.Data; @Data @@ -44,7 +42,7 @@ public class RedisProperties { * The address of the redis 
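For context on the Config that RedissonClient now builds by hand, a minimal single-server Redisson setup looks like the sketch below. It mirrors only the SINGLE branch of the factory; the CloudEventCodec, password, and sentinel/cluster branches are omitted, and the address matches the new default in redis-client.properties.

    import org.redisson.Redisson;
    import org.redisson.config.Config;

    public class RedissonSingleServerSketch {
        public static void main(String[] args) {
            Config config = new Config();
            config.setThreads(16);        // eventMesh.server.redis.redisson.threads default
            config.setNettyThreads(32);   // eventMesh.server.redis.redisson.nettyThreads default
            config.useSingleServer()
                .setAddress("redis://127.0.0.1:6379");

            // Fully qualified because the EventMesh plugin also has a class named RedissonClient.
            org.redisson.api.RedissonClient client = Redisson.create(config);
            System.out.println(client.getBucket("demo").isExists());
            client.shutdown();
        }
    }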
server following format -- host1:port1,host2:port2,…… */ @ConfigField(field = "serverAddress") - private String serverAddress; + private String serverAddress = "redis://127.0.0.1:6379"; /** * The password for redis authentication. @@ -55,8 +53,11 @@ public class RedisProperties { /** * The redisson options, redisson properties prefix is `eventMesh.server.redis.redisson` */ - @ConfigField(field = "redisson") - private Properties redissonProperties; + @ConfigField(field = "redisson.threads") + private int redissonThreads = 16; + + @ConfigField(field = "redisson.nettyThreads") + private int redissonNettyThreads = 32; public enum ServerType { SINGLE, diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/resources/redis-client.properties b/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/resources/redis-client.properties index e024dca8ac..9baf41f360 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/resources/redis-client.properties +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/src/main/resources/redis-client.properties @@ -15,5 +15,9 @@ # limitations under the License. # -eventMesh.server.redis.serverAddress= +eventMesh.server.redis.serverAddress=redis://127.0.0.1:6379 eventMesh.server.redis.serverPassword= +eventMesh.server.redis.serverType=SINGLE +eventMesh.server.redis.serverMasterName=master +eventMesh.server.redis.redisson.threads= +eventMesh.server.redis.redisson.nettyThreads= diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/AbstractRedisServer.java b/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/AbstractRedisServer.java index 7aab9737d4..cefca9ea4b 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/AbstractRedisServer.java +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/AbstractRedisServer.java @@ -17,25 +17,19 @@ package org.apache.eventmesh.storage.redis; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; +import java.io.IOException; -import ai.grakn.redismock.RedisServer; +import com.github.fppt.jedismock.RedisServer; public abstract class AbstractRedisServer { - private static RedisServer redisServer; + private static final RedisServer redisServer; - @BeforeAll - public static void setupRedisServer() throws Exception { - redisServer = RedisServer.newRedisServer(6379); - redisServer.start(); - } - - @AfterAll - public static void shutdownRedisServer() { - if (redisServer != null) { - redisServer.stop(); + static { + try { + redisServer = RedisServer.newRedisServer(6379).start(); + } catch (IOException e) { + throw new RuntimeException(e); } } } diff --git a/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/config/RedisPropertiesTest.java b/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/config/RedisPropertiesTest.java index 677dc406e5..c284fb89da 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/config/RedisPropertiesTest.java +++ b/eventmesh-storage-plugin/eventmesh-storage-redis/src/test/java/org/apache/eventmesh/storage/redis/config/RedisPropertiesTest.java @@ -19,8 +19,6 @@ import org.apache.eventmesh.common.config.ConfigService; -import java.util.Properties; - import org.junit.jupiter.api.Assertions; import 
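The AbstractRedisServer test base now boots jedis-mock in a static initializer and never shuts it down. As a point of comparison, a sketch of the same embedded server with an explicit stop is below; it assumes jedis-mock 1.1.3 and that stop() exists as on the previous mock server, which should be verified against that version.

    import com.github.fppt.jedismock.RedisServer;

    public class EmbeddedRedisSketch {
        public static void main(String[] args) throws Exception {
            // Boots an in-memory Redis on the port the plugin's default address points at.
            RedisServer server = RedisServer.newRedisServer(6379).start();
            try {
                // ... exercise code that talks to redis://127.0.0.1:6379 ...
            } finally {
                server.stop(); // assumed shutdown hook; the test base class above relies on JVM exit instead
            }
        }
    }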
org.junit.jupiter.api.Test; @@ -37,11 +35,7 @@ private void assertConfig(RedisProperties config) { Assertions.assertEquals("redis://127.0.0.1:6379", config.getServerAddress()); Assertions.assertEquals(RedisProperties.ServerType.SINGLE, config.getServerType()); Assertions.assertEquals("serverMasterName-success!!!", config.getServerMasterName()); - - Properties properties = new Properties(); - properties.put("threads", "2"); - properties.put("nettyThreads", "2"); - Properties redissonProperties = config.getRedissonProperties(); - Assertions.assertEquals(properties, redissonProperties); + Assertions.assertEquals(2, config.getRedissonThreads()); + Assertions.assertEquals(2, config.getRedissonNettyThreads()); } } diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/build.gradle b/eventmesh-storage-plugin/eventmesh-storage-standalone/build.gradle index a8b1827aa7..22271fb57d 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/build.gradle +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/build.gradle @@ -18,6 +18,7 @@ dependencies { implementation project(":eventmesh-common") implementation project(":eventmesh-storage-plugin:eventmesh-storage-api") + implementation "com.lmax:disruptor" compileOnly 'org.projectlombok:lombok' annotationProcessor 'org.projectlombok:lombok' diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdmin.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdmin.java index 7f5ab2da67..72257647ad 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdmin.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdmin.java @@ -19,7 +19,7 @@ import org.apache.eventmesh.api.admin.AbstractAdmin; import org.apache.eventmesh.api.admin.TopicProperties; -import org.apache.eventmesh.storage.standalone.broker.MessageQueue; +import org.apache.eventmesh.storage.standalone.broker.Channel; import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; import org.apache.eventmesh.storage.standalone.broker.model.TopicMetadata; @@ -42,11 +42,11 @@ public StandaloneAdmin() { @Override public List getTopic() throws Exception { - ConcurrentHashMap messageContainer = this.standaloneBroker.getMessageContainer(); + ConcurrentHashMap messageContainer = this.standaloneBroker.getMessageContainer(); List topicList = new ArrayList<>(); messageContainer.keySet().forEach(topicMetadata -> { - MessageQueue messageQueue = messageContainer.get(topicMetadata); - final int messageCount = messageQueue.getPutIndex() - messageQueue.getTakeIndex(); + Channel channel = messageContainer.get(topicMetadata); + final int messageCount = channel.getMessageCount(); topicList.add(new TopicProperties( topicMetadata.getTopicName(), messageCount)); @@ -65,25 +65,7 @@ public void deleteTopic(String topicName) { standaloneBroker.deleteTopicIfExist(topicName); } - @Override - public List getEvent(String topicName, int offset, int length) throws Exception { - if (!this.standaloneBroker.checkTopicExist(topicName)) { - throw new Exception("The topic name doesn't exist in the message queue"); - } - ConcurrentHashMap messageContainer = this.standaloneBroker.getMessageContainer(); - long topicOffset = messageContainer.get(new TopicMetadata(topicName)).getTakeIndex(); - 
List messageList = new ArrayList<>(); - for (int index = 0; index < length; index++) { - long messageOffset = topicOffset + offset + index; - CloudEvent event = this.standaloneBroker.getMessage(topicName, messageOffset); - if (event == null) { - break; - } - messageList.add(event); - } - return messageList; - } @Override public void publish(CloudEvent cloudEvent) throws Exception { diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/Channel.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/Channel.java new file mode 100644 index 0000000000..2ea7310b83 --- /dev/null +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/Channel.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.storage.standalone.broker; + +import org.apache.eventmesh.api.LifeCycle; +import org.apache.eventmesh.common.EventMeshThreadFactory; +import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; +import org.apache.eventmesh.storage.standalone.broker.model.TopicMetadata; +import org.apache.eventmesh.storage.standalone.broker.provider.DisruptorProvider; + +import com.lmax.disruptor.BlockingWaitStrategy; +import com.lmax.disruptor.EventHandler; +import com.lmax.disruptor.IgnoreExceptionHandler; +import com.lmax.disruptor.RingBuffer; +import com.lmax.disruptor.dsl.Disruptor; +import com.lmax.disruptor.dsl.ProducerType; + +import lombok.Getter; + + +public class Channel implements LifeCycle { + + public static final Integer DEFAULT_SIZE = 4096 << 1 << 1; + @Getter + private DisruptorProvider provider; + private final Integer size; + private final EventHandler eventHandler; + private volatile boolean started = false; + private final TopicMetadata topic; + private static final String THREAD_NAME_PREFIX = "standalone_disruptor_provider_"; + + public Channel(TopicMetadata topic, EventHandler eventHandler) { + this(DEFAULT_SIZE, topic, eventHandler); + } + + + public Channel(final Integer ringBufferSize, final TopicMetadata topic, final EventHandler eventHandler) { + this.size = ringBufferSize; + this.topic = topic; + this.eventHandler = eventHandler; + } + + + @Override + public boolean isStarted() { + return started; + } + + @Override + public boolean isClosed() { + return !isStarted(); + } + + public synchronized void start() { + if (isClosed()) { + doStart(); + started = true; + } + } + + public void doStart() { + Disruptor disruptor = new Disruptor<>( + MessageEntity::new, + size, + new EventMeshThreadFactory(THREAD_NAME_PREFIX + topic.getTopicName(), true), + ProducerType.MULTI, + new 
BlockingWaitStrategy() + ); + + disruptor.handleEventsWith(eventHandler); + disruptor.setDefaultExceptionHandler(new IgnoreExceptionHandler()); + RingBuffer ringBuffer = disruptor.getRingBuffer(); + provider = new DisruptorProvider(ringBuffer, disruptor); + provider.start(); + } + + public int getMessageCount() { + return provider.getMessageCount(); + } + + @Override + public synchronized void shutdown() { + if (isStarted()) { + provider.shutdown(); + provider = null; + started = false; + } + } + +} \ No newline at end of file diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBroker.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBroker.java index 5e64b40a70..8654b2d1c3 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBroker.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBroker.java @@ -19,42 +19,36 @@ import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; import org.apache.eventmesh.storage.standalone.broker.model.TopicMetadata; -import org.apache.eventmesh.storage.standalone.broker.task.HistoryMessageClear; -import org.apache.eventmesh.storage.standalone.broker.task.HistoryMessageClearTask; - -import org.apache.commons.lang3.tuple.Pair; +import org.apache.eventmesh.storage.standalone.broker.task.Subscribe; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicLong; import io.cloudevents.CloudEvent; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + /** * This broker used to store event, it just support standalone mode, you shouldn't use this module in production environment */ +@Slf4j public class StandaloneBroker { - private final ConcurrentHashMap messageContainer; + // message source by topic + @Getter + private final ConcurrentHashMap messageContainer; - // todo: move the offset manage to consumer - private final ConcurrentHashMap offsetMap; + @Getter + private final ConcurrentHashMap subscribeContainer; private StandaloneBroker() { this.messageContainer = new ConcurrentHashMap<>(); - this.offsetMap = new ConcurrentHashMap<>(); - startHistoryMessageCleanTask(); - } - - public ConcurrentHashMap getMessageContainer() { - return this.messageContainer; + this.subscribeContainer = new ConcurrentHashMap<>(); } - public ConcurrentHashMap getOffsetMap() { - return this.offsetMap; - } public static StandaloneBroker getInstance() { - return StandaloneBrokerInstanceHolder.instance; + return StandaloneBrokerInstanceHolder.INSTANCE; } /** @@ -62,28 +56,38 @@ public static StandaloneBroker getInstance() { * * @param topicName topic name * @param message message - * @throws InterruptedException */ - public MessageEntity putMessage(String topicName, CloudEvent message) throws InterruptedException { - Pair pair = createTopicIfAbsent(topicName); - AtomicLong topicOffset = pair.getRight(); - MessageQueue messageQueue = pair.getLeft(); - - MessageEntity messageEntity = new MessageEntity( - new TopicMetadata(topicName), message, topicOffset.getAndIncrement(), System.currentTimeMillis()); - messageQueue.put(messageEntity); - + public MessageEntity putMessage(String topicName, CloudEvent message) { + TopicMetadata topicMetadata = new TopicMetadata(topicName); + if 
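Channel and DisruptorProvider together wrap a fairly standard LMAX Disruptor pipeline: pre-allocate mutable slots, attach a handler, then translate-and-publish into the ring buffer. A self-contained sketch of that pattern, with a hypothetical Slot class standing in for MessageEntity and the handler lambda standing in for Subscribe:

    import com.lmax.disruptor.BlockingWaitStrategy;
    import com.lmax.disruptor.RingBuffer;
    import com.lmax.disruptor.dsl.Disruptor;
    import com.lmax.disruptor.dsl.ProducerType;

    import java.util.concurrent.Executors;

    public class DisruptorSketch {

        // Stand-in for MessageEntity: a mutable slot reused by the ring buffer.
        static class Slot {
            String payload;
        }

        public static void main(String[] args) {
            Disruptor<Slot> disruptor = new Disruptor<>(
                Slot::new,                        // event factory, pre-allocates slots
                1024,                             // ring size, must be a power of two
                Executors.defaultThreadFactory(),
                ProducerType.MULTI,
                new BlockingWaitStrategy());

            // Equivalent of Subscribe acting as the Channel's EventHandler.
            disruptor.handleEventsWith((slot, sequence, endOfBatch) ->
                System.out.println(sequence + " -> " + slot.payload));
            disruptor.start();

            RingBuffer<Slot> ringBuffer = disruptor.getRingBuffer();
            // Equivalent of DisruptorProvider.onData(...): translate the argument into the claimed slot.
            ringBuffer.publishEvent((slot, sequence, msg) -> slot.payload = msg, "hello");

            disruptor.shutdown(); // waits for in-flight events before halting
        }
    }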
(!messageContainer.containsKey(topicMetadata)) { + createTopic(topicName); + } + Channel channel = messageContainer.get(topicMetadata); + MessageEntity messageEntity = new MessageEntity(new TopicMetadata(topicName), message); + channel.getProvider().onData(messageEntity); return messageEntity; } + public Channel createTopic(String topicName) { + TopicMetadata topicMetadata = new TopicMetadata(topicName); + return messageContainer.computeIfAbsent(topicMetadata, k -> { + Subscribe subscribe = subscribeContainer.get(topicMetadata); + if (subscribe == null) { + throw new IllegalStateException("the topic not exist subscribe "); + } + Channel channel = new Channel(topicMetadata, subscribe); + channel.start(); + return channel; + }); + } + /** * Get the message, if the queue is empty then await * * @param topicName */ public CloudEvent takeMessage(String topicName) throws InterruptedException { - TopicMetadata topicMetadata = new TopicMetadata(topicName); - return messageContainer.computeIfAbsent(topicMetadata, k -> new MessageQueue()).take().getMessage(); + return null; } /** @@ -92,12 +96,7 @@ public CloudEvent takeMessage(String topicName) throws InterruptedException { * @param topicName */ public CloudEvent getMessage(String topicName) { - TopicMetadata topicMetadata = new TopicMetadata(topicName); - MessageEntity head = messageContainer.computeIfAbsent(topicMetadata, k -> new MessageQueue()).getHead(); - if (head == null) { - return null; - } - return head.getMessage(); + return null; } /** @@ -108,21 +107,9 @@ public CloudEvent getMessage(String topicName) { * @return CloudEvent */ public CloudEvent getMessage(String topicName, long offset) { - TopicMetadata topicMetadata = new TopicMetadata(topicName); - MessageEntity messageEntity = messageContainer.computeIfAbsent(topicMetadata, k -> new MessageQueue()).getByOffset(offset); - if (messageEntity == null) { - return null; - } - return messageEntity.getMessage(); + return null; } - private void startHistoryMessageCleanTask() { - HistoryMessageClear historyMessageClear = new HistoryMessageClear(messageContainer); - Thread thread = new Thread(new HistoryMessageClearTask(historyMessageClear)); - thread.setDaemon(true); - thread.setName("StandaloneBroker-HistoryMessageCleanTask"); - thread.start(); - } public boolean checkTopicExist(String topicName) { return messageContainer.containsKey(new TopicMetadata(topicName)); @@ -132,13 +119,10 @@ public boolean checkTopicExist(String topicName) { * if the topic does not exist, create the topic * * @param topicName topicName - * @return messageQueue and offset + * @return Channel */ - public Pair createTopicIfAbsent(String topicName) { - TopicMetadata topicMetadata = new TopicMetadata(topicName); - MessageQueue messageQueue = messageContainer.computeIfAbsent(topicMetadata, k -> new MessageQueue()); - AtomicLong offset = offsetMap.computeIfAbsent(topicMetadata, k -> new AtomicLong()); - return Pair.of(messageQueue, offset); + public Channel createTopicIfAbsent(String topicName) { + return createTopic(topicName); } /** @@ -148,18 +132,23 @@ public Pair createTopicIfAbsent(String topicName) { */ public void deleteTopicIfExist(String topicName) { TopicMetadata topicMetadata = new TopicMetadata(topicName); + Channel channel = createTopicIfAbsent(topicName); + channel.shutdown(); messageContainer.remove(topicMetadata); } - public void updateOffset(TopicMetadata topicMetadata, long offset) { - offsetMap.computeIfPresent(topicMetadata, (k, v) -> { - v.set(offset); - return v; - }); + public void 
subscribed(String topicName, Subscribe subscribe) { + TopicMetadata topicMetadata = new TopicMetadata(topicName); + if (getMessageContainer().containsKey(topicMetadata)) { + log.warn("the topic already subscribed"); + return; + } + subscribeContainer.put(topicMetadata, subscribe); } + private static class StandaloneBrokerInstanceHolder { - private static final StandaloneBroker instance = new StandaloneBroker(); + private static final StandaloneBroker INSTANCE = new StandaloneBroker(); } -} +} \ No newline at end of file diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/model/MessageEntity.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/model/MessageEntity.java index 0f437aee04..3662b30255 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/model/MessageEntity.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/model/MessageEntity.java @@ -21,6 +21,9 @@ import io.cloudevents.CloudEvent; +import lombok.NoArgsConstructor; + +@NoArgsConstructor public class MessageEntity implements Serializable { private static final long serialVersionUID = 6646148767540524786L; @@ -40,6 +43,11 @@ public MessageEntity(TopicMetadata topicMetadata, CloudEvent message, long offse this.createTimeMills = currentTimeMills; } + public MessageEntity(TopicMetadata topicMetadata, CloudEvent message) { + this.topicMetadata = topicMetadata; + this.message = message; + } + public TopicMetadata getTopicMetadata() { return topicMetadata; } diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/provider/DisruptorProvider.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/provider/DisruptorProvider.java new file mode 100644 index 0000000000..47b2665a2c --- /dev/null +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/provider/DisruptorProvider.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eventmesh.storage.standalone.broker.provider; + +import org.apache.eventmesh.api.LifeCycle; +import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; + +import com.lmax.disruptor.EventTranslatorOneArg; +import com.lmax.disruptor.RingBuffer; +import com.lmax.disruptor.dsl.Disruptor; + +import lombok.extern.slf4j.Slf4j; + +/** + * DisruptorProvider. disruptor provider definition. 
+ */ +@Slf4j +public class DisruptorProvider implements LifeCycle { + + private final RingBuffer ringBuffer; + + private final Disruptor disruptor; + + private volatile boolean start = false; + + private final EventTranslatorOneArg translatorOneArg = (messageEntity, sequence, arg0) -> { + arg0.setOffset(sequence); + arg0.setCreateTimeMills(System.currentTimeMillis()); + messageEntity.setOffset(arg0.getOffset()); + messageEntity.setCreateTimeMills(arg0.getCreateTimeMills()); + messageEntity.setTopicMetadata(arg0.getTopicMetadata()); + messageEntity.setMessage(arg0.getMessage()); + }; + + + /** + * Instantiates a new Disruptor provider. + * + * @param ringBuffer the ring buffer + * @param disruptor the disruptor + */ + public DisruptorProvider(final RingBuffer ringBuffer, final Disruptor disruptor) { + this.ringBuffer = ringBuffer; + this.disruptor = disruptor; + } + + /** + * @param data the data + */ + public MessageEntity onData(final MessageEntity data) { + if (isClosed()) { + throw new IllegalArgumentException("the disruptor is close"); + } + try { + ringBuffer.publishEvent(translatorOneArg, data); + } catch (Exception ex) { + throw new IllegalStateException("send data fail."); + } + return data; + } + + + @Override + public boolean isStarted() { + return start; + } + + @Override + public boolean isClosed() { + return !isStarted(); + } + + @Override + public void start() { + if (null != disruptor) { + disruptor.start(); + start = true; + } + } + + /** + * Shutdown. + */ + public void shutdown() { + if (null != disruptor) { + disruptor.shutdown(); + start = false; + } + } + + public int getMessageCount() { + return ringBuffer.getBufferSize(); + } +} \ No newline at end of file diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/Subscribe.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/Subscribe.java index 8316270adf..4c84849ac7 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/Subscribe.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/Subscribe.java @@ -21,16 +21,19 @@ import org.apache.eventmesh.api.EventMeshAction; import org.apache.eventmesh.api.EventMeshAsyncConsumeContext; import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; +import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; -import java.util.concurrent.atomic.AtomicInteger; import io.cloudevents.CloudEvent; +import com.lmax.disruptor.EventHandler; +import com.lmax.disruptor.WorkHandler; + import lombok.Getter; import lombok.extern.slf4j.Slf4j; @Slf4j -public class Subscribe { +public class Subscribe implements WorkHandler, EventHandler { @Getter private final String topicName; @@ -38,8 +41,6 @@ public class Subscribe { private final EventListener listener; @Getter private volatile boolean isRunning; - @Getter - private AtomicInteger offset; public Subscribe(String topicName, StandaloneBroker standaloneBroker, @@ -51,52 +52,50 @@ public Subscribe(String topicName, } public void subscribe() { + standaloneBroker.subscribed(topicName, this); + } + + public void shutdown() { + isRunning = false; + standaloneBroker.deleteTopicIfExist(topicName); + } + + @Override + public void onEvent(MessageEntity event, long sequence, boolean endOfBatch) { + onEvent(event); + } + + 
@Override + public void onEvent(MessageEntity event) { try { - log.debug("execute subscribe task, topic: {}, offset: {}", topicName, offset); - if (offset == null) { - CloudEvent message = standaloneBroker.getMessage(topicName); - if (message != null) { - Object tmpOffset = message.getExtension("offset"); - if (tmpOffset instanceof Integer) { - offset = new AtomicInteger(Integer.parseInt(tmpOffset.toString())); - } else { - offset = new AtomicInteger(0); - } - } + if (!isRunning) { + return; } - if (offset != null) { - CloudEvent message = standaloneBroker.getMessage(topicName, offset.get()); - if (message != null) { - EventMeshAsyncConsumeContext consumeContext = new EventMeshAsyncConsumeContext() { + CloudEvent message = event.getMessage(); + if (message != null) { + EventMeshAsyncConsumeContext consumeContext = new EventMeshAsyncConsumeContext() { - @Override - public void commit(EventMeshAction action) { - switch (action) { - case CommitMessage: - // update offset - log.info("message commit, topic: {}, current offset:{}", topicName, offset.get()); - break; - case ManualAck: - // update offset - offset.incrementAndGet(); - log.info("message ack, topic: {}, current offset:{}", topicName, offset.get()); - break; - case ReconsumeLater: - default: - - } + @Override + public void commit(EventMeshAction action) { + switch (action) { + case CommitMessage: + // update offset + log.info("message commit, topic: {}, current offset:{}", topicName, event.getOffset()); + break; + case ManualAck: + // update offset + log.info("message ack, topic: {}, current offset:{}", topicName, event.getOffset()); + break; + case ReconsumeLater: + default: } - }; - listener.consume(message, consumeContext); - } + } + }; + listener.consume(message, consumeContext); } } catch (Exception ex) { - log.error("consumer error, topic: {}, offset: {}", topicName, offset == null ? null : offset.get(), ex); + log.error("consumer error, topic: {}, offset: {}", topicName, event.getOffset(), ex); } } - public void shutdown() { - isRunning = false; - } - -} +} \ No newline at end of file diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTask.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTask.java deleted file mode 100644 index 0936c79257..0000000000 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTask.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.eventmesh.storage.standalone.broker.task; - -import org.apache.eventmesh.common.utils.ThreadUtils; - -import java.util.concurrent.TimeUnit; - -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class SubscribeTask implements Runnable { - - private Subscribe subscribe; - - public SubscribeTask(Subscribe subscribe) { - this.subscribe = subscribe; - } - - @Override - public void run() { - while (subscribe.isRunning()) { - subscribe.subscribe(); - try { - ThreadUtils.sleepWithThrowException(1, TimeUnit.SECONDS); - } catch (InterruptedException e) { - log.error("Thread is interrupted, topic: {}, offset: {} thread name: {}", - subscribe.getTopicName(), - subscribe.getOffset() == null ? null : subscribe.getOffset().get(), - Thread.currentThread().getName(), e); - Thread.currentThread().interrupt(); - } - } - } - -} diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/consumer/StandaloneConsumer.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/consumer/StandaloneConsumer.java index 9eb753e3fa..edb66703f7 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/consumer/StandaloneConsumer.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/main/java/org/apache/eventmesh/storage/standalone/consumer/StandaloneConsumer.java @@ -20,17 +20,12 @@ import org.apache.eventmesh.api.AbstractContext; import org.apache.eventmesh.api.EventListener; import org.apache.eventmesh.api.consumer.Consumer; -import org.apache.eventmesh.common.ThreadPoolFactory; import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; -import org.apache.eventmesh.storage.standalone.broker.model.TopicMetadata; import org.apache.eventmesh.storage.standalone.broker.task.Subscribe; -import org.apache.eventmesh.storage.standalone.broker.task.SubscribeTask; import java.util.List; -import java.util.Objects; import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; import io.cloudevents.CloudEvent; @@ -45,16 +40,10 @@ public class StandaloneConsumer implements Consumer { private final ConcurrentHashMap subscribeTable; - private final ExecutorService consumeExecutorService; - public StandaloneConsumer(Properties properties) { this.standaloneBroker = StandaloneBroker.getInstance(); this.subscribeTable = new ConcurrentHashMap<>(16); this.isStarted = new AtomicBoolean(false); - this.consumeExecutorService = ThreadPoolFactory.createThreadPoolExecutor( - Runtime.getRuntime().availableProcessors() * 2, - Runtime.getRuntime().availableProcessors() * 2, - "StandaloneConsumerThread"); } @Override @@ -86,8 +75,6 @@ public void init(Properties keyValue) throws Exception { @Override public void updateOffset(List cloudEvents, AbstractContext context) { - cloudEvents.forEach(cloudEvent -> standaloneBroker.updateOffset( - new TopicMetadata(cloudEvent.getSubject()), Objects.requireNonNull((Long) cloudEvent.getExtension("offset")))); } @@ -99,9 +86,8 @@ public void subscribe(String topic) throws Exception { synchronized (subscribeTable) { standaloneBroker.createTopicIfAbsent(topic); Subscribe subscribe = new Subscribe(topic, standaloneBroker, listener); - SubscribeTask subScribeTask = new SubscribeTask(subscribe); + subscribe.subscribe(); subscribeTable.put(topic, subscribe); - 
consumeExecutorService.execute(subScribeTask); } } diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/TestUtils.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/TestUtils.java index 5ea0ab6f1a..0c16aabb35 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/TestUtils.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/TestUtils.java @@ -17,9 +17,14 @@ package org.apache.eventmesh.storage.standalone; +import org.apache.eventmesh.storage.standalone.broker.Channel; import org.apache.eventmesh.storage.standalone.broker.MessageQueue; +import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; import org.apache.eventmesh.storage.standalone.broker.model.TopicMetadata; +import org.apache.eventmesh.storage.standalone.broker.task.Subscribe; + +import org.apache.commons.lang3.tuple.Pair; import java.net.URI; import java.util.Collections; @@ -29,6 +34,7 @@ import io.cloudevents.CloudEvent; import io.cloudevents.core.builder.CloudEventBuilder; + public class TestUtils { public static final String TEST_TOPIC = "test-topic"; @@ -36,12 +42,18 @@ public class TestUtils { public static final int LENGTH = 5; public static final int EXCEEDED_MESSAGE_STORE_WINDOW = 60 * 60 * 1000 + 1000; - public static ConcurrentHashMap createDefaultMessageContainer() { - ConcurrentHashMap messageContainer = new ConcurrentHashMap<>(1); - messageContainer.put(new TopicMetadata(TEST_TOPIC), new MessageQueue()); - return messageContainer; + public static Pair, ConcurrentHashMap> createDefaultMessageContainer( + StandaloneBroker broker) { + ConcurrentHashMap messageContainer = new ConcurrentHashMap<>(1); + ConcurrentHashMap subscribeContainer = new ConcurrentHashMap<>(1); + + Subscribe subscribe = createSubscribe(broker); + subscribe.subscribe(); + subscribeContainer.put(new TopicMetadata(TEST_TOPIC), subscribe); + return Pair.of(messageContainer, subscribeContainer); } + public static ConcurrentHashMap createMessageContainer(TopicMetadata topicMetadata, MessageEntity messageEntity) throws InterruptedException { ConcurrentHashMap messageContainer = new ConcurrentHashMap<>(1); @@ -79,4 +91,15 @@ public static MessageEntity createMessageEntity(TopicMetadata topicMetadata, Clo offSet, currentTimeMillis); } + + public static Subscribe createSubscribe(StandaloneBroker standaloneBroker) { + return new Subscribe(TEST_TOPIC, standaloneBroker, (cloudEvent, context) -> { + }); + } + + public static Subscribe createSubscribe(StandaloneBroker standaloneBroker, List cloudEvents) { + return new Subscribe(TEST_TOPIC, standaloneBroker, (cloudEvent, context) -> { + cloudEvents.add(cloudEvent); + }); + } } diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdminTest.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdminTest.java index 2d84df265c..7200f902ec 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdminTest.java +++ 
b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/admin/StandaloneAdminTest.java @@ -17,18 +17,20 @@ package org.apache.eventmesh.storage.standalone.admin; -import static org.apache.eventmesh.storage.standalone.TestUtils.LENGTH; -import static org.apache.eventmesh.storage.standalone.TestUtils.OFF_SET; import static org.apache.eventmesh.storage.standalone.TestUtils.TEST_TOPIC; import static org.apache.eventmesh.storage.standalone.TestUtils.createDefaultCloudEvent; import static org.apache.eventmesh.storage.standalone.TestUtils.createDefaultMessageContainer; import static org.apache.eventmesh.storage.standalone.TestUtils.createDefaultMessageEntity; -import org.apache.eventmesh.api.admin.TopicProperties; +import org.apache.eventmesh.storage.standalone.broker.Channel; import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; +import org.apache.eventmesh.storage.standalone.broker.model.TopicMetadata; +import org.apache.eventmesh.storage.standalone.broker.task.Subscribe; -import java.util.List; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.concurrent.ConcurrentHashMap; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; @@ -52,6 +54,7 @@ public class StandaloneAdminTest { private StandaloneAdmin standaloneAdmin; + @BeforeEach public void setUp() { initStaticInstance(); @@ -69,13 +72,6 @@ public void testIsClosed() { Assertions.assertTrue(standaloneAdmin.isClosed()); } - @Test - public void testGetTopic() throws Exception { - List topicPropertiesList = standaloneAdmin.getTopic(); - Assertions.assertNotNull(topicPropertiesList); - Assertions.assertFalse(topicPropertiesList.isEmpty()); - } - @Test public void testCreateTopic() { standaloneAdmin.createTopic(TEST_TOPIC); @@ -88,21 +84,6 @@ public void testDeleteTopic() { Mockito.verify(standaloneBroker).deleteTopicIfExist(TEST_TOPIC); } - @Test - public void testGetEvent() throws Exception { - Mockito.when(standaloneBroker.checkTopicExist(TEST_TOPIC)).thenReturn(Boolean.TRUE); - Mockito.when(standaloneBroker.getMessage(TEST_TOPIC, OFF_SET)).thenReturn(createDefaultCloudEvent()); - List events = standaloneAdmin.getEvent(TEST_TOPIC, OFF_SET, LENGTH); - Assertions.assertNotNull(events); - Assertions.assertFalse(events.isEmpty()); - } - - @Test - public void testGetEvent_throwException() { - Mockito.when(standaloneBroker.checkTopicExist(TEST_TOPIC)).thenReturn(Boolean.FALSE); - Exception exception = Assertions.assertThrows(Exception.class, () -> standaloneAdmin.getEvent(TEST_TOPIC, OFF_SET, LENGTH)); - Assertions.assertEquals("The topic name doesn't exist in the message queue", exception.getMessage()); - } @Test public void testPublish() throws Exception { @@ -116,7 +97,11 @@ public void testPublish() throws Exception { private void initStaticInstance() { try (MockedStatic standaloneBrokerMockedStatic = Mockito.mockStatic(StandaloneBroker.class)) { standaloneBrokerMockedStatic.when(StandaloneBroker::getInstance).thenReturn(standaloneBroker); - Mockito.when(standaloneBroker.getMessageContainer()).thenReturn(createDefaultMessageContainer()); + Pair, ConcurrentHashMap> pair = + createDefaultMessageContainer(standaloneBroker); + Mockito.when(standaloneBroker.getSubscribeContainer()).thenReturn(pair.getRight()); + Mockito.when(standaloneBroker.getMessageContainer()).thenReturn(pair.getLeft()); + standaloneAdmin = new StandaloneAdmin(); } } diff --git 
a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBrokerTest.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBrokerTest.java index 3582f95ef5..6d84cb7800 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBrokerTest.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/StandaloneBrokerTest.java @@ -17,15 +17,12 @@ package org.apache.eventmesh.storage.standalone.broker; -import static org.apache.eventmesh.storage.standalone.TestUtils.OFF_SET; import static org.apache.eventmesh.storage.standalone.TestUtils.TEST_TOPIC; import static org.apache.eventmesh.storage.standalone.TestUtils.createDefaultCloudEvent; +import static org.apache.eventmesh.storage.standalone.TestUtils.createSubscribe; import org.apache.eventmesh.storage.standalone.broker.model.MessageEntity; - -import org.apache.commons.lang3.tuple.Pair; - -import java.util.concurrent.atomic.AtomicLong; +import org.apache.eventmesh.storage.standalone.broker.task.Subscribe; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -34,6 +31,14 @@ public class StandaloneBrokerTest { + + public StandaloneBroker getStandaloneBroker() { + StandaloneBroker instance = StandaloneBroker.getInstance(); + Subscribe subscribe = createSubscribe(instance); + subscribe.subscribe(); + return instance; + } + @Test public void testGetInstance() { Assertions.assertNotNull(StandaloneBroker.getInstance()); @@ -41,49 +46,23 @@ public void testGetInstance() { @Test public void testCreateTopicIfAbsent() { - StandaloneBroker instance = StandaloneBroker.getInstance(); - Pair pair = instance.createTopicIfAbsent(TEST_TOPIC); + StandaloneBroker instance = getStandaloneBroker(); + Channel pair = instance.createTopicIfAbsent(TEST_TOPIC); Assertions.assertNotNull(pair); } @Test public void testPutMessage() throws InterruptedException { - StandaloneBroker instance = StandaloneBroker.getInstance(); + StandaloneBroker instance = getStandaloneBroker(); CloudEvent cloudEvent = createDefaultCloudEvent(); MessageEntity messageEntity = instance.putMessage(TEST_TOPIC, cloudEvent); Assertions.assertNotNull(messageEntity); } - @Test - public void testTakeMessage() throws InterruptedException { - StandaloneBroker instance = StandaloneBroker.getInstance(); - CloudEvent cloudEvent = createDefaultCloudEvent(); - instance.putMessage(TEST_TOPIC, cloudEvent); - CloudEvent message = instance.takeMessage(TEST_TOPIC); - Assertions.assertNotNull(message); - } - - @Test - public void testGetMessage() throws InterruptedException { - StandaloneBroker instance = StandaloneBroker.getInstance(); - CloudEvent cloudEvent = createDefaultCloudEvent(); - instance.putMessage(TEST_TOPIC, cloudEvent); - CloudEvent cloudEventResult = instance.getMessage(TEST_TOPIC); - Assertions.assertNotNull(cloudEventResult); - } - - @Test - public void testMessageWithOffSet() throws InterruptedException { - StandaloneBroker instance = StandaloneBroker.getInstance(); - CloudEvent cloudEvent = createDefaultCloudEvent(); - instance.putMessage(TEST_TOPIC, cloudEvent); - CloudEvent cloudEventResult = instance.getMessage(TEST_TOPIC, OFF_SET); - Assertions.assertNotNull(cloudEventResult); - } @Test public void testCheckTopicExist() throws InterruptedException { - StandaloneBroker instance = 
StandaloneBroker.getInstance(); + StandaloneBroker instance = getStandaloneBroker(); CloudEvent cloudEvent = createDefaultCloudEvent(); instance.putMessage(TEST_TOPIC, cloudEvent); boolean exists = instance.checkTopicExist(TEST_TOPIC); @@ -92,7 +71,7 @@ public void testCheckTopicExist() throws InterruptedException { @Test public void testDeleteTopicIfExist() throws InterruptedException { - StandaloneBroker instance = StandaloneBroker.getInstance(); + StandaloneBroker instance = getStandaloneBroker(); CloudEvent cloudEvent = createDefaultCloudEvent(); instance.putMessage(TEST_TOPIC, cloudEvent); instance.deleteTopicIfExist(TEST_TOPIC); diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTest.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTest.java index bc11c9b0aa..3ef86bdd20 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTest.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/broker/task/SubscribeTest.java @@ -18,14 +18,11 @@ package org.apache.eventmesh.storage.standalone.broker.task; import static org.apache.eventmesh.storage.standalone.TestUtils.TEST_TOPIC; -import static org.apache.eventmesh.storage.standalone.TestUtils.createDefaultCloudEvent; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.anyString; import org.apache.eventmesh.api.EventListener; -import org.apache.eventmesh.api.EventMeshAsyncConsumeContext; import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; import org.junit.jupiter.api.Assertions; @@ -35,8 +32,6 @@ import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import io.cloudevents.CloudEvent; - @ExtendWith(MockitoExtension.class) public class SubscribeTest { @@ -48,12 +43,9 @@ public class SubscribeTest { @Test public void testSubscribe() { - CloudEvent cloudEvent = createDefaultCloudEvent(); - Mockito.when(standaloneBroker.getMessage(anyString())).thenReturn(cloudEvent); - Mockito.when(standaloneBroker.getMessage(anyString(), anyLong())).thenReturn(cloudEvent); subscribe = new Subscribe(TEST_TOPIC, standaloneBroker, eventListener); subscribe.subscribe(); - Mockito.verify(eventListener).consume(any(CloudEvent.class), any(EventMeshAsyncConsumeContext.class)); + Mockito.verify(standaloneBroker).subscribed(anyString(), any(Subscribe.class)); } @Test diff --git a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/producer/StandaloneProducerTest.java b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/producer/StandaloneProducerTest.java index 37cdc02c6a..4bfee4976f 100644 --- a/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/producer/StandaloneProducerTest.java +++ b/eventmesh-storage-plugin/eventmesh-storage-standalone/src/test/java/org/apache/eventmesh/storage/standalone/producer/StandaloneProducerTest.java @@ -17,8 +17,11 @@ package org.apache.eventmesh.storage.standalone.producer; +import static org.apache.eventmesh.storage.standalone.TestUtils.TEST_TOPIC; + import org.apache.eventmesh.api.SendResult; import 
org.apache.eventmesh.storage.standalone.TestUtils; +import org.apache.eventmesh.storage.standalone.broker.StandaloneBroker; import java.util.Properties; @@ -28,10 +31,13 @@ import io.cloudevents.CloudEvent; + + public class StandaloneProducerTest { private StandaloneProducer standaloneProducer; + @BeforeEach public void setUp() { standaloneProducer = new StandaloneProducer(new Properties()); @@ -61,6 +67,8 @@ public void testShutdown() { @Test public void testPublish() { + StandaloneBroker standaloneBroker = StandaloneBroker.getInstance(); + standaloneBroker.createTopicIfAbsent(TEST_TOPIC); CloudEvent cloudEvent = TestUtils.createDefaultCloudEvent(); SendResult sendResult = standaloneProducer.publish(cloudEvent); Assertions.assertNotNull(sendResult); diff --git a/eventmesh-trace-plugin/eventmesh-trace-pinpoint/build.gradle b/eventmesh-trace-plugin/eventmesh-trace-pinpoint/build.gradle index 0758ba291e..fcb492d692 100644 --- a/eventmesh-trace-plugin/eventmesh-trace-pinpoint/build.gradle +++ b/eventmesh-trace-plugin/eventmesh-trace-pinpoint/build.gradle @@ -15,7 +15,7 @@ * limitations under the License. */ -def pinpointVersion = "2.4.1" +def pinpointVersion = "3.0.0" dependencies { implementation project(":eventmesh-trace-plugin:eventmesh-trace-api") diff --git a/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/main/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporter.java b/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/main/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporter.java index 24e41039f3..61186c3062 100644 --- a/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/main/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporter.java +++ b/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/main/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporter.java @@ -40,6 +40,8 @@ import java.util.logging.Logger; import java.util.stream.Collectors; +import org.mapstruct.factory.Mappers; + import io.grpc.NameResolverProvider; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.internal.OtelEncodingUtils; @@ -55,6 +57,7 @@ import com.navercorp.pinpoint.bootstrap.context.SpanId; import com.navercorp.pinpoint.bootstrap.context.TraceId; +import com.navercorp.pinpoint.common.profiler.util.TransactionIdUtils; import com.navercorp.pinpoint.common.trace.AnnotationKey; import com.navercorp.pinpoint.common.trace.ServiceType; import com.navercorp.pinpoint.common.util.JvmUtils; @@ -67,6 +70,7 @@ import com.navercorp.pinpoint.profiler.AgentInfoSender; import com.navercorp.pinpoint.profiler.JvmInformation; import com.navercorp.pinpoint.profiler.context.DefaultServerMetaDataRegistryService; +import com.navercorp.pinpoint.profiler.context.DefaultSpanFactory; import com.navercorp.pinpoint.profiler.context.ServerMetaDataRegistryService; import com.navercorp.pinpoint.profiler.context.Span; import com.navercorp.pinpoint.profiler.context.SpanEvent; @@ -75,12 +79,24 @@ import com.navercorp.pinpoint.profiler.context.grpc.GrpcAgentInfoMessageConverter; import com.navercorp.pinpoint.profiler.context.grpc.GrpcSpanMessageConverter; import com.navercorp.pinpoint.profiler.context.grpc.config.GrpcTransportConfig; -import com.navercorp.pinpoint.profiler.context.id.DefaultTraceId; -import com.navercorp.pinpoint.profiler.context.id.DefaultTraceRoot; +import com.navercorp.pinpoint.profiler.context.grpc.config.SpanAutoUriGetter; +import com.navercorp.pinpoint.profiler.context.grpc.config.SpanUriGetter; +import 
com.navercorp.pinpoint.profiler.context.grpc.mapper.AgentInfoMapper; +import com.navercorp.pinpoint.profiler.context.grpc.mapper.AgentInfoMapperImpl; +import com.navercorp.pinpoint.profiler.context.grpc.mapper.AnnotationValueMapper; +import com.navercorp.pinpoint.profiler.context.grpc.mapper.JvmGcTypeMapper; +import com.navercorp.pinpoint.profiler.context.grpc.mapper.JvmGcTypeMapperImpl; +import com.navercorp.pinpoint.profiler.context.grpc.mapper.SpanMessageMapper; +import com.navercorp.pinpoint.profiler.context.grpc.mapper.SpanMessageMapperImpl; +import com.navercorp.pinpoint.profiler.context.id.DefaultTraceIdFactory; +import com.navercorp.pinpoint.profiler.context.id.DefaultTraceRootFactory; +import com.navercorp.pinpoint.profiler.context.id.TraceIdFactory; import com.navercorp.pinpoint.profiler.context.id.TraceRoot; +import com.navercorp.pinpoint.profiler.context.id.TraceRootFactory; import com.navercorp.pinpoint.profiler.context.provider.AgentInformationProvider; import com.navercorp.pinpoint.profiler.context.provider.grpc.DnsExecutorServiceProvider; import com.navercorp.pinpoint.profiler.context.provider.grpc.GrpcNameResolverProvider; +import com.navercorp.pinpoint.profiler.context.provider.grpc.SSLContextProvider; import com.navercorp.pinpoint.profiler.metadata.MetaDataType; import com.navercorp.pinpoint.profiler.monitor.metric.gc.JvmGcType; import com.navercorp.pinpoint.profiler.receiver.ProfilerCommandLocatorBuilder; @@ -117,6 +133,7 @@ public final class PinpointSpanExporter implements SpanExporter { private final String applicationName; private final GrpcTransportConfig grpcTransportConfig; + private final SSLContextProvider sslContextProvider; private final HeaderFactory headerFactory; @@ -124,22 +141,35 @@ public final class PinpointSpanExporter implements SpanExporter { private final SpanGrpcDataSender spanGrpcDataSender; + private final JvmGcTypeMapper jvmGcTypeMapper = new JvmGcTypeMapperImpl(); + private final AgentInfoMapper agentInfoMapper = new AgentInfoMapperImpl(jvmGcTypeMapper); + + private final SpanUriGetter spanUriGetter = new SpanAutoUriGetter(); + private final AnnotationValueMapper annotationValueMapper = Mappers.getMapper(AnnotationValueMapper.class); + private final SpanMessageMapper spanMessageMapper = new SpanMessageMapperImpl(annotationValueMapper, spanUriGetter); + public PinpointSpanExporter(final String agentId, - final String agentName, - final String applicationName, - final GrpcTransportConfig grpcTransportConfig) { + final String agentName, + final String applicationName, + final GrpcTransportConfig grpcTransportConfig) { this.agentId = Objects.requireNonNull(agentId, "agentId cannot be null"); this.agentName = Objects.requireNonNull(agentName, "agentName cannot be null"); this.applicationName = Objects.requireNonNull(applicationName, "applicationName cannot be null"); this.grpcTransportConfig = Objects.requireNonNull(grpcTransportConfig, "grpcTransportConfig cannot be null"); + if (grpcTransportConfig.getSslOption() != null) { + this.sslContextProvider = new SSLContextProvider(grpcTransportConfig); + } else { + this.sslContextProvider = null; + } + this.headerFactory = new AgentHeaderFactory( - agentId, - agentName, - applicationName, - ServiceType.UNDEFINED.getCode(), - agentStartTime); + agentId, + agentName, + applicationName, + ServiceType.UNDEFINED.getCode(), + agentStartTime); this.agentInfoSender = createAgentInfoSender(); this.agentInfoSender.start(); @@ -155,7 +185,7 @@ private AgentInfoSender createAgentInfoSender() { 
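Before the remaining exporter hunks, a condensed view of what the Pinpoint 3.0.0 upgrade changes here: protobuf conversion now goes through MapStruct-generated mappers that are constructed up front and handed to the gRPC message converters. The sketch below uses only the types and constructor shapes visible in this patch; treat anything beyond that as an assumption rather than documented 3.0.0 API.

import org.mapstruct.factory.Mappers;

import com.navercorp.pinpoint.profiler.context.grpc.GrpcAgentInfoMessageConverter;
import com.navercorp.pinpoint.profiler.context.grpc.config.SpanAutoUriGetter;
import com.navercorp.pinpoint.profiler.context.grpc.config.SpanUriGetter;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.AgentInfoMapper;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.AgentInfoMapperImpl;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.AnnotationValueMapper;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.JvmGcTypeMapper;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.JvmGcTypeMapperImpl;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.SpanMessageMapper;
import com.navercorp.pinpoint.profiler.context.grpc.mapper.SpanMessageMapperImpl;

class PinpointMapperWiringSketch {                        // illustrative only, not part of the patch
    // generated MapStruct mappers replace the hand-rolled protobuf conversion used with 2.4.x
    private final JvmGcTypeMapper jvmGcTypeMapper = new JvmGcTypeMapperImpl();
    private final AgentInfoMapper agentInfoMapper = new AgentInfoMapperImpl(jvmGcTypeMapper);

    private final SpanUriGetter spanUriGetter = new SpanAutoUriGetter();
    private final AnnotationValueMapper annotationValueMapper = Mappers.getMapper(AnnotationValueMapper.class);
    private final SpanMessageMapper spanMessageMapper = new SpanMessageMapperImpl(annotationValueMapper, spanUriGetter);

    // the agent-info converter now takes its mapper explicitly; the span converter likewise gains a
    // SpanMessageMapper constructor argument (see the createSpanGrpcDataSender() hunk further below)
    GrpcAgentInfoMessageConverter agentInfoConverter() {
        return new GrpcAgentInfoMessageConverter(agentInfoMapper);
    }
}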
grpcTransportConfig.getAgentCollectorIp(), grpcTransportConfig.getAgentCollectorPort(), grpcTransportConfig.getAgentSenderExecutorQueueSize(), - new GrpcAgentInfoMessageConverter(), + new GrpcAgentInfoMessageConverter(agentInfoMapper), reconnectExecutor, scheduledExecutorService, agentChannelFactory, @@ -171,17 +201,17 @@ private AgentInfoSender createAgentInfoSender() { ServiceType.STAND_ALONE); final JvmInformation jvmInformation = new JvmInformation( - JvmUtils.getSystemProperty(SystemPropertyKey.JAVA_VERSION), - JvmGcType.UNKNOWN); + JvmUtils.getSystemProperty(SystemPropertyKey.JAVA_VERSION), + JvmGcType.UNKNOWN); final ServerMetaDataRegistryService serverMetaDataRegistryService = new DefaultServerMetaDataRegistryService( - Collections.emptyList()); + Collections.emptyList()); serverMetaDataRegistryService.setServerName(EventMeshTraceConstants.SERVICE_NAME); final AgentInfoFactory agentInfoFactory = new AgentInfoFactory( - agentInformationProvider.createAgentInformation(), - serverMetaDataRegistryService, - jvmInformation); + agentInformationProvider.createAgentInformation(), + serverMetaDataRegistryService, + jvmInformation); return new AgentInfoSender.Builder(agentGrpcDataSender, agentInfoFactory).build(); } @@ -193,7 +223,8 @@ private SpanGrpcDataSender createSpanGrpcDataSender() { new GrpcSpanMessageConverter( agentId, ServiceType.STAND_ALONE.getCode(), - new GrpcSpanProcessorV2()); + new GrpcSpanProcessorV2(), + this.spanMessageMapper); final StreamState streamState = new SimpleStreamState( @@ -201,21 +232,23 @@ private SpanGrpcDataSender createSpanGrpcDataSender() { grpcTransportConfig.getSpanClientOption().getLimitTime()); return new SpanGrpcDataSender( - grpcTransportConfig.getSpanCollectorIp(), - grpcTransportConfig.getSpanCollectorPort(), - grpcTransportConfig.getSpanSenderExecutorQueueSize(), - messageConverter, - reconnectExecutor, - spanChannelFactory, - streamState); + grpcTransportConfig.getSpanCollectorIp(), + grpcTransportConfig.getSpanCollectorPort(), + grpcTransportConfig.getSpanSenderExecutorQueueSize(), + messageConverter, + reconnectExecutor, + spanChannelFactory, + streamState, + grpcTransportConfig.getSpanRpcMaxAgeMillis()); } private ChannelFactory createAgentChannelFactory() { - final ChannelFactoryBuilder channelFactoryBuilder = - new DefaultChannelFactoryBuilder(AGENT_CHANNEL_FACTORY); + final ChannelFactoryBuilder channelFactoryBuilder = new DefaultChannelFactoryBuilder(AGENT_CHANNEL_FACTORY); channelFactoryBuilder.setHeaderFactory(headerFactory); channelFactoryBuilder.setNameResolverProvider(nameResolverProvider); - channelFactoryBuilder.setSslOption(grpcTransportConfig.getSslOption()); + if (this.sslContextProvider != null) { + channelFactoryBuilder.setSslContext(this.sslContextProvider.get()); + } channelFactoryBuilder.setClientOption(grpcTransportConfig.getAgentClientOption()); channelFactoryBuilder.setExecutorQueueSize(grpcTransportConfig.getAgentChannelExecutorQueueSize()); @@ -223,11 +256,12 @@ private ChannelFactory createAgentChannelFactory() { } private ChannelFactory createSpanChannelFactory() { - final ChannelFactoryBuilder channelFactoryBuilder = - new DefaultChannelFactoryBuilder(SPAN_CHANNEL_FACTORY); + final ChannelFactoryBuilder channelFactoryBuilder = new DefaultChannelFactoryBuilder(SPAN_CHANNEL_FACTORY); channelFactoryBuilder.setHeaderFactory(headerFactory); channelFactoryBuilder.setNameResolverProvider(nameResolverProvider); - channelFactoryBuilder.setSslOption(grpcTransportConfig.getSslOption()); + if (this.sslContextProvider != null) 
{ + channelFactoryBuilder.setSslContext(this.sslContextProvider.get()); + } channelFactoryBuilder.setClientOption(grpcTransportConfig.getSpanClientOption()); channelFactoryBuilder.setExecutorQueueSize(grpcTransportConfig.getSpanChannelExecutorQueueSize()); @@ -289,12 +323,21 @@ private Span toSpan(final SpanData spanData) { } }); - final TraceId traceId = new DefaultTraceId(agentId, startTimestamp, transactionId, parentSpanId[0], spanId, - (short) spanData.getKind().ordinal()); - - final TraceRoot traceRoot = new DefaultTraceRoot(traceId, this.agentId, startTimestamp, transactionId); + final TraceIdFactory traceIdFactory = new DefaultTraceIdFactory(this.agentId, startTimestamp); + final TraceRootFactory traceRootFactory = new DefaultTraceRootFactory(this.agentId, traceIdFactory); + + final TraceRoot traceRoot; + if (parentSpanId[0] == SpanId.NULL) { + traceRoot = traceRootFactory.newTraceRoot(transactionId); + } else { + final TraceId traceId = traceIdFactory.continueTraceId( + TransactionIdUtils.formatString(this.agentId, startTimestamp, transactionId), parentSpanId[0], + spanId, (short) spanData.getKind().ordinal()); + traceRoot = traceRootFactory.continueTraceRoot(traceId, transactionId); + } - final Span span = new Span(traceRoot); + final DefaultSpanFactory spanFactory = new DefaultSpanFactory(); + final Span span = spanFactory.newSpan(traceRoot); final StatusData statusData = spanData.getStatus(); if (statusData != null) { @@ -315,15 +358,15 @@ private Span toSpan(final SpanData spanData) { span.setRemoteAddr(UNKNOWN_REQ_IP); Optional.ofNullable(spanData.getAttributes()) - .ifPresent(attributes -> { - span.addAnnotation(Annotations.of(AnnotationKey.HTTP_PARAM_ENTITY.getCode(), - JsonUtils.toJSONString(attributes))); - attributes.forEach((key, value) -> { - if (REQ_IP.equals(key.getKey())) { - span.setRemoteAddr(String.valueOf(value)); - } + .ifPresent(attributes -> { + span.addAnnotation(Annotations.of(AnnotationKey.HTTP_PARAM_ENTITY.getCode(), + JsonUtils.toJSONString(attributes))); + attributes.forEach((key, value) -> { + if (REQ_IP.equals(key.getKey())) { + span.setRemoteAddr(String.valueOf(value)); + } + }); }); - }); if (CollectionUtils.isNotEmpty(spanData.getEvents())) { final AtomicInteger sequence = new AtomicInteger(); @@ -342,7 +385,7 @@ private SpanEvent toSpanEvent(final EventData eventData) { spanEvent.setServiceType(ServiceType.INTERNAL_METHOD.getCode()); spanEvent.setEndPoint(eventData.getName()); spanEvent.addAnnotation(Annotations.of(AnnotationKey.HTTP_PARAM_ENTITY.getCode(), - JsonUtils.toJSONString(eventData.getAttributes()))); + JsonUtils.toJSONString(eventData.getAttributes()))); spanEvent.setElapsedTime((int) toMillis(eventData.getEpochNanos())); return spanEvent; } @@ -354,16 +397,16 @@ private static long toMillis(final long epochNanos) { private static long hex32StringToLong(final String hex32String) { final CharSequence charSequence = new StringBuilder().append(hex32String); return OtelEncodingUtils.isValidBase16String(charSequence) - ? OtelEncodingUtils.longFromBase16String(charSequence, 0) - & OtelEncodingUtils.longFromBase16String(charSequence, 16) - : hex32String.hashCode(); + ? OtelEncodingUtils.longFromBase16String(charSequence, 0) + & OtelEncodingUtils.longFromBase16String(charSequence, 16) + : hex32String.hashCode(); } private static long hex16StringToLong(final String hex16String) { final CharSequence charSequence = new StringBuilder().append(hex16String); return OtelEncodingUtils.isValidBase16String(charSequence) - ? 
OtelEncodingUtils.longFromBase16String(charSequence, 0) - : hex16String.hashCode(); + ? OtelEncodingUtils.longFromBase16String(charSequence, 0) + : hex16String.hashCode(); } private static String getEndpoint(final Resource resource) { diff --git a/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/test/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporterTest.java b/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/test/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporterTest.java index 5475876cef..6ae974825d 100644 --- a/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/test/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporterTest.java +++ b/eventmesh-trace-plugin/eventmesh-trace-pinpoint/src/test/java/org/apache/eventmesh/trace/pinpoint/exporter/PinpointSpanExporterTest.java @@ -21,16 +21,16 @@ import org.apache.eventmesh.trace.api.TracePluginFactory; import org.apache.eventmesh.trace.pinpoint.PinpointTraceService; import org.apache.eventmesh.trace.pinpoint.config.PinpointConfiguration; - + import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.UUID; - + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; - + import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanKind; @@ -43,10 +43,11 @@ import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.StatusData; - + public class PinpointSpanExporterTest { private PinpointSpanExporter exporter; + public static final String AGENT_ID = "test"; @BeforeEach public void setup() { @@ -73,6 +74,11 @@ public void exportTest() { spans.clear(); spans.add(new SpanDateTest()); Assertions.assertEquals(CompletableResultCode.ofSuccess(), exporter.export(spans)); + + spans.clear(); + spans.add(new SpanDateTest()); + spans.add(new ChildSpanDateTest()); + Assertions.assertEquals(CompletableResultCode.ofSuccess(), exporter.export(spans)); } @Test @@ -87,7 +93,7 @@ public void shutdownTest() { /** * for test - */ + */ private static class SpanDateTest implements SpanData { @Override @@ -171,6 +177,89 @@ public int getTotalAttributeCount() { } } + private static class ChildSpanDateTest implements SpanData { + + @Override + public SpanContext getSpanContext() { + return new SpanContextTest(); + } + + @Override + public SpanContext getParentSpanContext() { + return new SpanContextTest(); + } + + @Override + public Resource getResource() { + return null; + } + + @Override + public InstrumentationLibraryInfo getInstrumentationLibraryInfo() { + return null; + } + + @Override + public String getName() { + return this.getClass().getName(); + } + + @Override + public SpanKind getKind() { + return SpanKind.INTERNAL; + } + + @Override + public long getStartEpochNanos() { + return System.nanoTime(); + } + + @Override + public Attributes getAttributes() { + return null; + } + + @Override + public List getEvents() { + return null; + } + + @Override + public List getLinks() { + return null; + } + + @Override + public StatusData getStatus() { + return StatusData.ok(); + } + + @Override + public long getEndEpochNanos() { + return System.nanoTime(); + } + + @Override + public boolean hasEnded() { + return true; + } + + @Override + public int getTotalRecordedEvents() { + return 0; + } + + @Override + public int getTotalRecordedLinks() { + return 0; + } + + @Override 
+ public int getTotalAttributeCount() { + return 0; + } + } + private static class SpanContextTest implements SpanContext { @Override diff --git a/resources/eventmesh-architecture-5.png b/resources/eventmesh-architecture-5.png index f4111dc176..28a040bdb2 100644 Binary files a/resources/eventmesh-architecture-5.png and b/resources/eventmesh-architecture-5.png differ diff --git a/settings.gradle b/settings.gradle index 9388e2076d..c49a5c4242 100644 --- a/settings.gradle +++ b/settings.gradle @@ -16,8 +16,8 @@ */ plugins { - id 'com.gradle.develocity' version '3.17.4' - id 'com.gradle.common-custom-user-data-gradle-plugin' version '2.0.1' + id 'com.gradle.develocity' version '3.18.1' + id 'com.gradle.common-custom-user-data-gradle-plugin' version '2.0.2' } def isCiServer = System.getenv().containsKey("CI") @@ -47,11 +47,10 @@ include 'eventmesh-common' include 'eventmesh-starter' include 'eventmesh-examples' include 'eventmesh-spi' -include 'eventmesh-filter' -include 'eventmesh-transformer' include 'eventmesh-openconnect:eventmesh-openconnect-java' include 'eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-api' +include 'eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-admin' include 'eventmesh-openconnect:eventmesh-openconnect-offsetmgmt-plugin:eventmesh-openconnect-offsetmgmt-nacos' include 'eventmesh-connectors:eventmesh-connector-openfunction' @@ -75,6 +74,7 @@ include 'eventmesh-connectors:eventmesh-connector-slack' include 'eventmesh-connectors:eventmesh-connector-wechat' include 'eventmesh-connectors:eventmesh-connector-http' include 'eventmesh-connectors:eventmesh-connector-chatgpt' +include 'eventmesh-connectors:eventmesh-connector-canal' include 'eventmesh-storage-plugin:eventmesh-storage-api' include 'eventmesh-storage-plugin:eventmesh-storage-standalone' @@ -125,6 +125,14 @@ include 'eventmesh-webhook:eventmesh-webhook-receive' include 'eventmesh-retry' include 'eventmesh-retry:eventmesh-retry-api' include 'eventmesh-retry:eventmesh-retry-rocketmq' - - +include 'eventmesh-runtime-v2' +include 'eventmesh-admin-server' +include 'eventmesh-registry' +include 'eventmesh-registry:eventmesh-registry-api' +include 'eventmesh-registry:eventmesh-registry-nacos' + +include 'eventmesh-function' +include 'eventmesh-function:eventmesh-function-api' +include 'eventmesh-function:eventmesh-function-filter' +include 'eventmesh-function:eventmesh-function-transformer'
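Returning to the PinpointSpanExporter change above: with Pinpoint 3.0.0 the exporter no longer instantiates DefaultTraceId/DefaultTraceRoot directly but goes through factories, and it now distinguishes root spans from child spans. A sketch of that branch using only the calls shown in the patch; parameter types and anything beyond the hunk are assumptions.

import com.navercorp.pinpoint.bootstrap.context.SpanId;
import com.navercorp.pinpoint.bootstrap.context.TraceId;
import com.navercorp.pinpoint.common.profiler.util.TransactionIdUtils;
import com.navercorp.pinpoint.profiler.context.DefaultSpanFactory;
import com.navercorp.pinpoint.profiler.context.Span;
import com.navercorp.pinpoint.profiler.context.id.DefaultTraceIdFactory;
import com.navercorp.pinpoint.profiler.context.id.DefaultTraceRootFactory;
import com.navercorp.pinpoint.profiler.context.id.TraceIdFactory;
import com.navercorp.pinpoint.profiler.context.id.TraceRoot;
import com.navercorp.pinpoint.profiler.context.id.TraceRootFactory;

import io.opentelemetry.api.trace.SpanKind;

class TraceRootSketch {                                   // illustrative only, not part of the patch
    Span toPinpointSpan(String agentId, long startTimestamp, long transactionId,
                        long parentSpanId, long spanId, SpanKind kind) {
        TraceIdFactory traceIdFactory = new DefaultTraceIdFactory(agentId, startTimestamp);
        TraceRootFactory traceRootFactory = new DefaultTraceRootFactory(agentId, traceIdFactory);

        final TraceRoot traceRoot;
        if (parentSpanId == SpanId.NULL) {
            // root span: the factory mints a new trace id for this transaction
            traceRoot = traceRootFactory.newTraceRoot(transactionId);
        } else {
            // child span: rebuild the inherited trace id, then continue the existing trace
            TraceId traceId = traceIdFactory.continueTraceId(
                TransactionIdUtils.formatString(agentId, startTimestamp, transactionId),
                parentSpanId, spanId, (short) kind.ordinal());
            traceRoot = traceRootFactory.continueTraceRoot(traceId, transactionId);
        }

        // spans are likewise created through a factory rather than `new Span(traceRoot)`
        return new DefaultSpanFactory().newSpan(traceRoot);
    }
}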