Skip to content

Commit

Permalink
[#3514] improvement(flink-connector): add flink-connector-runtime to build flink connector (#4815)
Browse files Browse the repository at this point in the history


### What changes were proposed in this pull request?

- add flink runtime connector

### Why are the changes needed?

Fix: #3514 

### Does this PR introduce _any_ user-facing change?

- no

### How was this patch tested?

- local test

Co-authored-by: Peidian li <[email protected]>
Co-authored-by: fanng <[email protected]>
  • Loading branch information
3 people authored Aug 30, 2024
1 parent 1fd903e commit 9ebe6ac
Show file tree
Hide file tree
Showing 33 changed files with 289 additions and 164 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/flink-integration-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ jobs:
- name: Flink Integration Test
id: integrationTest
run: |
./gradlew -PskipTests -PtestMode=embedded -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :flink-connector:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
./gradlew -PskipTests -PtestMode=deploy -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :flink-connector:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
./gradlew -PskipTests -PtestMode=embedded -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :flink-connector:flink:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
./gradlew -PskipTests -PtestMode=deploy -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :flink-connector:flink:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
- name: Upload integrate tests reports
uses: actions/upload-artifact@v3
Expand All @@ -96,9 +96,9 @@ jobs:
name: flink-connector-integrate-test-reports-${{ matrix.java-version }}
path: |
build/reports
flink-connector/build/flink-connector-integration-test.log
flink-connector/build/*.tar
flink-connector/flink/build/*.log
flink-connector/flink/build/*.tar
distribution/package/logs/gravitino-server.out
distribution/package/logs/gravitino-server.log
catalogs/**/*.log
catalogs/**/*.tar
catalogs/**/*.tar
6 changes: 3 additions & 3 deletions build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -736,7 +736,7 @@ tasks {
if (!it.name.startsWith("catalog") &&
!it.name.startsWith("authorization") &&
!it.name.startsWith("client") && !it.name.startsWith("filesystem") && !it.name.startsWith("spark") && !it.name.startsWith("iceberg") && it.name != "trino-connector" &&
it.name != "integration-test" && it.name != "bundled-catalog" && it.name != "flink-connector"
it.name != "integration-test" && it.name != "bundled-catalog" && !it.name.startsWith("flink")
) {
from(it.configurations.runtimeClasspath)
into("distribution/package/libs")
Expand All @@ -753,9 +753,9 @@ tasks {
!it.name.startsWith("spark") &&
!it.name.startsWith("iceberg") &&
!it.name.startsWith("integration-test") &&
!it.name.startsWith("flink") &&
it.name != "trino-connector" &&
it.name != "bundled-catalog" &&
it.name != "flink-connector"
it.name != "bundled-catalog"
) {
dependsOn("${it.name}:build")
from("${it.name}/build/libs")
Expand Down
2 changes: 1 addition & 1 deletion docs/flink-connector/flink-connector.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ This capability allows users to perform federation queries, accessing data from

## How to use it

1. [Build](../how-to-build.md) or [download](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-flink-connector) the Gravitino flink connector jar, and place it to the classpath of Flink.
1. [Build](../how-to-build.md) or [download](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-flink-connector-runtime-1.18) the Gravitino Flink connector runtime jar, and place it in the classpath of Flink.
2. Configure the Flink configuration to use the Gravitino flink connector.

| Property | Type | Default Value | Description | Required | Since Version |
Expand Down
157 changes: 3 additions & 154 deletions flink-connector/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -16,158 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
// NOTE(review): this span is a commit-diff view — the lines below are the previous
// flink-connector build logic (moved to the flink-connector:flink submodule by this
// commit) followed by the replacement content that disables all tasks in this module.
plugins {
`maven-publish`
id("java")
id("idea")
}

repositories {
mavenCentral()
}

// Flink version resolved from the Gradle version catalog.
val flinkVersion: String = libs.versions.flink.get()

// Flink only supports Scala 2.12, and all Scala APIs will be removed in a future version.
// You can find more detail at the following issues:
// https://issues.apache.org/jira/browse/FLINK-23986,
// https://issues.apache.org/jira/browse/FLINK-20845,
// https://issues.apache.org/jira/browse/FLINK-13414.
val scalaVersion: String = "2.12"
// Published artifact id carries the Scala binary-version suffix, e.g. "gravitino-flink-connector_2.12".
val artifactName = "gravitino-${project.name}_$scalaVersion"

dependencies {
// Gravitino modules the connector compiles against.
implementation(project(":api"))
implementation(project(":catalogs:catalog-common"))
implementation(project(":common"))
implementation(project(":core"))
implementation(project(":clients:client-java"))

implementation(libs.bundles.log4j)
implementation(libs.commons.lang3)
implementation(libs.guava)
implementation(libs.httpclient5)
implementation(libs.jackson.databind)
implementation(libs.jackson.annotations)
implementation(libs.jackson.datatype.jdk8)
implementation(libs.jackson.datatype.jsr310)

implementation("org.apache.flink:flink-connector-hive_$scalaVersion:$flinkVersion")
implementation("org.apache.flink:flink-table-common:$flinkVersion")
implementation("org.apache.flink:flink-table-api-java:$flinkVersion")

// Hive exec "core" classifier, with heavy transitive dependencies excluded
// to keep the connector jar slim and avoid classpath conflicts.
implementation(libs.hive2.exec) {
artifact {
classifier = "core"
}
exclude("com.fasterxml.jackson.core")
exclude("com.google.code.findbugs", "jsr305")
exclude("com.google.protobuf")
exclude("org.apache.avro")
exclude("org.apache.calcite")
exclude("org.apache.calcite.avatica")
exclude("org.apache.curator")
exclude("org.apache.hadoop", "hadoop-yarn-server-resourcemanager")
exclude("org.apache.logging.log4j")
exclude("org.apache.zookeeper")
exclude("org.eclipse.jetty.aggregate", "jetty-all")
exclude("org.eclipse.jetty.orbit", "javax.servlet")
exclude("org.openjdk.jol")
exclude("org.pentaho")
exclude("org.slf4j")
}

// Test-only dependencies (unit + Hive/Flink integration tests).
testAnnotationProcessor(libs.lombok)

testCompileOnly(libs.lombok)
testImplementation(project(":integration-test-common", "testArtifacts"))
testImplementation(project(":server"))
testImplementation(project(":server-common"))
testImplementation(libs.junit.jupiter.api)
testImplementation(libs.junit.jupiter.params)
testImplementation(libs.mockito.core)
testImplementation(libs.mysql.driver)
testImplementation(libs.sqlite.jdbc)
testImplementation(libs.testcontainers)
testImplementation(libs.testcontainers.junit.jupiter)
testImplementation(libs.testcontainers.mysql)

testImplementation(libs.hadoop2.common) {
exclude("*")
}
testImplementation(libs.hadoop2.hdfs) {
exclude("com.sun.jersey")
exclude("commons-cli", "commons-cli")
exclude("commons-io", "commons-io")
exclude("commons-codec", "commons-codec")
exclude("commons-logging", "commons-logging")
exclude("javax.servlet", "servlet-api")
exclude("org.mortbay.jetty")
}
testImplementation(libs.hadoop2.mapreduce.client.core) {
exclude("*")
}
testImplementation(libs.hive2.common) {
exclude("org.eclipse.jetty.aggregate", "jetty-all")
exclude("org.eclipse.jetty.orbit", "javax.servlet")
}
testImplementation(libs.hive2.metastore) {
exclude("co.cask.tephra")
exclude("com.github.joshelser")
exclude("com.google.code.findbugs", "jsr305")
exclude("com.google.code.findbugs", "sr305")
exclude("com.tdunning", "json")
exclude("com.zaxxer", "HikariCP")
exclude("io.dropwizard.metricss")
exclude("javax.transaction", "transaction-api")
exclude("org.apache.avro")
exclude("org.apache.curator")
exclude("org.apache.hbase")
exclude("org.apache.hadoop", "hadoop-yarn-server-resourcemanager")
exclude("org.apache.logging.log4j")
exclude("org.apache.parquet", "parquet-hadoop-bundle")
exclude("org.apache.zookeeper")
exclude("org.eclipse.jetty.aggregate", "jetty-all")
exclude("org.eclipse.jetty.orbit", "javax.servlet")
exclude("org.slf4j")
}
testImplementation("org.apache.flink:flink-table-api-bridge-base:$flinkVersion") {
exclude("commons-cli", "commons-cli")
exclude("commons-io", "commons-io")
exclude("com.google.code.findbugs", "jsr305")
}
testImplementation("org.apache.flink:flink-table-planner_$scalaVersion:$flinkVersion")
testImplementation("org.apache.flink:flink-test-utils:$flinkVersion")

testRuntimeOnly(libs.junit.jupiter.engine)
}

// Test task wiring: -PskipTests runs only integration tests, -PskipITs /
// -PskipFlinkITs excludes them; otherwise ITs need this jar and the Hive catalog jar.
tasks.test {
val skipUTs = project.hasProperty("skipTests")
if (skipUTs) {
// Only run integration tests
include("**/integration/**")
}

val skipITs = project.hasProperty("skipITs")
val skipFlinkITs = project.hasProperty("skipFlinkITs")
if (skipITs || skipFlinkITs) {
// Exclude integration tests
exclude("**/integration/**")
} else {
dependsOn(tasks.jar)
dependsOn(":catalogs:catalog-hive:jar")
}
}

// Rename every produced jar to carry the Scala-suffixed artifact name.
tasks.withType<Jar> {
archiveBaseName.set(artifactName)
}

// Publish under the Scala-suffixed artifact id.
publishing {
publications {
withType<MavenPublication>().configureEach {
artifactId = artifactName
}
}
}
// Replacement content after the module split: this parent module no longer
// builds anything itself; its subprojects (flink, flink-runtime) do the work.
tasks.all {
enabled = false
}
73 changes: 73 additions & 0 deletions flink-connector/flink-runtime/build.gradle.kts
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar

// Builds the shaded "runtime" jar that users drop on the Flink classpath:
// the flink connector plus the shaded Gravitino Java client, with common
// third-party packages relocated to avoid conflicts with Flink's own classpath.
plugins {
  `maven-publish`
  id("java")
  id("idea")
  alias(libs.plugins.shadow)
}

repositories {
  mavenCentral()
}

val flinkVersion: String = libs.versions.flink.get()
// e.g. "1.18.0" -> "1.18"; the runtime artifact is tagged with the Flink line it targets.
val flinkMajorVersion: String = flinkVersion.substringBeforeLast(".")

// Flink only supports Scala 2.12, and all Scala APIs will be removed in a future version.
// You can find more detail at the following issues:
// https://issues.apache.org/jira/browse/FLINK-23986,
// https://issues.apache.org/jira/browse/FLINK-20845,
// https://issues.apache.org/jira/browse/FLINK-13414.
val scalaVersion: String = "2.12"
// Final artifact name, e.g. "gravitino-flink-connector-runtime-1.18_2.12".
// (Dropped the unused `artifactName` val that was copied over from the old module.)
val baseName = "${rootProject.name}-flink-connector-runtime-${flinkMajorVersion}_$scalaVersion"

dependencies {
  // Shaded Java client plus the actual connector implementation.
  implementation(project(":clients:client-java-runtime", configuration = "shadow"))
  implementation(project(":flink-connector:flink"))
}

// `withType<ShadowJar>` is already reified; the redundant `ShadowJar::class.java`
// argument was removed.
tasks.withType<ShadowJar> {
  isZip64 = true
  configurations = listOf(project.configurations.runtimeClasspath.get())
  archiveFileName.set("$baseName-$version.jar")
  // Empty classifier so the shadow jar becomes the primary published artifact.
  archiveClassifier.set("")

  // Relocate dependencies to avoid conflicts
  relocate("com.google", "org.apache.gravitino.shaded.com.google")
  relocate("google", "org.apache.gravitino.shaded.google")
  relocate("org.apache.hc", "org.apache.gravitino.shaded.org.apache.hc")
  relocate("org.apache.commons", "org.apache.gravitino.shaded.org.apache.commons")
}

publishing {
  publications {
    withType<MavenPublication>().configureEach {
      artifactId = baseName
    }
  }
}

tasks.jar {
  // Always build the shadow jar alongside the plain jar, and mark the plain
  // jar with an "empty" classifier so it cannot be mistaken for the runtime artifact.
  dependsOn(tasks.named("shadowJar"))
  archiveClassifier.set("empty")
}
Loading

0 comments on commit 9ebe6ac

Please sign in to comment.