Skip to content

Commit 2515001

Browse files
danny0405 and yuzhaojing
authored and committed
[HUDI-4644] Change default flink profile to 1.15.x (#6445)
1 parent 093eea1 commit 2515001

4 files changed

Lines changed: 11 additions & 41 deletions

File tree

hudi-examples/hudi-examples-flink/pom.xml

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -333,13 +333,6 @@
333333
<scope>test</scope>
334334
<type>test-jar</type>
335335
</dependency>
336-
<dependency>
337-
<groupId>org.apache.flink</groupId>
338-
<artifactId>flink-json</artifactId>
339-
<version>${flink.version}</version>
340-
<scope>test</scope>
341-
<type>test-jar</type>
342-
</dependency>
343336
<dependency>
344337
<groupId>org.apache.flink</groupId>
345338
<artifactId>flink-csv</artifactId>

hudi-flink-datasource/hudi-flink/pom.xml

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -239,11 +239,6 @@
239239
<artifactId>jcommander</artifactId>
240240
<scope>compile</scope>
241241
</dependency>
242-
<dependency>
243-
<groupId>com.twitter</groupId>
244-
<artifactId>bijection-avro_${scala.binary.version}</artifactId>
245-
<version>0.9.7</version>
246-
</dependency>
247242
<dependency>
248243
<groupId>joda-time</groupId>
249244
<artifactId>joda-time</artifactId>

hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/ITTestDataStreamWrite.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -246,7 +246,6 @@ private void testWriteToHoodie(
246246
execEnv.addOperator(pipeline.getTransformation());
247247

248248
if (isMor) {
249-
Pipelines.clean(conf, pipeline);
250249
Pipelines.compact(conf, pipeline);
251250
}
252251

pom.xml

Lines changed: 11 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -128,20 +128,20 @@
128128
<flink1.15.version>1.15.1</flink1.15.version>
129129
<flink1.14.version>1.14.5</flink1.14.version>
130130
<flink1.13.version>1.13.6</flink1.13.version>
131-
<flink.version>${flink1.13.version}</flink.version>
132-
<hudi.flink.module>hudi-flink1.13.x</hudi.flink.module>
133-
<flink.bundle.version>1.13</flink.bundle.version>
131+
<flink.version>${flink1.15.version}</flink.version>
132+
<hudi.flink.module>hudi-flink1.15.x</hudi.flink.module>
133+
<flink.bundle.version>1.15</flink.bundle.version>
134134
<flink.format.parquet.version>1.12.2</flink.format.parquet.version>
135135
<flink.runtime.artifactId>flink-runtime</flink.runtime.artifactId>
136-
<flink.table.runtime.artifactId>flink-table-runtime_${scala.binary.version}</flink.table.runtime.artifactId>
137-
<flink.table.planner.artifactId>flink-table-planner_${scala.binary.version}</flink.table.planner.artifactId>
136+
<flink.table.runtime.artifactId>flink-table-runtime</flink.table.runtime.artifactId>
137+
<flink.table.planner.artifactId>flink-table-planner_2.12</flink.table.planner.artifactId>
138138
<flink.parquet.artifactId>flink-parquet</flink.parquet.artifactId>
139139
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb</flink.statebackend.rocksdb.artifactId>
140140
<flink.test.utils.artifactId>flink-test-utils</flink.test.utils.artifactId>
141141
<flink.streaming.java.artifactId>flink-streaming-java</flink.streaming.java.artifactId>
142142
<flink.clients.artifactId>flink-clients</flink.clients.artifactId>
143143
<flink.connector.kafka.artifactId>flink-connector-kafka</flink.connector.kafka.artifactId>
144-
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
144+
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_2.12</flink.hadoop.compatibility.artifactId>
145145
<spark31.version>3.1.3</spark31.version>
146146
<spark32.version>3.2.1</spark32.version>
147147
<spark33.version>3.3.0</spark33.version>
@@ -1821,20 +1821,6 @@
18211821

18221822
<profile>
18231823
<id>flink1.15</id>
1824-
<properties>
1825-
<flink.version>${flink1.15.version}</flink.version>
1826-
<flink.table.runtime.artifactId>flink-table-runtime</flink.table.runtime.artifactId>
1827-
<flink.parquet.artifactId>flink-parquet</flink.parquet.artifactId>
1828-
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb</flink.statebackend.rocksdb.artifactId>
1829-
<flink.test.utils.artifactId>flink-test-utils</flink.test.utils.artifactId>
1830-
<flink.streaming.java.artifactId>flink-streaming-java</flink.streaming.java.artifactId>
1831-
<flink.clients.artifactId>flink-clients</flink.clients.artifactId>
1832-
<flink.connector.kafka.artifactId>flink-connector-kafka</flink.connector.kafka.artifactId>
1833-
<!-- 1.15 only supports scala2.12 -->
1834-
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_2.12</flink.hadoop.compatibility.artifactId>
1835-
<hudi.flink.module>hudi-flink1.15.x</hudi.flink.module>
1836-
<flink.bundle.version>1.15</flink.bundle.version>
1837-
</properties>
18381824
<activation>
18391825
<property>
18401826
<name>flink1.15</name>
@@ -1845,31 +1831,30 @@
18451831
<id>flink1.14</id>
18461832
<properties>
18471833
<flink.version>${flink1.14.version}</flink.version>
1834+
<hudi.flink.module>hudi-flink1.14.x</hudi.flink.module>
1835+
<flink.bundle.version>1.14</flink.bundle.version>
18481836
<flink.table.runtime.artifactId>flink-table-runtime_${scala.binary.version}</flink.table.runtime.artifactId>
1837+
<flink.table.planner.artifactId>flink-table-planner_${scala.binary.version}</flink.table.planner.artifactId>
18491838
<flink.parquet.artifactId>flink-parquet_${scala.binary.version}</flink.parquet.artifactId>
18501839
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb_${scala.binary.version}</flink.statebackend.rocksdb.artifactId>
18511840
<flink.test.utils.artifactId>flink-test-utils_${scala.binary.version}</flink.test.utils.artifactId>
18521841
<flink.streaming.java.artifactId>flink-streaming-java_${scala.binary.version}</flink.streaming.java.artifactId>
18531842
<flink.clients.artifactId>flink-clients_${scala.binary.version}</flink.clients.artifactId>
18541843
<flink.connector.kafka.artifactId>flink-connector-kafka_${scala.binary.version}</flink.connector.kafka.artifactId>
18551844
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
1856-
<hudi.flink.module>hudi-flink1.14.x</hudi.flink.module>
1857-
<flink.bundle.version>1.14</flink.bundle.version>
18581845
</properties>
18591846
<activation>
1860-
<activeByDefault>true</activeByDefault>
18611847
<property>
18621848
<name>flink1.14</name>
1863-
<!-- add flink1.14 module to all profile -->
1864-
<value>!disabled</value>
18651849
</property>
18661850
</activation>
18671851
</profile>
18681852
<profile>
18691853
<id>flink1.13</id>
18701854
<properties>
1871-
<flink.scala.bnary.version>2.11</flink.scala.bnary.version>
18721855
<flink.version>${flink1.13.version}</flink.version>
1856+
<hudi.flink.module>hudi-flink1.13.x</hudi.flink.module>
1857+
<flink.bundle.version>1.13</flink.bundle.version>
18731858
<flink.runtime.artifactId>flink-runtime_${scala.binary.version}</flink.runtime.artifactId>
18741859
<flink.table.runtime.artifactId>flink-table-runtime-blink_${scala.binary.version}</flink.table.runtime.artifactId>
18751860
<flink.table.planner.artifactId>flink-table-planner-blink_${scala.binary.version}</flink.table.planner.artifactId>
@@ -1880,8 +1865,6 @@
18801865
<flink.clients.artifactId>flink-clients_${scala.binary.version}</flink.clients.artifactId>
18811866
<flink.connector.kafka.artifactId>flink-connector-kafka_${scala.binary.version}</flink.connector.kafka.artifactId>
18821867
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
1883-
<hudi.flink.module>hudi-flink1.13.x</hudi.flink.module>
1884-
<flink.bundle.version>1.13</flink.bundle.version>
18851868
<skipITs>true</skipITs>
18861869
</properties>
18871870
<activation>

0 commit comments

Comments (0)