 
 package org.apache.hudi.utilities;
 
-import com.beust.jcommander.JCommander;
-import com.beust.jcommander.Parameter;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hudi.DataSourceUtils;
 import org.apache.hudi.DataSourceWriteOptions;
 import org.apache.hudi.client.SparkRDDWriteClient;
 import org.apache.hudi.common.config.TypedProperties;
 ...
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.hive.HiveSyncConfig;
+import org.apache.hudi.hive.HiveSyncConfigHolder;
 import org.apache.hudi.hive.HiveSyncTool;
 import org.apache.hudi.keygen.constant.KeyGeneratorOptions;
+import org.apache.hudi.sync.common.HoodieSyncConfig;
 import org.apache.hudi.table.HoodieSparkTable;
 
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
 
-import scala.Tuple2;
-
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
 ...
 import java.util.Objects;
 import java.util.stream.Collectors;
 
+import scala.Tuple2;
+
 /**
  * A tool with spark-submit to drop Hudi table partitions.
  *
@@ -352,11 +354,13 @@ private HiveSyncConfig buildHiveSyncProps() {
     props.put(DataSourceWriteOptions.HIVE_SYNC_MODE().key(), cfg.hiveSyncMode);
     props.put(DataSourceWriteOptions.HIVE_IGNORE_EXCEPTIONS().key(), cfg.hiveSyncIgnoreException);
     props.put(DataSourceWriteOptions.HIVE_PASS().key(), cfg.hivePassWord);
+    props.put(HiveSyncConfig.META_SYNC_BASE_PATH, cfg.basePath);
+    props.put(HiveSyncConfig.META_SYNC_BASE_FILE_FORMAT, "PARQUET");
     props.put(DataSourceWriteOptions.PARTITIONS_TO_DELETE().key(), cfg.partitions);
     props.put(DataSourceWriteOptions.HIVE_PARTITION_EXTRACTOR_CLASS().key(), cfg.partitionValueExtractorClass);
     props.put(KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), cfg.hivePartitionsField);
 
-    return DataSourceUtils.buildHiveSyncConfig(props, cfg.basePath, "PARQUET");
+    return new HiveSyncConfig(props, new Configuration());
   }
 
   private void verifyHiveConfigs() {
@@ -366,9 +370,9 @@ private void verifyHiveConfigs() {
 
   private void syncHive(HiveSyncConfig hiveSyncConfig) {
     LOG.info("Syncing target hoodie table with hive table("
-        + hiveSyncConfig.tableName
+        + hiveSyncConfig.getStringOrDefault(HoodieSyncConfig.META_SYNC_TABLE_NAME)
         + "). Hive metastore URL :"
-        + hiveSyncConfig.jdbcUrl
+        + hiveSyncConfig.getStringOrDefault(HiveSyncConfigHolder.HIVE_URL)
         + ", basePath :" + cfg.basePath);
     LOG.info("Hive Sync Conf => " + hiveSyncConfig.toString());
     FileSystem fs = FSUtils.getFs(cfg.basePath, jsc.hadoopConfiguration());
@@ -378,7 +382,7 @@ private void syncHive(HiveSyncConfig hiveSyncConfig) {
     }
     hiveConf.addResource(fs.getConf());
     LOG.info("Hive Conf => " + hiveConf.getAllProperties().toString());
-    HiveSyncTool hiveSyncTool = new HiveSyncTool(hiveSyncConfig, hiveConf, fs);
+    HiveSyncTool hiveSyncTool = new HiveSyncTool(hiveSyncConfig.getProps(), hiveConf);
     hiveSyncTool.syncHoodieTable();
   }
 
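
For context: this commit replaces the removed `DataSourceUtils.buildHiveSyncConfig(props, basePath, format)` helper and the public fields on `HiveSyncConfig` (`tableName`, `jdbcUrl`) with a properties-driven config, where sync settings are written into `TypedProperties` under `ConfigProperty` keys and read back through typed accessors such as `getStringOrDefault`. Below is a minimal sketch of that flow, not code from this commit: the class name `HiveSyncSketch` and the database/table/path/URL values are placeholders, and only constructors and accessors that appear in the diff above (or their documented `ConfigProperty` keys) are used.

// A minimal sketch (not from this commit) of the post-refactor sync flow.
import org.apache.hadoop.conf.Configuration;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.hive.HiveSyncConfig;
import org.apache.hudi.hive.HiveSyncConfigHolder;
import org.apache.hudi.hive.HiveSyncTool;
import org.apache.hudi.sync.common.HoodieSyncConfig;

public class HiveSyncSketch { // hypothetical class name
  public static void main(String[] args) {
    TypedProperties props = new TypedProperties();
    // ConfigProperty.key() yields the string key the typed accessors look up later.
    props.put(HoodieSyncConfig.META_SYNC_DATABASE_NAME.key(), "default");           // placeholder
    props.put(HoodieSyncConfig.META_SYNC_TABLE_NAME.key(), "my_table");             // placeholder
    props.put(HoodieSyncConfig.META_SYNC_BASE_PATH.key(), "/tmp/hudi/my_table");    // placeholder
    props.put(HoodieSyncConfig.META_SYNC_BASE_FILE_FORMAT.key(), "PARQUET");
    props.put(HiveSyncConfigHolder.HIVE_URL.key(), "jdbc:hive2://localhost:10000"); // placeholder

    Configuration hadoopConf = new Configuration();
    HiveSyncConfig syncConfig = new HiveSyncConfig(props, hadoopConf);

    // Typed accessors replace the removed public fields (tableName, jdbcUrl).
    String table = syncConfig.getStringOrDefault(HoodieSyncConfig.META_SYNC_TABLE_NAME);
    String jdbcUrl = syncConfig.getStringOrDefault(HiveSyncConfigHolder.HIVE_URL);
    System.out.println("Syncing " + table + " via " + jdbcUrl);

    // HiveSyncTool is now constructed from raw properties plus a Hadoop
    // Configuration, mirroring the change in syncHive() above.
    HiveSyncTool tool = new HiveSyncTool(syncConfig.getProps(), hadoopConf);
    tool.syncHoodieTable();
  }
}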