Skip to content

Commit 9419230

Browse files
author
Alexey Kudinkin
committed
Fixing compilation
1 parent aeb98cf commit 9419230

3 files changed

Lines changed: 1 addition & 29 deletions

File tree

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestHarness.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -199,7 +199,7 @@ protected void initSparkContexts(String appName) {
199199

200200
if (sparkSessionExtensionsInjector.isPresent()) {
201201
// In case we need to inject extensions into Spark Session, we have
202-
// to stop any session that might still be active and since Spark will try
202+
// to stop any session that might still be active, since Spark will try
203203
// to re-use it
204204
HoodieConversionUtils.toJavaOption(SparkSession.getActiveSession())
205205
.ifPresent(SparkSession::stop);

hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/DefaultSource.scala

Lines changed: 0 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,6 @@ import org.apache.hudi.common.model.HoodieTableType.{COPY_ON_WRITE, MERGE_ON_REA
2626
import org.apache.hudi.common.table.timeline.HoodieInstant
2727
import org.apache.hudi.common.table.{HoodieTableMetaClient, TableSchemaResolver}
2828
import org.apache.hudi.exception.HoodieException
29-
import org.apache.hudi.internal.schema.InternalSchema
30-
import org.apache.hudi.metadata.HoodieTableMetadata
3129
import org.apache.hudi.metadata.HoodieTableMetadata.isMetadataTable
3230
import org.apache.log4j.LogManager
3331
import org.apache.spark.sql.execution.streaming.{Sink, Source}
@@ -226,26 +224,6 @@ class DefaultSource extends RelationProvider
226224
new HoodieStreamSource(sqlContext, metadataPath, schema, parameters)
227225
}
228226

229-
private def resolveBaseFileOnlyRelation(sqlContext: SQLContext,
230-
globPaths: Seq[Path],
231-
userSchema: Option[StructType],
232-
metaClient: HoodieTableMetaClient,
233-
optParams: Map[String, String]): BaseRelation = {
234-
val baseRelation = new BaseFileOnlyRelation(sqlContext, metaClient, optParams, userSchema, globPaths)
235-
val enableSchemaOnRead: Boolean = !tryFetchInternalSchema(metaClient).isEmptySchema
236-
237-
// NOTE: We fallback to [[HadoopFsRelation]] in all of the cases except ones requiring usage of
238-
// [[BaseFileOnlyRelation]] to function correctly. This is necessary to maintain performance parity w/
239-
// vanilla Spark, since some of the Spark optimizations are predicated on the using of [[HadoopFsRelation]].
240-
//
241-
// You can check out HUDI-3896 for more details
242-
if (enableSchemaOnRead) {
243-
baseRelation
244-
} else {
245-
baseRelation.toHadoopFsRelation
246-
}
247-
}
248-
249227
private def resolveBaseFileOnlyRelation(sqlContext: SQLContext,
250228
globPaths: Seq[Path],
251229
userSchema: Option[StructType],

hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -94,12 +94,6 @@ class TestCOWDataSource extends HoodieClientTestBase {
9494
System.gc()
9595
}
9696

97-
override def getSparkSessionExtensionsInjector: util.Option[Consumer[SparkSessionExtensions]] =
98-
toJavaOption(
99-
Some(
100-
JFunction.toJava((receiver: SparkSessionExtensions) => new HoodieSparkSessionExtension().apply(receiver)))
101-
)
102-
10397
@Test def testShortNameStorage() {
10498
// Insert Operation
10599
val records = recordsToStrings(dataGen.generateInserts("000", 100)).toList

0 commit comments

Comments (0)