@@ -145,8 +145,10 @@ class KafkaSourceSuite extends KafkaSourceTest {
       .option("kafka.metadata.max.age.ms", "1")
       .option("subscribePattern", s"topic-.*")

-    val kafka = reader.load().select("key", "value").as[(Array[Byte], Array[Byte])]
-    val mapped = kafka.map(kv => new String(kv._2).toInt + 1)
+    val kafka = reader.load()
+      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
+      .as[(String, String)]
+    val mapped = kafka.map(kv => kv._2.toInt + 1)

     testStream(mapped)(
       StopStream
@@ -190,8 +192,10 @@ class KafkaSourceSuite extends KafkaSourceTest {
       .option("kafka.metadata.max.age.ms", "1")
       .option("subscribePattern", s"$topicPrefix-.*")

-    val kafka = reader.load().select("key", "value").as[(Array[Byte], Array[Byte])]
-    val mapped = kafka.map(kv => new String(kv._2).toInt + 1)
+    val kafka = reader.load()
+      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
+      .as[(String, String)]
+    val mapped = kafka.map(kv => kv._2.toInt + 1)

     testStream(mapped)(
       makeSureGetOffsetCalled,
@@ -272,8 +276,10 @@ class KafkaSourceSuite extends KafkaSourceTest {
       .option("kafka.bootstrap.servers", testUtils.brokerAddress)
       .option("kafka.metadata.max.age.ms", "1")
     options.foreach { case (k, v) => reader.option(k, v) }
-    val kafka = reader.load().select("key", "value").as[(Array[Byte], Array[Byte])]
-    val mapped = kafka.map(kv => new String(kv._2).toInt + 1)
+    val kafka = reader.load()
+      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
+      .as[(String, String)]
+    val mapped = kafka.map(kv => kv._2.toInt + 1)

     testStream(mapped)(
       makeSureGetOffsetCalled,
@@ -309,8 +315,10 @@ class KafkaSourceSuite extends KafkaSourceTest {
       .option("kafka.bootstrap.servers", testUtils.brokerAddress)
       .option("kafka.metadata.max.age.ms", "1")
     options.foreach { case (k, v) => reader.option(k, v) }
-    val kafka = reader.load().select("key", "value").as[(Array[Byte], Array[Byte])]
-    val mapped = kafka.map(kv => new String(kv._2).toInt + 1)
+    val kafka = reader.load()
+      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
+      .as[(String, String)]
+    val mapped = kafka.map(kv => kv._2.toInt + 1)

     testStream(mapped)(
       AddKafkaData(Set(topic), 4, 5, 6), // Add data when stream is stopped
@@ -368,10 +376,10 @@ class KafkaSourceStressSuite extends KafkaSourceTest with BeforeAndAfter {
       .option("subscribePattern", "stress.*")
       .option("failOnCorruptMetadata", "false")
       .load()
-      .select("key", "value")
-      .as[(Array[Byte], Array[Byte])]
+      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
+      .as[(String, String)]

-    val mapped = kafka.map(kv => new String(kv._2).toInt + 1)
+    val mapped = kafka.map(kv => kv._2.toInt + 1)

     runStressTest(
       mapped,
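Each hunk applies the same pattern: instead of selecting the Kafka source's binary `key`/`value` columns as `Array[Byte]` and decoding them with `new String`, the columns are cast to strings with `selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")` before the typed `map`. The sketch below shows that pattern in a standalone streaming query rather than the test harness; it is a minimal illustration, not part of the patch, and the broker address, topic pattern, and console sink are placeholder assumptions (the `spark-sql-kafka-0-10` package must be on the classpath for the `kafka` source to resolve).

```scala
// Minimal sketch of the key/value casting pattern, outside the test harness.
// Assumes a local broker at localhost:9092 and topics matching "topic-.*".
import org.apache.spark.sql.SparkSession

object KafkaStringCastSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("kafka-cast-sketch").getOrCreate()
    import spark.implicits._

    val kafka = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092") // placeholder address
      .option("subscribePattern", "topic-.*")              // placeholder pattern
      .load()
      // key/value arrive as binary columns; casting yields UTF-8 strings directly
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]

    // Downstream logic can parse the string payload without manual byte decoding.
    val mapped = kafka.map(kv => kv._2.toInt + 1)

    mapped.writeStream
      .format("console")
      .start()
      .awaitTermination()
  }
}
```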