-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-21723][ML] Fix writing LibSVM (key not found: numFeatures) #18872
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -19,13 +19,16 @@ package org.apache.spark.ml.source.libsvm | |
|
|
||
| import java.io.{File, IOException} | ||
| import java.nio.charset.StandardCharsets | ||
| import java.util.List | ||
|
|
||
| import com.google.common.io.Files | ||
|
|
||
| import org.apache.spark.SparkFunSuite | ||
| import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector, Vectors} | ||
| import org.apache.spark.ml.linalg.SQLDataTypes.VectorType | ||
| import org.apache.spark.mllib.util.MLlibTestSparkContext | ||
| import org.apache.spark.sql.{Row, SaveMode} | ||
| import org.apache.spark.sql.types.{DoubleType, StructField, StructType} | ||
| import org.apache.spark.util.Utils | ||
|
|
||
|
|
||
|
|
@@ -109,14 +112,15 @@ class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext { | |
| test("write libsvm data and read it again") { | ||
| val df = spark.read.format("libsvm").load(path) | ||
| val tempDir2 = new File(tempDir, "read_write_test") | ||
| val writepath = tempDir2.toURI.toString | ||
| val writePath = tempDir2.toURI.toString | ||
|
||
| // TODO: Remove requirement to coalesce by supporting multiple reads. | ||
| df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writepath) | ||
| df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writePath) | ||
|
|
||
| val df2 = spark.read.format("libsvm").load(writepath) | ||
| val df2 = spark.read.format("libsvm").load(writePath) | ||
| val row1 = df2.first() | ||
| val v = row1.getAs[SparseVector](1) | ||
| assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0)))) | ||
| Utils.deleteRecursively(tempDir2) | ||
|
||
| } | ||
|
|
||
| test("write libsvm data failed due to invalid schema") { | ||
|
|
@@ -126,6 +130,29 @@ class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext { | |
| } | ||
| } | ||
|
|
||
| test("write libsvm data from scratch and read it again") { | ||
| val rawData = new java.util.ArrayList[Row]() | ||
| rawData.add(Row(1.0, Vectors.sparse(3, Seq((0, 2.0), (1, 3.0))))) | ||
| rawData.add(Row(4.0, Vectors.sparse(3, Seq((0, 5.0), (2, 6.0))))) | ||
|
|
||
|
||
| val struct = StructType( | ||
| StructField("labelFoo", DoubleType, false) :: | ||
| StructField("featuresBar", VectorType, false) :: Nil | ||
| ) | ||
| val df = spark.sqlContext.createDataFrame(rawData, struct) | ||
|
|
||
| val tempDir2 = new File(tempDir, "read_write_test_2") | ||
| val writePath = tempDir2.toURI.toString | ||
|
|
||
| df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writePath) | ||
|
|
||
| val df2 = spark.read.format("libsvm").load(writePath) | ||
| val row1 = df2.first() | ||
| val v = row1.getAs[SparseVector](1) | ||
| assert(v == Vectors.sparse(3, Seq((0, 2.0), (1, 3.0)))) | ||
| Utils.deleteRecursively(tempDir2) | ||
| } | ||
|
|
||
| test("select features from libsvm relation") { | ||
| val df = spark.read.format("libsvm").load(path) | ||
| df.select("features").rdd.map { case Row(d: Vector) => d }.first | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I suggest the temp dir name be
Identifiable.randomUID("read_write_test"), to avoid conflicts with other tests running in parallel.

There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Use
Utils.createTempDir

Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Utils.createTempDir seems to be a nicer way. The directory is automatically deleted when the VM shuts down, so I believe no manual cleanup (cf. comment below) is needed.