
Commit 3fe486b

README.md update
1 parent d094659 commit 3fe486b

3 files changed: +53 -52 lines changed


.github/workflows/ci.yml

Lines changed: 6 additions & 6 deletions
@@ -29,14 +29,14 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        scala: [2.13.8, 2.12.15]
+        scala: [2.13.8, 2.12.16]
         java: [temurin@8]
-        project: [root-spark30, root-spark31, root-spark32]
+        project: [root-spark31, root-spark32, root-spark33]
         exclude:
-          - scala: 2.13.8
-            project: root-spark30
           - scala: 2.13.8
             project: root-spark31
+          - scala: 2.13.8
+            project: root-spark32
     runs-on: ${{ matrix.os }}
     steps:
       - name: Checkout current branch (full)
@@ -104,7 +104,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        scala: [2.12.15]
+        scala: [2.12.16]
         java: [temurin@8]
     runs-on: ${{ matrix.os }}
     steps:
@@ -160,7 +160,7 @@ jobs:
     strategy:
      matrix:
         os: [ubuntu-latest]
-        scala: [2.12.15]
+        scala: [2.12.16]
         java: [temurin@8]
     runs-on: ${{ matrix.os }}
     steps:
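Note: this workflow file is generated from build.sbt via sbt-typelevel / sbt-github-actions (see the githubWorkflowBuildMatrixAdditions wiring in the build.sbt diff below), so with these exclusions Scala 2.13 is built only against the primary Spark 3.3 root. A minimal sketch of how the updated exclude entries could be declared on the sbt side, assuming the stock MatrixExclude helper from sbt-github-actions (illustrative, not part of this commit):

ThisBuild / githubWorkflowBuildMatrixExclusions ++= Seq(
  // mirror the generated `exclude:` block above: skip Scala 2.13 for the
  // Spark 3.1 and Spark 3.2 aggregates, leaving 2.13 to root-spark33 only
  MatrixExclude(Map("scala" -> "2.13.8", "project" -> "root-spark31")),
  MatrixExclude(Map("scala" -> "2.13.8", "project" -> "root-spark32"))
)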

README.md

Lines changed: 2 additions & 1 deletion
@@ -38,6 +38,7 @@ The compatible versions of [Spark](http://spark.apache.org/) and
 | 0.11.0* | 3.2.0 / 3.1.2 / 3.0.1 | 2.x | 2.x | 2.12 / 2.13
 | 0.11.1  | 3.2.0 / 3.1.2 / 3.0.1 | 2.x | 2.x | 2.12 / 2.13
 | 0.12.0  | 3.2.1 / 3.1.3 / 3.0.3 | 2.x | 3.x | 2.12 / 2.13
+| 0.13.0  | 3.3.0 / 3.2.1 / 3.1.3 | 2.x | 3.x | 2.12 / 2.13
 
 _\* 0.11.0 has broken Spark 3.1.2 and 3.0.1 artifacts published._
 
@@ -49,8 +50,8 @@ Starting 0.11 we introduced Spark cross published artifacts:
 Artifact names examples:
 
 * `frameless-dataset` (the latest Spark dependency)
+* `frameless-dataset-spark32` (Spark 3.2.x dependency)
 * `frameless-dataset-spark31` (Spark 3.1.x dependency)
-* `frameless-dataset-spark30` (Spark 3.0.x dependency)
 
 Versions 0.5.x and 0.6.x have identical features. The first is compatible with Spark 2.2.1 and the second with 2.3.0.
 
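For downstream users, each module is published once per supported Spark line, so a build picks exactly one of the names above. A minimal sbt sketch; the org.typelevel group id and the 0.13.0 version are assumptions taken from the compatibility table, not from this diff:

// Spark 3.3.x builds depend on the primary artifact...
libraryDependencies += "org.typelevel" %% "frameless-dataset" % "0.13.0"

// ...while a Spark 3.2.x build would use the cross-published variant instead.
libraryDependencies += "org.typelevel" %% "frameless-dataset-spark32" % "0.13.0"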

build.sbt

Lines changed: 45 additions & 45 deletions
@@ -1,6 +1,6 @@
 val sparkVersion = "3.3.0"
+val spark32Version = "3.2.1"
 val spark31Version = "3.1.3"
-val spark30Version = "3.0.3"
 val catsCoreVersion = "2.8.0"
 val catsEffectVersion = "3.3.13"
 val catsMtlVersion = "1.3.0"
@@ -11,7 +11,7 @@ val scalacheck = "1.16.0"
 val scalacheckEffect = "1.0.4"
 val refinedVersion = "0.10.1"
 
-val Scala212 = "2.12.15"
+val Scala212 = "2.12.16"
 val Scala213 = "2.13.8"
 
 ThisBuild / tlBaseVersion := "0.12"
@@ -23,23 +23,23 @@ ThisBuild / githubWorkflowArtifactUpload := false // doesn't work with scoverage
 
 lazy val root = project.in(file("."))
   .enablePlugins(NoPublishPlugin)
-  .aggregate(`root-spark32`, `root-spark31`, `root-spark30`, docs)
+  .aggregate(`root-spark33`, `root-spark32`, `root-spark31`, docs)
+
+lazy val `root-spark33` = project
+  .in(file(".spark33"))
+  .enablePlugins(NoPublishPlugin)
+  .aggregate(core, cats, dataset, refined, ml)
 
 lazy val `root-spark32` = project
   .in(file(".spark32"))
   .enablePlugins(NoPublishPlugin)
-  .aggregate(core, cats, dataset, refined, ml)
+  .aggregate(core, `cats-spark32`, `dataset-spark32`, `refined-spark32`, `ml-spark32`)
 
 lazy val `root-spark31` = project
   .in(file(".spark31"))
   .enablePlugins(NoPublishPlugin)
   .aggregate(core, `cats-spark31`, `dataset-spark31`, `refined-spark31`, `ml-spark31`)
 
-lazy val `root-spark30` = project
-  .in(file(".spark30"))
-  .enablePlugins(NoPublishPlugin)
-  .aggregate(core, `cats-spark30`, `dataset-spark30`, `refined-spark30`, `ml-spark30`)
-
 lazy val core = project
   .settings(name := "frameless-core")
   .settings(framelessSettings)
@@ -49,61 +49,61 @@ lazy val cats = project
   .settings(catsSettings)
   .dependsOn(dataset % "test->test;compile->compile;provided->provided")
 
+lazy val `cats-spark32` = project
+  .settings(name := "frameless-cats-spark32")
+  .settings(sourceDirectory := (cats / sourceDirectory).value)
+  .settings(catsSettings)
+  .settings(spark32Settings)
+  .dependsOn(`dataset-spark32` % "test->test;compile->compile;provided->provided")
+
 lazy val `cats-spark31` = project
   .settings(name := "frameless-cats-spark31")
   .settings(sourceDirectory := (cats / sourceDirectory).value)
   .settings(catsSettings)
   .settings(spark31Settings)
   .dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")
 
-lazy val `cats-spark30` = project
-  .settings(name := "frameless-cats-spark30")
-  .settings(sourceDirectory := (cats / sourceDirectory).value)
-  .settings(catsSettings)
-  .settings(spark30Settings)
-  .dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
-
 lazy val dataset = project
   .settings(name := "frameless-dataset")
   .settings(datasetSettings)
   .settings(sparkDependencies(sparkVersion))
   .dependsOn(core % "test->test;compile->compile")
 
-lazy val `dataset-spark31` = project
-  .settings(name := "frameless-dataset-spark31")
+lazy val `dataset-spark32` = project
+  .settings(name := "frameless-dataset-spark32")
   .settings(sourceDirectory := (dataset / sourceDirectory).value)
   .settings(datasetSettings)
-  .settings(sparkDependencies(spark31Version))
-  .settings(spark31Settings)
+  .settings(sparkDependencies(spark32Version))
+  .settings(spark32Settings)
   .dependsOn(core % "test->test;compile->compile")
 
-lazy val `dataset-spark30` = project
-  .settings(name := "frameless-dataset-spark30")
+lazy val `dataset-spark31` = project
+  .settings(name := "frameless-dataset-spark31")
   .settings(sourceDirectory := (dataset / sourceDirectory).value)
   .settings(datasetSettings)
-  .settings(sparkDependencies(spark30Version))
-  .settings(spark30Settings)
+  .settings(sparkDependencies(spark31Version))
+  .settings(spark31Settings)
   .dependsOn(core % "test->test;compile->compile")
 
 lazy val refined = project
   .settings(name := "frameless-refined")
   .settings(refinedSettings)
   .dependsOn(dataset % "test->test;compile->compile;provided->provided")
 
+lazy val `refined-spark32` = project
+  .settings(name := "frameless-refined-spark32")
+  .settings(sourceDirectory := (refined / sourceDirectory).value)
+  .settings(refinedSettings)
+  .settings(spark32Settings)
+  .dependsOn(`dataset-spark32` % "test->test;compile->compile;provided->provided")
+
 lazy val `refined-spark31` = project
   .settings(name := "frameless-refined-spark31")
   .settings(sourceDirectory := (refined / sourceDirectory).value)
   .settings(refinedSettings)
   .settings(spark31Settings)
   .dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")
 
-lazy val `refined-spark30` = project
-  .settings(name := "frameless-refined-spark30")
-  .settings(sourceDirectory := (refined / sourceDirectory).value)
-  .settings(refinedSettings)
-  .settings(spark30Settings)
-  .dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
-
 lazy val ml = project
   .settings(name := "frameless-ml")
   .settings(mlSettings)
@@ -113,26 +113,26 @@ lazy val ml = project
     dataset % "test->test;compile->compile;provided->provided"
   )
 
-lazy val `ml-spark31` = project
-  .settings(name := "frameless-ml-spark31")
+lazy val `ml-spark32` = project
+  .settings(name := "frameless-ml-spark32")
   .settings(sourceDirectory := (ml / sourceDirectory).value)
   .settings(mlSettings)
-  .settings(sparkMlDependencies(spark31Version))
-  .settings(spark31Settings)
+  .settings(sparkMlDependencies(spark32Version))
+  .settings(spark32Settings)
   .dependsOn(
     core % "test->test;compile->compile",
-    `dataset-spark31` % "test->test;compile->compile;provided->provided"
+    `dataset-spark32` % "test->test;compile->compile;provided->provided"
   )
 
-lazy val `ml-spark30` = project
-  .settings(name := "frameless-ml-spark30")
+lazy val `ml-spark31` = project
+  .settings(name := "frameless-ml-spark31")
   .settings(sourceDirectory := (ml / sourceDirectory).value)
   .settings(mlSettings)
-  .settings(sparkMlDependencies(spark30Version))
-  .settings(spark30Settings)
+  .settings(sparkMlDependencies(spark31Version))
+  .settings(spark31Settings)
   .dependsOn(
     core % "test->test;compile->compile",
-    `dataset-spark30` % "test->test;compile->compile;provided->provided"
+    `dataset-spark31` % "test->test;compile->compile;provided->provided"
   )
 
 lazy val docs = project
@@ -258,12 +258,12 @@ lazy val framelessSettings = Seq(
   },
 ) ++ consoleSettings
 
-lazy val spark30Settings = Seq(
+lazy val spark31Settings = Seq(
   crossScalaVersions := Seq(Scala212)
 )
 
-lazy val spark31Settings = Seq(
-  crossScalaVersions := Seq(Scala212)
+lazy val spark32Settings = Seq(
+  mimaPreviousArtifacts := Set.empty
 )
 
 lazy val consoleSettings = Seq(
@@ -331,7 +331,7 @@ ThisBuild / githubWorkflowBuildPreamble ++= Seq(
   )
 )
 
-val roots = List("root-spark30", "root-spark31", "root-spark32")
+val roots = List("root-spark31", "root-spark32", "root-spark33")
 ThisBuild / githubWorkflowBuildMatrixAdditions +=
   "project" -> roots
 ThisBuild / githubWorkflowArtifactDownloadExtraKeys += "project"
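Taken together, the build.sbt changes apply one pattern per Spark line: pin a version, add per-subproject modules that reuse the primary module's sources, gather them under a no-publish root-sparkNN aggregate, and append that aggregate to roots so it becomes a project axis in the generated CI matrix. A minimal sketch of the same pattern for a hypothetical future Spark line (all spark34* names are illustrative and not part of this commit):

// Hypothetical: a new Spark line starts with a version pin and a settings group.
val spark34Version = "3.4.0"

lazy val spark34Settings = Seq(
  mimaPreviousArtifacts := Set.empty // nothing published yet to compare against
)

// One module per subproject, sharing the primary module's sources.
lazy val `dataset-spark34` = project
  .settings(name := "frameless-dataset-spark34")
  .settings(sourceDirectory := (dataset / sourceDirectory).value)
  .settings(datasetSettings)
  .settings(sparkDependencies(spark34Version))
  .settings(spark34Settings)
  .dependsOn(core % "test->test;compile->compile")

// A no-publish aggregate ties the new modules together; "root-spark34" would
// also be appended to `roots` above so it shows up in the CI matrix.
lazy val `root-spark34` = project
  .in(file(".spark34"))
  .enablePlugins(NoPublishPlugin)
  .aggregate(core, `dataset-spark34`) // plus the cats/refined/ml counterparts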
