Commit 6c6ce1b

SPARKC-710: Update component versions (#1367)
Updated the Cassandra Java driver version and the Cassandra versions used for testing.
Parent: dbbf028 · Commit: 6c6ce1b

24 files changed: +230 −160 lines

.github/workflows/main.yml

Lines changed: 14 additions & 8 deletions
@@ -13,32 +13,38 @@ jobs:
   build:
     runs-on: ubuntu-latest
     strategy:
+      fail-fast: false
       matrix:
-        scala: [2.12.11, 2.13.11]
-        db-version: [3.11.10, 4.0-rc2, 6.8.13]
+        scala: [2.12.19, 2.13.13]
+        db-version: [3.11.17, 4.0.12, 4.1.4, 5.0-beta1, dse-6.8.44]
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: ccm pip installation
         uses: BSFishy/pip-action@v1
         with:
-          packages: git+https://github.com/riptano/ccm.git@435f3210e16d0b648fbf33d6390d5ab4c9e630d4
+          packages: git+https://github.com/riptano/ccm.git@d74db63d75112908a77b6c80757df9343fdc3338
 
-      - name: Setup Scala
-        uses: olafurpg/setup-scala@v10
+      - name: Setup Java
+        uses: actions/setup-java@v4
         with:
-          java-version: "[email protected]"
+          distribution: "temurin"
+          java-version: | # order is important, the last one is the default which will be used by SBT
+            11
+            8
 
       - name: sbt tests
         env:
           TEST_PARALLEL_TASKS: 1
           CCM_CASSANDRA_VERSION: ${{ matrix.db-version }}
           PUBLISH_VERSION: test
+          JAVA8_HOME: ${{ env.JAVA_HOME_8_X64 }}
+          JAVA11_HOME: ${{ env.JAVA_HOME_11_X64 }}
         run: sbt/sbt ++${{ matrix.scala }} test it:test
 
       - name: Publish Test Report
-        uses: mikepenz/action-junit-report@v3
+        uses: mikepenz/action-junit-report@v4
         if: always()
         with:
           report_paths: '**/target/test-reports/*.xml'
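The updated workflow installs two JDKs with actions/setup-java (11 first, then 8, so 8 ends up as the default JVM that launches sbt) and exports their locations as JAVA8_HOME and JAVA11_HOME. Below is a minimal Scala sketch of how a build or test step could pick up those variables; the object name and wiring are illustrative assumptions, not the connector's actual build code.

    // Hedged sketch: reading the JDK locations the workflow exports.
    // JAVA8_HOME / JAVA11_HOME come from env.JAVA_HOME_8_X64 / env.JAVA_HOME_11_X64 in CI;
    // everything else here is hypothetical wiring for illustration.
    object JdkHomesSketch {
      def main(args: Array[String]): Unit = {
        val java8Home  = sys.env.get("JAVA8_HOME")
        val java11Home = sys.env.get("JAVA11_HOME")

        // setup-java installs both JDKs; the last `java-version` entry (8) becomes
        // JAVA_HOME, i.e. the JVM sbt itself runs on.
        println(s"sbt JVM home:     ${sys.props.getOrElse("java.home", "unknown")}")
        println(s"JDK 8 toolchain:  ${java8Home.getOrElse("<not set>")}")
        println(s"JDK 11 toolchain: ${java11Home.getOrElse("<not set>")}")
      }
    }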

README.md

Lines changed: 22 additions & 22 deletions
@@ -53,26 +53,27 @@ Currently, the following branches are actively supported:
 3.0.x ([b3.0](https://github.com/datastax/spark-cassandra-connector/tree/b3.0)) and
 2.5.x ([b2.5](https://github.com/datastax/spark-cassandra-connector/tree/b2.5)).
 
-| Connector | Spark         | Cassandra             | Cassandra Java Driver | Minimum Java Version | Supported Scala Versions |
-|-----------|---------------|-----------------------| --------------------- | -------------------- | ----------------------- |
-| 3.5       | 3.5           | 2.1.5*, 2.2, 3.x, 4.x | 4.13                  | 8                    | 2.12, 2.13               |
-| 3.4       | 3.4           | 2.1.5*, 2.2, 3.x, 4.x | 4.13                  | 8                    | 2.12, 2.13               |
-| 3.3       | 3.3           | 2.1.5*, 2.2, 3.x, 4.x | 4.13                  | 8                    | 2.12                     |
-| 3.2       | 3.2           | 2.1.5*, 2.2, 3.x, 4.0 | 4.13                  | 8                    | 2.12                     |
-| 3.1       | 3.1           | 2.1.5*, 2.2, 3.x, 4.0 | 4.12                  | 8                    | 2.12                     |
-| 3.0       | 3.0           | 2.1.5*, 2.2, 3.x, 4.0 | 4.12                  | 8                    | 2.12                     |
-| 2.5       | 2.4           | 2.1.5*, 2.2, 3.x, 4.0 | 4.12                  | 8                    | 2.11, 2.12               |
-| 2.4.2     | 2.4           | 2.1.5*, 2.2, 3.x      | 3.0                   | 8                    | 2.11, 2.12               |
-| 2.4       | 2.4           | 2.1.5*, 2.2, 3.x      | 3.0                   | 8                    | 2.11                     |
-| 2.3       | 2.3           | 2.1.5*, 2.2, 3.x      | 3.0                   | 8                    | 2.11                     |
-| 2.0       | 2.0, 2.1, 2.2 | 2.1.5*, 2.2, 3.x      | 3.0                   | 8                    | 2.10, 2.11               |
-| 1.6       | 1.6           | 2.1.5*, 2.2, 3.0      | 3.0                   | 7                    | 2.10, 2.11               |
-| 1.5       | 1.5, 1.6      | 2.1.5*, 2.2, 3.0      | 3.0                   | 7                    | 2.10, 2.11               |
-| 1.4       | 1.4           | 2.1.5*                | 2.1                   | 7                    | 2.10, 2.11               |
-| 1.3       | 1.3           | 2.1.5*                | 2.1                   | 7                    | 2.10, 2.11               |
-| 1.2       | 1.2           | 2.1, 2.0              | 2.1                   | 7                    | 2.10, 2.11               |
-| 1.1       | 1.1, 1.0      | 2.1, 2.0              | 2.1                   | 7                    | 2.10, 2.11               |
-| 1.0       | 1.0, 0.9      | 2.0                   | 2.0                   | 7                    | 2.10, 2.11               |
+| Connector | Spark         | Cassandra                  | Cassandra Java Driver | Minimum Java Version | Supported Scala Versions |
+|-----------|---------------|----------------------------|-----------------------|----------------------|--------------------------|
+| 3.5.1     | 3.5           | 2.1.5*, 2.2, 3.x, 4.x, 5.0 | 4.18                  | 8                    | 2.12, 2.13               |
+| 3.5       | 3.5           | 2.1.5*, 2.2, 3.x, 4.x      | 4.13                  | 8                    | 2.12, 2.13               |
+| 3.4       | 3.4           | 2.1.5*, 2.2, 3.x, 4.x      | 4.13                  | 8                    | 2.12, 2.13               |
+| 3.3       | 3.3           | 2.1.5*, 2.2, 3.x, 4.x      | 4.13                  | 8                    | 2.12                     |
+| 3.2       | 3.2           | 2.1.5*, 2.2, 3.x, 4.0      | 4.13                  | 8                    | 2.12                     |
+| 3.1       | 3.1           | 2.1.5*, 2.2, 3.x, 4.0      | 4.12                  | 8                    | 2.12                     |
+| 3.0       | 3.0           | 2.1.5*, 2.2, 3.x, 4.0      | 4.12                  | 8                    | 2.12                     |
+| 2.5       | 2.4           | 2.1.5*, 2.2, 3.x, 4.0      | 4.12                  | 8                    | 2.11, 2.12               |
+| 2.4.2     | 2.4           | 2.1.5*, 2.2, 3.x           | 3.0                   | 8                    | 2.11, 2.12               |
+| 2.4       | 2.4           | 2.1.5*, 2.2, 3.x           | 3.0                   | 8                    | 2.11                     |
+| 2.3       | 2.3           | 2.1.5*, 2.2, 3.x           | 3.0                   | 8                    | 2.11                     |
+| 2.0       | 2.0, 2.1, 2.2 | 2.1.5*, 2.2, 3.x           | 3.0                   | 8                    | 2.10, 2.11               |
+| 1.6       | 1.6           | 2.1.5*, 2.2, 3.0           | 3.0                   | 7                    | 2.10, 2.11               |
+| 1.5       | 1.5, 1.6      | 2.1.5*, 2.2, 3.0           | 3.0                   | 7                    | 2.10, 2.11               |
+| 1.4       | 1.4           | 2.1.5*                     | 2.1                   | 7                    | 2.10, 2.11               |
+| 1.3       | 1.3           | 2.1.5*                     | 2.1                   | 7                    | 2.10, 2.11               |
+| 1.2       | 1.2           | 2.1, 2.0                   | 2.1                   | 7                    | 2.10, 2.11               |
+| 1.1       | 1.1, 1.0      | 2.1, 2.0                   | 2.1                   | 7                    | 2.10, 2.11               |
+| 1.0       | 1.0, 0.9      | 2.0                        | 2.0                   | 7                    | 2.10, 2.11               |
 
 **Compatible with 2.1.X where X >= 5*
 
@@ -193,14 +194,13 @@ Note that the integration tests require [CCM](https://github.com/riptano/ccm) to
 See [Tips for Developing the Spark Cassandra Connector](doc/developers.md) for details.
 
 By default, integration tests start up a separate, single Cassandra instance and run Spark in local mode.
-It is possible to run integration tests with your own Cassandra and/or Spark cluster.
+It is possible to run integration tests with your own Spark cluster.
 First, prepare a jar with testing code:
 
     ./sbt/sbt test:package
 
 Then copy the generated test jar to your Spark nodes and run:
 
-    export IT_TEST_CASSANDRA_HOST=<IP of one of the Cassandra nodes>
     export IT_TEST_SPARK_MASTER=<Spark Master URL>
     ./sbt/sbt it:test

build.sbt

Lines changed: 2 additions & 2 deletions
@@ -3,8 +3,8 @@ import sbt.Keys.parallelExecution
 import sbt.{Compile, moduleFilter, _}
 import sbtassembly.AssemblyPlugin.autoImport.assembly
 
-lazy val scala212 = "2.12.11"
-lazy val scala213 = "2.13.11"
+lazy val scala212 = "2.12.19"
+lazy val scala213 = "2.13.13"
 lazy val supportedScalaVersions = List(scala212, scala213)
 
 // factor out common settings
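The scala212/scala213 bumps feed the project's cross-build list. As a quick illustration, the sbt-style sketch below shows how such a list is typically wired into a module; the `example` project and its settings are hypothetical, while the version values and `supportedScalaVersions` come straight from the change above.

    // build.sbt-style sketch (hypothetical module, real sbt keys):
    lazy val scala212 = "2.12.19"
    lazy val scala213 = "2.13.13"
    lazy val supportedScalaVersions = List(scala212, scala213)

    lazy val example = (project in file("example"))
      .settings(
        scalaVersion := scala212,                     // used by a plain `sbt test`
        crossScalaVersions := supportedScalaVersions  // enables `sbt +test` or `sbt ++2.13.13 test`
      )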
Lines changed: 29 additions & 0 deletions
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the console
+rootLogger.level = warn
+rootLogger.appenderRef.stdout.ref = console
+
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_OUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %5p %d{HH:mm:ss,SSS} [T%X{TEST_GROUP_NO}] %C (%F:%L) - %m%n
+
+logger.ccm.name = com.datastax.spark.connector.ccm
+logger.ccm.level = info

connector/src/it/resources/logback.xml

Lines changed: 1 addition & 0 deletions
@@ -19,4 +19,5 @@
         <appender-ref ref="STDOUT" />
     </root>
 
+    <logger name="com.datastax.spark.connector.ccm" level="INFO"/>
 </configuration>
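Both logging changes raise only the com.datastax.spark.connector.ccm logger to INFO while the root logger stays at WARN, so CCM cluster provisioning is visible without making the rest of the test output noisy. Below is a small Scala sketch of the resulting behaviour, assuming the Log4j2 properties file above is the active configuration and log4j-api/log4j-core are on the classpath:

    import org.apache.logging.log4j.LogManager

    object LoggingSketch {
      def main(args: Array[String]): Unit = {
        // Matches the dedicated logger declared in the configuration above.
        val ccmLog = LogManager.getLogger("com.datastax.spark.connector.ccm")
        // Arbitrary logger name used for contrast; falls through to the WARN root logger.
        val other  = LogManager.getLogger("com.example.Other")

        ccmLog.info("printed: the ccm logger is configured at INFO")
        other.info("suppressed: below the root logger's WARN threshold")
        other.warn("printed: WARN passes the root threshold")
      }
    }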

connector/src/it/scala/com/datastax/spark/connector/cluster/Fixtures.scala

Lines changed: 9 additions & 3 deletions
@@ -136,9 +136,15 @@ trait AuthCluster extends SingleClusterFixture {
         "authentication_options.enabled" -> "true"
       )))
     } else {
-      Seq(sslConf.copy(cassandraConfiguration = sslConf.cassandraConfiguration ++ Map(
-        "authenticator" -> "PasswordAuthenticator"
-      )))
+      if (defaultConfig.getCassandraVersion.compareTo(CcmConfig.V5_0_0) >= 0) {
+        Seq(sslConf.copy(cassandraConfiguration = sslConf.cassandraConfiguration ++ Map(
+          "authenticator.class_name" -> "PasswordAuthenticator"
+        )))
+      } else {
+        Seq(sslConf.copy(cassandraConfiguration = sslConf.cassandraConfiguration ++ Map(
+          "authenticator" -> "PasswordAuthenticator"
+        )))
+      }
     }
   }
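The fixture now chooses the authenticator key by Cassandra version: 5.0 and later expect the nested authenticator.class_name setting, while older releases use the flat authenticator key. The self-contained Scala sketch below mirrors that decision with a naive dotted-version comparison; it is illustrative only and deliberately avoids the connector's CcmConfig types.

    // Sketch of the version-gated config-key selection (assumption: a plain
    // numeric comparison of dotted version prefixes is good enough here).
    object AuthenticatorKeySketch {
      private def parse(v: String): Seq[Int] =
        v.takeWhile(c => c.isDigit || c == '.').split('.').filter(_.nonEmpty).map(_.toInt).toSeq

      private def atLeast(version: String, threshold: String): Boolean = {
        val (a, b) = (parse(version), parse(threshold))
        val len = math.max(a.length, b.length)
        a.padTo(len, 0).zip(b.padTo(len, 0))
          .collectFirst { case (x, y) if x != y => x > y }
          .getOrElse(true)
      }

      def passwordAuthenticatorConfig(cassandraVersion: String): Map[String, String] =
        if (atLeast(cassandraVersion, "5.0.0"))
          Map("authenticator.class_name" -> "PasswordAuthenticator")  // Cassandra 5.0+
        else
          Map("authenticator" -> "PasswordAuthenticator")             // pre-5.0

      def main(args: Array[String]): Unit = {
        println(passwordAuthenticatorConfig("4.1.4"))     // Map(authenticator -> PasswordAuthenticator)
        println(passwordAuthenticatorConfig("5.0-beta1")) // Map(authenticator.class_name -> PasswordAuthenticator)
      }
    }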

connector/src/it/scala/com/datastax/spark/connector/cql/sai/IndexedKeySpec.scala

Lines changed: 12 additions & 12 deletions
@@ -1,15 +1,15 @@
 package com.datastax.spark.connector.cql.sai
 
 import com.datastax.spark.connector.SparkCassandraITWordSpecBase
-import com.datastax.spark.connector.ccm.CcmConfig.V6_8_3
+import com.datastax.spark.connector.ccm.CcmConfig.DSE_V6_8_3
 import com.datastax.spark.connector.cluster.DefaultCluster
 import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.sources._
 
 class IndexedKeySpec extends SparkCassandraITWordSpecBase with DefaultCluster with SaiBaseSpec {
 
   override def beforeClass {
-    dseFrom(V6_8_3) {
+    dseFrom(DSE_V6_8_3) {
       conn.withSessionDo { session =>
         createKeyspace(session, ks)
         session.execute(
@@ -46,7 +46,7 @@ class IndexedKeySpec extends SparkCassandraITWordSpecBase with DefaultCluster wi
   }
 
   "Index on partition key columns" should {
-    "allow for predicate push down for indexed parts of the partition key" in dseFrom(V6_8_3) {
+    "allow for predicate push down for indexed parts of the partition key" in dseFrom(DSE_V6_8_3) {
       assertPushedPredicate(
         df("pk_test").filter(col("pk_1") === 1),
         pushedPredicate = EqualTo("pk_1", 1))
@@ -64,13 +64,13 @@ class IndexedKeySpec extends SparkCassandraITWordSpecBase with DefaultCluster wi
         pushedPredicate = GreaterThanOrEqual("pk_2", 1))
     }
 
-    "allow for multiple predicate push down for the same indexed part of the partition key" in dseFrom(V6_8_3) {
+    "allow for multiple predicate push down for the same indexed part of the partition key" in dseFrom(DSE_V6_8_3) {
       assertPushedPredicate(
         df("pk_test").filter(col("pk_3") < 10 and col("pk_3") > 0),
         pushedPredicate = LessThan("pk_3", 10), GreaterThan("pk_3", 0))
     }
 
-    "allow for multiple range predicate push down for different indexed parts of the partition key" in dseFrom(V6_8_3) {
+    "allow for multiple range predicate push down for different indexed parts of the partition key" in dseFrom(DSE_V6_8_3) {
       assertPushedPredicate(
         df("pk_test").filter(col("pk_3") < 10 and col("pk_1") > 0),
         pushedPredicate = LessThan("pk_3", 10), GreaterThan("pk_1", 0))
@@ -82,7 +82,7 @@ class IndexedKeySpec extends SparkCassandraITWordSpecBase with DefaultCluster wi
         pushedPredicate = EqualTo("pk_3", 10), LessThan("v_1", 1))
     }
 
-    "allow for range predicate push down for the partition key" in dseFrom(V6_8_3) {
+    "allow for range predicate push down for the partition key" in dseFrom(DSE_V6_8_3) {
       assertPushedPredicate(
         df("pk_test").filter(col("pk_3") < 10 and col("pk_1") > 0 and col("pk_2") >= 0),
         pushedPredicate = LessThan("pk_3", 10), GreaterThan("pk_1", 0), GreaterThanOrEqual("pk_2", 0))
@@ -91,7 +91,7 @@ class IndexedKeySpec extends SparkCassandraITWordSpecBase with DefaultCluster wi
         pushedPredicate = EqualTo("pk_3", 10), LessThan("pk_1", 6), EqualTo("pk_2", 1))
     }
 
-    "not allow for regular column predicate push down if any part of the partition key has an IN clause" in dseFrom(V6_8_3) {
+    "not allow for regular column predicate push down if any part of the partition key has an IN clause" in dseFrom(DSE_V6_8_3) {
       assertNonPushedColumns(
         df("pk_test").filter("pk_1 = 1 and pk_2 = 2 and pk_3 in(1, 3) and v_1 < 5"),
         nonPushedColumns = "v_1")
@@ -103,32 +103,32 @@ class IndexedKeySpec extends SparkCassandraITWordSpecBase with DefaultCluster wi
         nonPushedColumns = "v_1")
     }
 
-    "allow for regular column predicate push down if a part of the clustering key has an IN clause" in dseFrom(V6_8_3) {
+    "allow for regular column predicate push down if a part of the clustering key has an IN clause" in dseFrom(DSE_V6_8_3) {
       assertPushedPredicate(
         df("pk_test").filter("pk_1 = 1 and pk_2 = 2 and pk_3 = 3 and ck_1 in (1,2) and v_1 < 5"),
         pushedPredicate = EqualTo("pk_1", 1), EqualTo("pk_2", 2), EqualTo("pk_3", 3), In("ck_1", Array(1, 2)), LessThan("v_1", 5))
     }
 
-    "not allow for push down if more than one equality predicate is defined" in dseFrom(V6_8_3) {
+    "not allow for push down if more than one equality predicate is defined" in dseFrom(DSE_V6_8_3) {
      val data = df("pk_test").filter(col("pk_1") === 7 and col("pk_1") === 10)
      assertPushedPredicate(data, pushedPredicate = EqualTo("pk_1", 7))
     }
 
-    "allow only for equality push down if equality and range predicates are defined for the same pk column" in dseFrom(V6_8_3) {
+    "allow only for equality push down if equality and range predicates are defined for the same pk column" in dseFrom(DSE_V6_8_3) {
      val data = df("pk_test").filter(col("pk_1") === 7 and col("pk_1") < 10)
      assertPushedPredicate(data, pushedPredicate = EqualTo("pk_1", 7))
      data.count() shouldBe 2
     }
   }
 
   "Index on clustering key columns" should {
-    "allow for predicate push down for indexed parts of the clustering key" in dseFrom(V6_8_3) {
+    "allow for predicate push down for indexed parts of the clustering key" in dseFrom(DSE_V6_8_3) {
      assertPushedPredicate(
        df("pk_test").filter(col("ck_2") === 1),
        pushedPredicate = EqualTo("ck_2", 1))
     }
 
-    "not allow for predicate push down for non-indexed parts of the clustering key" in dseFrom(V6_8_3) {
+    "not allow for predicate push down for non-indexed parts of the clustering key" in dseFrom(DSE_V6_8_3) {
      assertNoPushDown(df("pk_test").filter(col("ck_3") === 1))
     }
   }

connector/src/it/scala/com/datastax/spark/connector/cql/sai/IndexedListSpec.scala

Lines changed: 2 additions & 2 deletions
@@ -1,14 +1,14 @@
 package com.datastax.spark.connector.cql.sai
 
 import com.datastax.spark.connector.SparkCassandraITWordSpecBase
-import com.datastax.spark.connector.ccm.CcmConfig.V6_8_3
+import com.datastax.spark.connector.ccm.CcmConfig.DSE_V6_8_3
 import com.datastax.spark.connector.cluster.DefaultCluster
 
 
 class IndexedListSpec extends SparkCassandraITWordSpecBase with DefaultCluster with SaiCollectionBaseSpec {
 
   override def beforeClass {
-    dseFrom(V6_8_3) {
+    dseFrom(DSE_V6_8_3) {
       conn.withSessionDo { session =>
         createKeyspace(session, ks)
         session.execute(
