
Commit 0c2a244

marmbrus authored and liancheng committed
[SPARK-3646][SQL] Copy SQL configuration from SparkConf when a SQLContext is created.
This will allow us to take advantage of things like the spark.defaults file.

Author: Michael Armbrust <[email protected]>

Closes apache#2493 from marmbrus/copySparkConf and squashes the following commits:

0bd1377 [Michael Armbrust] Copy SQL configuration from SparkConf when a SQLContext is created.

Conflicts:
	sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
1 parent 3202a36 commit 0c2a244
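As a minimal sketch of what this change enables (assuming a local master; spark.sql.shuffle.partitions stands in here for any spark.sql.*-prefixed key, including entries loaded from a defaults file):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ConfPropagationExample {
  def main(args: Array[String]): Unit = {
    // Put a spark.sql.* entry on the SparkConf before any context exists.
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("ConfPropagationExample")
      .set("spark.sql.shuffle.partitions", "4")
    val sc = new SparkContext(conf)

    // With this patch, the SQLContext constructor copies every
    // spark.sql.* entry from the SparkConf into its own SQL configuration.
    val sqlContext = new SQLContext(sc)
    assert(sqlContext.getConf("spark.sql.shuffle.partitions") == "4")

    sc.stop()
  }
}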

File tree

3 files changed: +21 -3 lines changed


sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 5 additions & 0 deletions

@@ -75,6 +75,11 @@ class SQLContext(@transient val sparkContext: SparkContext)
   protected[sql] def executePlan(plan: LogicalPlan): this.QueryExecution =
     new this.QueryExecution { val logical = plan }
 
+  sparkContext.getConf.getAll.foreach {
+    case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
+    case _ =>
+  }
+
   /**
    * :: DeveloperApi ::
    * Allows catalyst LogicalPlans to be executed as a SchemaRDD. Note that the LogicalPlan
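The added block runs in the SQLContext constructor body, so spark.sql.* entries are copied exactly once, when the context is created. For readers less familiar with the Scala idiom, here is a self-contained sketch of the same guarded-case filtering pattern, with hypothetical settings standing in for a real SparkConf:

object PrefixCopyExample extends App {
  // Copy only entries whose key starts with "spark.sql"; the catch-all
  // case silently skips everything else, mirroring the constructor code.
  val source = Seq(
    "spark.sql.shuffle.partitions" -> "4",
    "spark.executor.memory" -> "2g")

  val copied = scala.collection.mutable.Map.empty[String, String]
  source.foreach {
    case (key, value) if key.startsWith("spark.sql") => copied(key) = value
    case _ => // non-SQL settings are ignored
  }

  assert(copied == Map("spark.sql.shuffle.partitions" -> "4"))
}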

sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala

Lines changed: 6 additions & 2 deletions

@@ -17,12 +17,16 @@
 
 package org.apache.spark.sql.test
 
-import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.{SQLConf, SQLContext}
+import org.apache.spark.{SparkConf, SparkContext}
 
 /** A SQLContext that can be used for local testing. */
 object TestSQLContext
-  extends SQLContext(new SparkContext("local[2]", "TestSQLContext", new SparkConf())) {
+  extends SQLContext(
+    new SparkContext(
+      "local[2]",
+      "TestSQLContext",
+      new SparkConf().set("spark.sql.testkey", "true"))) {
 
   /** Fewer partitions to speed up testing. */
   override private[spark] def numShufflePartitions: Int =

sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala

Lines changed: 10 additions & 1 deletion

@@ -17,16 +17,25 @@
 
 package org.apache.spark.sql
 
+import org.scalatest.FunSuiteLike
+
 import org.apache.spark.sql.test._
 
 /* Implicits */
 import TestSQLContext._
 
-class SQLConfSuite extends QueryTest {
+class SQLConfSuite extends QueryTest with FunSuiteLike {
 
   val testKey = "test.key.0"
   val testVal = "test.val.0"
 
+  test("propagate from spark conf") {
+    // We create a new context here to avoid order dependence with other tests that might call
+    // clear().
+    val newContext = new SQLContext(TestSQLContext.sparkContext)
+    assert(newContext.getConf("spark.sql.testkey", "false") == "true")
+  }
+
   test("programmatic ways of basic setting and getting") {
     clear()
     assert(getAllConfs.size === 0)
