sdk/python/feast/infra/compute_engines/spark/utils.py (3 changes: 3 additions, 0 deletions)
@@ -21,6 +21,9 @@ def get_or_create_new_spark_session(
             conf=SparkConf().setAll([(k, v) for k, v in spark_config.items()])
         )
 
+    spark_builder = spark_builder.config("spark.driver.host", "127.0.0.1")
+    spark_builder = spark_builder.config("spark.driver.bindAddress", "127.0.0.1")
+
     spark_session = spark_builder.getOrCreate()
     spark_session.conf.set("spark.sql.execution.arrow.pyspark.enabled", "true")
     return spark_session
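Setting these two options together pins the Spark driver to loopback: spark.driver.bindAddress is the local interface the driver's RPC endpoints listen on, and spark.driver.host is the address advertised to executors. A minimal standalone sketch of the same technique (the app name and the smoke check are illustrative, not part of this PR):

```python
from pyspark.sql import SparkSession

# Pin the driver to loopback so a local[*] session does not depend on the
# machine's hostname resolving to a reachable address (a common failure in
# CI containers and on laptops with unusual /etc/hosts entries).
spark = (
    SparkSession.builder.appName("loopback-sketch")  # hypothetical app name
    .master("local[*]")
    .config("spark.driver.host", "127.0.0.1")  # address advertised to executors
    .config("spark.driver.bindAddress", "127.0.0.1")  # interface the driver binds
    .getOrCreate()
)
assert spark.range(3).count() == 3  # smoke check that the session works
spark.stop()
```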
(next changed file; path not shown)
@@ -35,6 +35,8 @@ def __init__(self, project_name: str, *args, **kwargs):
             "spark.eventLog.enabled": "false",
             "spark.sql.parser.quotedRegexColumnNames": "true",
             "spark.sql.session.timeZone": "UTC",
+            "spark.driver.host": "127.0.0.1",
+            "spark.driver.bindAddress": "127.0.0.1",
         }
         if not self.spark_offline_store_config:
             self.create_offline_store_config()
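For the dict-style configuration above, here is a sketch of how such a mapping typically reaches Spark, mirroring the SparkConf().setAll call from utils.py in this same PR (the spark_config contents here are an illustrative subset, not the full dict):

```python
from pyspark import SparkConf
from pyspark.sql import SparkSession

# Illustrative subset of the configuration dict above.
spark_config = {
    "spark.sql.session.timeZone": "UTC",
    "spark.driver.host": "127.0.0.1",
    "spark.driver.bindAddress": "127.0.0.1",
}

# Same pattern as utils.py: fold the dict into a SparkConf and hand the
# whole object to the builder in one call.
spark = (
    SparkSession.builder.master("local[*]")
    .config(conf=SparkConf().setAll([(k, v) for k, v in spark_config.items()]))
    .getOrCreate()
)
print(spark.sparkContext.getConf().get("spark.driver.host"))  # 127.0.0.1
spark.stop()
```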
(next changed file; path not shown)
@@ -25,6 +25,8 @@ def spark_session():
         SparkSession.builder.appName("FeastSparkTests")
         .master("local[*]")
         .config("spark.sql.shuffle.partitions", "1")
+        .config("spark.driver.host", "127.0.0.1")
+        .config("spark.driver.bindAddress", "127.0.0.1")
         .getOrCreate()
     )
 
(next changed file; path not shown)
@@ -52,7 +52,12 @@ def remove_extra_spaces_sql(df, column_name):
 
 @pytest.fixture
 def spark_fixture():
-    spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
+    spark = (
+        SparkSession.builder.appName("Testing PySpark Example")
+        .config("spark.driver.host", "127.0.0.1")
+        .config("spark.driver.bindAddress", "127.0.0.1")
+        .getOrCreate()
+    )
     try:
         yield spark
     finally:
@@ -61,7 +66,12 @@ def spark_fixture():
 
 @patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
 def test_spark_transformation(spark_fixture):
-    spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
+    spark = (
+        SparkSession.builder.appName("Testing PySpark Example")
+        .config("spark.driver.host", "127.0.0.1")
+        .config("spark.driver.bindAddress", "127.0.0.1")
+        .getOrCreate()
+    )
     df = get_sample_df(spark)
 
     spark_transformation = Transformation(
@@ -77,7 +87,12 @@ def test_spark_transformation(spark_fixture):
 
 @patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
 def test_spark_transformation_init_transformation(spark_fixture):
-    spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
+    spark = (
+        SparkSession.builder.appName("Testing PySpark Example")
+        .config("spark.driver.host", "127.0.0.1")
+        .config("spark.driver.bindAddress", "127.0.0.1")
+        .getOrCreate()
+    )
     df = get_sample_df(spark)
 
     spark_transformation = SparkTransformation(
@@ -93,7 +108,12 @@ def test_spark_transformation_init_transformation(spark_fixture):
 
 @patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
 def test_spark_transformation_sql(spark_fixture):
-    spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
+    spark = (
+        SparkSession.builder.appName("Testing PySpark Example")
+        .config("spark.driver.host", "127.0.0.1")
+        .config("spark.driver.bindAddress", "127.0.0.1")
+        .getOrCreate()
+    )
     df = get_sample_df(spark)
 
     spark_transformation = SparkTransformation(
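Since every test file in this PR repeats the same two .config calls, a regression check could assert the effective values on a live session. A hedged sketch (not part of this PR; the test name is hypothetical):

```python
from pyspark.sql import SparkSession


def test_driver_binds_to_loopback():
    # Hypothetical regression test: confirm the session actually picked up
    # the loopback settings applied throughout this PR.
    spark = (
        SparkSession.builder.appName("loopback-check")
        .master("local[*]")
        .config("spark.driver.host", "127.0.0.1")
        .config("spark.driver.bindAddress", "127.0.0.1")
        .getOrCreate()
    )
    try:
        conf = spark.sparkContext.getConf()
        assert conf.get("spark.driver.host") == "127.0.0.1"
        assert conf.get("spark.driver.bindAddress") == "127.0.0.1"
    finally:
        spark.stop()
```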