Skip to content

Commit c2872f6

Browse files
authored
fix: only import pyspark if using databricks connect (#2432)
1 parent 8f8e18f commit c2872f6

File tree

1 file changed

+7
-7
lines changed

1 file changed

+7
-7
lines changed

sqlmesh/core/engine_adapter/databricks.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -137,11 +137,11 @@ def fetchdf(
137137
return df
138138

139139
def get_current_catalog(self) -> t.Optional[str]:
140-
from py4j.protocol import Py4JError
141-
from pyspark.errors.exceptions.connect import SparkConnectGrpcException
142-
143-
# Update the Dataframe API is we have a spark session
140+
# Update the Dataframe API if we have a spark session
144141
if self._use_spark_session:
142+
from py4j.protocol import Py4JError
143+
from pyspark.errors.exceptions.connect import SparkConnectGrpcException
144+
145145
try:
146146
# Note: Spark 3.4+ Only API
147147
return super().get_current_catalog()
@@ -153,15 +153,15 @@ def get_current_catalog(self) -> t.Optional[str]:
153153
return None
154154

155155
def set_current_catalog(self, catalog_name: str) -> None:
156-
from py4j.protocol import Py4JError
157-
from pyspark.errors.exceptions.connect import SparkConnectGrpcException
158-
159156
# Since Databricks splits commands across the Dataframe API and the SQL Connector
160157
# (depending if databricks-connect is installed and a Dataframe is used) we need to ensure both
161158
# are set to the same catalog since they maintain their default catalog separately
162159
self.execute(exp.Use(this=exp.to_identifier(catalog_name), kind="CATALOG"))
163160
# Update the Dataframe API if we have a spark session
164161
if self._use_spark_session:
162+
from py4j.protocol import Py4JError
163+
from pyspark.errors.exceptions.connect import SparkConnectGrpcException
164+
165165
try:
166166
# Note: Spark 3.4+ Only API
167167
super().set_current_catalog(catalog_name)

0 commit comments

Comments
 (0)