@@ -137,11 +137,11 @@ def fetchdf(
137137 return df
138138
139139 def get_current_catalog (self ) -> t .Optional [str ]:
140- from py4j .protocol import Py4JError
141- from pyspark .errors .exceptions .connect import SparkConnectGrpcException
142-
143- # Update the Dataframe API is we have a spark session
140+ # Update the Dataframe API if we have a spark session
144141 if self ._use_spark_session :
142+ from py4j .protocol import Py4JError
143+ from pyspark .errors .exceptions .connect import SparkConnectGrpcException
144+
145145 try :
146146 # Note: Spark 3.4+ Only API
147147 return super ().get_current_catalog ()
@@ -153,15 +153,15 @@ def get_current_catalog(self) -> t.Optional[str]:
153153 return None
154154
155155 def set_current_catalog (self , catalog_name : str ) -> None :
156- from py4j .protocol import Py4JError
157- from pyspark .errors .exceptions .connect import SparkConnectGrpcException
158-
159156 # Since Databricks splits commands across the Dataframe API and the SQL Connector
160157 # (depending if databricks-connect is installed and a Dataframe is used) we need to ensure both
161158 # are set to the same catalog since they maintain their default catalog separately
162159 self .execute (exp .Use (this = exp .to_identifier (catalog_name ), kind = "CATALOG" ))
163160 # Update the Dataframe API if we have a spark session
164161 if self ._use_spark_session :
162+ from py4j .protocol import Py4JError
163+ from pyspark .errors .exceptions .connect import SparkConnectGrpcException
164+
165165 try :
166166 # Note: Spark 3.4+ Only API
167167 super ().set_current_catalog (catalog_name )
0 commit comments