Pointing to multiple Spark versions in the same project
conda create --name databricks-connect python=3.10 -y
conda activate databricks-connect
pip install "databricks-connect==13.*"
export _SITE="$(python -c 'import sys; print(sys.path[-1])')"
export SPARK_HOME="$_SITE/pyspark"
export PYSPARK_PYTHON="$(which python)"
export PYSPARK_DRIVER_PYTHON="$PYSPARK_PYTHON"
export PATH="$SPARK_HOME/bin:$PATH"
export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
# expand the zips explicitly: a literal *.zip entry in PYTHONPATH would be ignored by Python
export PYTHONPATH="$_SITE:$(printf '%s:' "$SPARK_HOME"/python/lib/*.zip)$PYTHONPATH"
echo "SPARK_HOME=${SPARK_HOME};PYTHONPATH=${PYTHONPATH};PATH=${PATH}"
# paste the echoed values into the PyCharm run configuration's environment variables
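Before deactivating, a quick sanity check can confirm the remote environment is wired up. This is a minimal sketch and assumes you have already configured your Databricks connection (e.g. via ~/.databrickscfg or the DATABRICKS_* environment variables):
# runs a trivial job on the remote cluster; should print 10
python -c 'from databricks.connect import DatabricksSession; print(DatabricksSession.builder.getOrCreate().range(10).count())'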
conda deactivate
export SPARK_HOME="$HOME/spark-3.4/spark-3.4.1-bin-hadoop3"
export PYSPARK_PYTHON="$(which python)"
export PYSPARK_DRIVER_PYTHON="$PYSPARK_PYTHON"
export PATH="$SPARK_HOME/bin:$PATH"
export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
# same glob expansion as above so the py4j zip actually lands on PYTHONPATH
export PYTHONPATH="$(printf '%s:' "$SPARK_HOME"/python/lib/*.zip)$PYTHONPATH"
echo "SPARK_HOME=${SPARK_HOME};PYTHONPATH=${PYTHONPATH};PATH=${PATH}"
# paste the echoed values into the PyCharm run configuration's environment variables
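The same kind of sanity check works for the local build; this sketch assumes a local JDK is available for the driver:
# both should report 3.4.1 if the environment points at the local tarball
spark-submit --version
python -c 'from pyspark.sql import SparkSession; print(SparkSession.builder.master("local[1]").getOrCreate().version)'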
Yes, we use the 13 LTS runtime. I'm not sure how the steps would diverge with a different runtime version; technically it might work, but you'd need to try it.
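One caveat worth adding: for databricks-connect 13 and above, the package version generally has to track the cluster's runtime version, so for a different LTS runtime you would pin accordingly. The 14.3 pin below is purely a hypothetical illustration, not part of this setup:
# hypothetical pin for a 14.3 LTS cluster; match the version to your own runtime
pip install "databricks-connect==14.3.*"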