From 005d8af827d98b150807dcae4d83fec309095a8f Mon Sep 17 00:00:00 2001
From: Hyukjin Kwon
Date: Wed, 28 Aug 2024 10:00:56 +0900
Subject: [PATCH] fixup

---
 binder/Dockerfile | 2 +-
 binder/postBuild  | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/binder/Dockerfile b/binder/Dockerfile
index 6e3dd9155fb7a..7c6da19abbf9f 100644
--- a/binder/Dockerfile
+++ b/binder/Dockerfile
@@ -17,7 +17,7 @@
 FROM python:3.10-slim
 
 # install the notebook package
-RUN pip install --no-cache notebook jupyterlab
+RUN pip install --no-cache notebook jupyterlab
 
 # create user with a home directory
 ARG NB_USER
diff --git a/binder/postBuild b/binder/postBuild
index 4478a53f84fb4..7e6f32e691419 100755
--- a/binder/postBuild
+++ b/binder/postBuild
@@ -44,16 +44,16 @@ else
 fi
 
 # Set 'PYARROW_IGNORE_TIMEZONE' to suppress warnings from PyArrow.
-echo "export PYARROW_IGNORE_TIMEZONE=1" >> ~/.bash_profile
+echo "export PYARROW_IGNORE_TIMEZONE=1" >> ~/.bashrc
 
 # Add sbin to PATH to run `start-connect-server.sh`.
 SPARK_HOME=$(python -c "from pyspark.find_spark_home import _find_spark_home; print(_find_spark_home())")
-echo "export PATH=${PATH}:${SPARK_HOME}/sbin" >> ~/.bash_profile
-echo "export SPARK_HOME=${SPARK_HOME}" >> ~/.bash_profile
+echo "export PATH=${PATH}:${SPARK_HOME}/sbin" >> ~/.bashrc
+echo "export SPARK_HOME=${SPARK_HOME}" >> ~/.bashrc
 
 # Add Spark version to env for running command dynamically based on Spark version.
 SPARK_VERSION=$(python -c "import pyspark; print(pyspark.__version__)")
-echo "export SPARK_VERSION=${SPARK_VERSION}" >> ~/.bash_profile
+echo "export SPARK_VERSION=${SPARK_VERSION}" >> ~/.bashrc
 
 # Suppress warnings from Spark jobs, and UI progress bar.
 mkdir -p ~/.ipython/profile_default/startup