[cross version tests] Clean up commands for installing dev versions (mlflow#4534)

* Clean up pip installation commands for dev versions

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>

* remove pip freeze grep

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>

* remove unrelated change

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>

* Fix pyspark command

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>

* Fix catboost command

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>

* Fix catboost command

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>

* remove -

Signed-off-by: harupy <17039389+harupy@users.noreply.github.com>
harupy authored Jul 8, 2021
1 parent e0e7181 commit c96f4d8
Showing 2 changed files with 16 additions and 49 deletions.
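
For context on the pattern the recipes below converge on: pip can install a package straight from a Git URL and resolve the default branch's HEAD on its own, so the explicit SHA lookup that several entries used to do becomes optional. A minimal sketch, using scikit-learn purely as an illustration:

    # new style: one line, pip clones and builds the latest default-branch commit
    pip install git+https://github.com/scikit-learn/scikit-learn.git

    # the removed two-step variant pinned the same commit explicitly; still useful
    # only when a specific ref has to be recorded or reproduced later
    head_sha=$(git ls-remote https://github.com/scikit-learn/scikit-learn.git HEAD | cut -f1)
    pip install git+https://github.com/scikit-learn/scikit-learn.git@$head_sha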
.github/workflows/README.md (2 changes: 1 addition & 1 deletion)
@@ -57,4 +57,4 @@ pytest dev/set_matrix.py --doctest-modules
 
 1. Click `Labels` in the right sidebar.
 2. Select the `enable-dev-tests` label.
-3. Push a commit or re-run the `Cross version-tests` workflow.
+3. Push a commit or re-run the `Cross version tests` workflow.
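
The pytest dev/set_matrix.py --doctest-modules command shown in the hunk's context line can be exercised locally before pushing; a minimal invocation, assuming pytest is installed and the working directory is the repository root:

    pip install pytest
    pytest dev/set_matrix.py --doctest-modules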
mlflow/ml-package-versions.yml (63 changes: 15 additions & 48 deletions)
@@ -2,8 +2,7 @@ sklearn:
   package_info:
     pip_release: "scikit-learn"
     install_dev: |
-      head_sha=$(git ls-remote https://github.com/scikit-learn/scikit-learn.git HEAD | cut -f1)
-      pip install git+https://github.com/scikit-learn/scikit-learn.git@$head_sha
+      pip install git+https://github.com/scikit-learn/scikit-learn.git

   models:
     minimum: "0.20.3"
@@ -35,8 +34,7 @@ pytorch-lightning:
   package_info:
     pip_release: "pytorch-lightning"
     install_dev: |
-      head_sha=$(git ls-remote https://github.com/PytorchLightning/pytorch-lightning.git HEAD | cut -f1)
-      pip install git+https://github.com/PytorchLightning/pytorch-lightning.git@$head_sha
+      pip install git+https://github.com/PytorchLightning/pytorch-lightning.git

   autologging:
     minimum: "1.0.5"
@@ -121,11 +119,7 @@ xgboost:
   package_info:
     pip_release: "xgboost"
     install_dev: |
-      temp_dir=$(mktemp -d)
-      git clone --recursive https://github.com/dmlc/xgboost.git $temp_dir
-      git --git-dir=$temp_dir/.git rev-parse HEAD
-      cd $temp_dir/python-package
-      python setup.py install
+      pip install git+https://github.com/dmlc/xgboost.git#subdirectory=python-package

   models:
     minimum: "0.90"
@@ -145,11 +139,7 @@ lightgbm:
   package_info:
     pip_release: "lightgbm"
     install_dev: |
-      temp_dir=$(mktemp -d)
-      git clone --recursive https://github.com/microsoft/LightGBM.git $temp_dir
-      git --git-dir=$temp_dir/.git rev-parse HEAD
-      cd $temp_dir/python-package
-      python setup.py install
+      pip install git+https://github.com/microsoft/LightGBM.git#subdirectory=python-package

   models:
     minimum: "2.3.1"
@@ -170,24 +160,12 @@ catboost:
     pip_release: "catboost"
     install_dev: |
       # The cross-version-tests workflow runs this command with the environment variable `CACHE_DIR`
-      if [ -d "$CACHE_DIR" ] && [ ! -z $(find $CACHE_DIR -type f -name "catboost-*.whl") ]; then
-        pip install $(find $CACHE_DIR -type f -name "catboost-*.whl")
-      else
-        # Build wheel from source
-        temp_dir=$(mktemp -d)
-        git clone --depth 1 https://github.com/catboost/catboost.git $temp_dir
-        git --git-dir=$temp_dir/.git rev-parse HEAD
-        cd $temp_dir/catboost/python-package
-        python setup.py bdist_wheel
-        # Copy wheel in cache directory
-        wheel_path=$(find dist -type f -name "*.whl")
-        mkdir -p $CACHE_DIR
-        cp $wheel_path $CACHE_DIR
-        # Install wheel
-        pip install $wheel_path
+      if [ ! -d "$CACHE_DIR" ] || [ -z $(find $CACHE_DIR -type f -name "catboost-*.whl") ]; then
+        head_sha=$(git ls-remote https://github.com/catboost/catboost.git HEAD | cut -f1)
+        pip wheel --no-deps --wheel-dir $CACHE_DIR \
+          git+https://github.com/catboost/catboost.git@$head_sha#subdirectory=catboost/python-package
       fi
+      pip install $(find $CACHE_DIR -type f -name "catboost-*.whl")

   models:
     minimum: "0.23.1"
@@ -242,12 +220,7 @@ onnx:
     pip_release: "onnx"
     install_dev: |
       sudo apt-get install protobuf-compiler libprotoc-dev
-      temp_dir=$(mktemp -d)
-      git clone https://github.com/onnx/onnx.git $temp_dir
-      git --git-dir=$temp_dir/.git rev-parse HEAD
-      cd $temp_dir
-      git submodule update --init --recursive
-      python setup.py install
+      pip install git+https://github.com/onnx/onnx.git

   models:
     minimum: "1.5.0"
@@ -260,8 +233,7 @@ spacy:
   package_info:
     pip_release: "spacy"
     install_dev: |
-      head_sha=$(git ls-remote https://github.com/explosion/spaCy.git HEAD | cut -f1)
-      pip install git+https://github.com/explosion/spaCy.git@$head_sha
+      pip install git+https://github.com/explosion/spaCy.git

   models:
     minimum: "2.2.4"
@@ -274,8 +246,7 @@ statsmodels:
   package_info:
     pip_release: "statsmodels"
     install_dev: |
-      head_sha=$(git ls-remote https://github.com/statsmodels/statsmodels.git HEAD | cut -f1)
-      pip install git+https://github.com/statsmodels/statsmodels.git@$head_sha
+      pip install git+https://github.com/statsmodels/statsmodels.git

   models:
     minimum: "0.11.1"
@@ -294,9 +265,7 @@ spark:
     pip_release: "pyspark"
     install_dev: |
       # The cross-version-tests workflow runs this command with the environment variable `CACHE_DIR`
-      if [ -d "$CACHE_DIR" ] && [ ! -z $(find $CACHE_DIR -type f -name "pyspark-*.whl") ]; then
-        pip install $(find $CACHE_DIR -type f -name "pyspark-*.whl")
-      else
+      if [ ! -d "$CACHE_DIR" ] || [ -z $(find $CACHE_DIR -type f -name "pyspark-*.whl") ]; then
         # Build wheel from source
         temp_dir=$(mktemp -d)
         git clone --depth 1 https://github.com/apache/spark.git $temp_dir
@@ -308,13 +277,11 @@ spark:
         python setup.py bdist_wheel
         # Copy wheel in cache directory
-        wheel_path=$(find dist -type f -name "*.whl")
+        wheel_path=$(find dist -type f -name "pyspark-*.whl")
         mkdir -p $CACHE_DIR
         cp $wheel_path $CACHE_DIR
-        # Install wheel
-        pip install $wheel_path
       fi
+      pip install $(find $CACHE_DIR -type f -name "pyspark-*.whl")

   models:
     minimum: "3.0.0"
