SparkNLP 1033: Introducing LLAVA #3740
name: build
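
# Trigger on pushes and pull requests to master/main and release branches.
# Changes touching only docs, conda, scripts, examples, or Markdown/YAML files are ignored.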
on:
  push:
    paths-ignore:
      - 'docs/**'
      - 'conda/**'
      - 'scripts/**'
      - 'examples/**'
      - '**.md'
      - '**.yaml'
      - '**.yml'
    branches:
      - 'master'
      - '*release*'
      - 'release/**'
      - 'main'
  pull_request:
    paths-ignore:
      - 'docs/**'
      - 'conda/**'
      - 'scripts/**'
      - 'examples/**'
      - '**.md'
      - '**.yaml'
      - '**.yml'
    branches:
      - 'master'
      - '*release*'
      - 'release/**'
      - 'main'
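
# Three jobs build and test Spark NLP against Apache Spark 3.4.x, 3.5.x, and 3.3.x.
# All jobs run on macOS 13 with JDK 8 and are skipped when a commit message contains '[skip test]'.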
jobs:
  spark34:
    if: "! contains(toJSON(github.event.commits.*.message), '[skip test]')"
    runs-on: macos-13
    env:
      TF_CPP_MIN_LOG_LEVEL: 3
      JAVA_OPTS: "-Xmx4096m -XX:+UseG1GC"
    name: Build and Test on Apache Spark 3.4.x
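    # Checkout, set up Temurin JDK 8 and Python 3.7, build the assembly against Spark 3.4,
    # then run the Scala test suite with coverage and the fast-marked Python tests.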
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '8'
          cache: 'sbt'
      - name: Install Python 3.7
        uses: actions/setup-python@v2
        with:
          python-version: 3.7.7
          architecture: x64
      - name: Install Python packages (Python 3.7)
        run: |
          python -m pip install --upgrade pip
          pip install pyspark==3.4.0 numpy pytest
      - name: Build Spark NLP on Apache Spark 3.4.0
        run: |
          brew install sbt
          sbt -mem 4096 -Dis_spark34=true clean assemblyAndCopy
      - name: Test Spark NLP in Scala - Apache Spark 3.4.x
        run: |
          sbt -mem 4096 coverage test
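      # Only this Spark 3.4.x job reports coverage; it authenticates to Coveralls
      # using the repository's GITHUB_TOKEN as the Coveralls repo token.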
      - name: Upload coverage data to Coveralls
        run: sbt coverageReport coveralls
        env:
          COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: Apache Spark 3.4.x - Scala 2.12
      - name: Test Spark NLP in Python - Apache Spark 3.4.x
        run: |
          cd python
          python3.7 -m pytest -v -m fast
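
  # Same build-and-test flow against Apache Spark 3.5.0, using Python 3.10 and the
  # Adopt (Eclipse Adoptium) JDK 8 distribution; this job does not upload coverage.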
  spark35:
    if: "! contains(toJSON(github.event.commits.*.message), '[skip test]')"
    runs-on: macos-13
    env:
      TF_CPP_MIN_LOG_LEVEL: 3
      JAVA_OPTS: "-Xmx4096m -XX:+UseG1GC"
    name: Build and Test on Apache Spark 3.5.x
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v3
        with:
          distribution: 'adopt'
          java-version: '8'
          cache: 'sbt'
      - name: Install Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: 3.10.12
          architecture: x64
      - name: Install Python packages (Python 3.10)
        run: |
          python -m pip install --upgrade pip
          pip install pyspark==3.5.0 numpy pytest
      - name: Build Spark NLP on Apache Spark 3.5.0
        run: |
          brew install sbt
          sbt -mem 4096 -Dis_spark35=true clean assemblyAndCopy
      - name: Test Spark NLP in Scala - Apache Spark 3.5.x
        run: |
          sbt -mem 4096 test
      - name: Test Spark NLP in Python - Apache Spark 3.5.x
        run: |
          cd python
          python3.10 -m pytest -v -m fast
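
  # Same flow against Apache Spark 3.3.1 with Python 3.7; no coverage upload here either.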
  spark33:
    if: "! contains(toJSON(github.event.commits.*.message), '[skip test]')"
    runs-on: macos-13
    env:
      TF_CPP_MIN_LOG_LEVEL: 3
      JAVA_OPTS: "-Xmx4096m -XX:+UseG1GC"
    name: Build and Test on Apache Spark 3.3.x
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v3
        with:
          distribution: 'adopt'
          java-version: '8'
          cache: 'sbt'
      - name: Install Python 3.7
        uses: actions/setup-python@v2
        with:
          python-version: 3.7.7
          architecture: x64
      - name: Install Python packages (Python 3.7)
        run: |
          python -m pip install --upgrade pip
          pip install pyspark==3.3.1 numpy pytest
      - name: Build Spark NLP on Apache Spark 3.3.1
        run: |
          brew install sbt
          sbt -mem 4096 -Dis_spark33=true clean assemblyAndCopy
      - name: Test Spark NLP in Scala - Apache Spark 3.3.x
        run: |
          sbt -mem 4096 test
      - name: Test Spark NLP in Python - Apache Spark 3.3.x
        run: |
          cd python
          python3.7 -m pytest -v -m fast