
Add integration test to spark workloads #35


Merged · 10 commits · Mar 20, 2019
4 changes: 4 additions & 0 deletions Makefile
@@ -116,6 +116,10 @@ lint:
find-missing-version:
	@./build/find-missing-version.sh

test-examples:
	$(MAKE) registry-all
	@./build/test-examples.sh

###############
# CI Commands #
###############
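For local use, the new target can be run from the repository root. A minimal sketch, assuming a compiled CLI at bin/cortex and a reachable Cortex operator (the test script below shells out to both):

# runs `make registry-all` first, then build/test-examples.sh against every examples/*/app.yaml
make test-examples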
87 changes: 87 additions & 0 deletions build/test-examples.sh
@@ -0,0 +1,87 @@
#!/bin/bash

# Copyright 2019 Cortex Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -eou pipefail

ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. >/dev/null && pwd)"
CORTEX="$ROOT/bin/cortex"

for example in $ROOT/examples/*/app.yaml; do
  timer=1200
  example_base_dir=$(dirname "${example}")
  retry="false"

  cd "$example_base_dir"
  echo "Deploying $example_base_dir"
  $CORTEX refresh

  api_names="$($CORTEX get api | sed '1,2d' | sed '/^$/d' | tr -s ' ' | cut -f 1 -d " ")"
  sample="$(find . -name "*.json")"

  while true; do
    current_status="$($CORTEX status)"
    echo "$current_status"

    error_count="$(echo "$current_status" | { grep "error" || test $? = 1; } | wc -l)"
    # accommodate the transient error `error: failed to connect to the operator...`
    if [ $error_count -gt "0" ] && [[ ! $current_status =~ ^error\:\ failed\ to\ connect\ to\ the\ operator.* ]]; then
      exit 1
    fi

    ready_count="$($CORTEX get api | sed '1,2d' | sed '/^$/d' | { grep "ready" || test $? = 1; } | wc -l)"
    total_count="$($CORTEX get api | sed '1,2d' | sed '/^$/d' | wc -l)"

    sleep 15 # account for API startup delay

    if [ "$ready_count" == "$total_count" ] && [ $total_count -ne "0" ]; then
      for api_name in $api_names; do
        echo "Running cx predict $api_name $sample"
        prediction_exit_code=0
        # capture the exit code explicitly; a bare assignment that fails would abort the script under `set -e`
        result="$($CORTEX predict $api_name $sample)" || prediction_exit_code=$?
        echo "$result"
        if [ $prediction_exit_code -ne 0 ]; then
          # accommodate the transient error `error: failed to connect to the operator...`
          # handle the `error: api ... is updating` error that occurs when the API status reports `ready` before the API is actually serving
          if [[ $result =~ ^error\:\ failed\ to\ connect\ to\ the\ operator.* ]] || [[ $result =~ ^error\:\ api.*is\ updating$ ]]; then
            echo "retrying prediction..."
            retry="true"
            break # skip requesting predictions from the remaining APIs and try again
          else
            echo "prediction failed"
            exit 1
          fi
        fi
      done

      if [ "$retry" == "false" ]; then
        break # successfully got predictions from all APIs for this example; move on to the next one
      else
        retry="false"
      fi
    fi

    timer=$((timer-15))
    echo "Running $example_base_dir. $timer seconds left before timing out."
    if [ $timer -lt "0" ]; then
      echo "timed out!"
      exit 1
    fi
  done

  $CORTEX delete
done

echo "Ran all examples successfully."
3 changes: 3 additions & 0 deletions images/test/Dockerfile
@@ -3,6 +3,9 @@ FROM cortexlabs/spark
RUN pip3 install pytest mock

COPY pkg/workloads /src
COPY pkg/aggregators /aggregators
COPY pkg/transformers /transformers

COPY images/test/run.sh /src/run.sh

WORKDIR /src
7 changes: 4 additions & 3 deletions images/test/run.sh
@@ -18,12 +18,13 @@
err=0
trap 'err=1' ERR

-cd spark_job
+cd lib
pytest
cd ..

-cd lib
-pytest
+cd spark_job
+pytest test/unit
+pytest test/integration
cd ..

test $err = 0
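Read against the previous version, this reorders the test runner: the shared lib suite runs first, and the spark_job tests are split into separate unit and integration runs. After the change, the relevant block of images/test/run.sh reads:

cd lib
pytest
cd ..

cd spark_job
pytest test/unit
pytest test/integration
cd ..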
2 changes: 2 additions & 0 deletions pkg/workloads/lib/__init__.py
@@ -11,3 +11,5 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .context import Context
217 changes: 0 additions & 217 deletions pkg/workloads/lib/aws.py

This file was deleted.
