Skip to content

Commit 77772a1

Browse files
committed
Refactored tests to use generated client instead of kubectl
1 parent 83cb8dd commit 77772a1

File tree

7 files changed

+142
-90
lines changed

7 files changed

+142
-90
lines changed

Gopkg.lock

+37-37
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

docs/quick-start-guide.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -184,9 +184,9 @@ The operator submits the Spark Pi example to run once it receives an event indic
184184
The Kubernetes Operator for Apache Spark comes with an optional mutating admission webhook for customizing Spark driver and executor pods based on the specification in `SparkApplication` objects, e.g., mounting user-specified ConfigMaps and volumes, and setting pod affinity/anti-affinity, and adding tolerations.
185185

186186
The webhook requires an X509 certificate for TLS for pod admission requests and responses between the Kubernetes API server and the webhook server running inside the operator. For that, the certificate and key files must be accessible by the webhook server.
187-
The Spark Operator ships with a tool at `hack/gencerts.sh` for generating the CA and server certificate and putting the certificate and key files into a secret named `spark-webhook-certs` in the namespace `spark-operator`. This secret will be mounted into the Spark Operator pod.
187+
The Kubernetes Operator for Spark ships with a tool at `hack/gencerts.sh` for generating the CA and server certificate and putting the certificate and key files into a secret named `spark-webhook-certs` in the namespace `spark-operator`. This secret will be mounted into the operator pod.
188188

189-
Run the following command to create secret with certificate and key files using Batch Job, and install the Spark Operator Deployment with the mutating admission webhook:
189+
Run the following command to create the secret containing the certificate and key files using a Batch Job, and install the operator Deployment with the mutating admission webhook:
190190

191191
```bash
192192
$ kubectl apply -f manifest/spark-operator-with-webhook.yaml

test/e2e/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,4 +21,4 @@ Note that all tests are run on a live Kubernetes cluster. After the tests are do
2121

2222
* `basic_test.go`
2323

24-
This test submits `spark-pi.yaml` contained in the `examples` directory using `kubectl`. It then checks that the Spark job successfully completes with the right result of Pi.
24+
This test submits `spark-pi.yaml` contained in the `examples` directory. It then checks that the Spark job successfully completes with the right result of Pi.

test/e2e/basic_test.go

+20-40
Original file line numberDiff line numberDiff line change
@@ -17,38 +17,20 @@ limitations under the License.
1717
package e2e
1818

1919
import (
20-
"bytes"
21-
"encoding/json"
20+
"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/apis/sparkoperator.k8s.io/v1alpha1"
2221
"github.com/stretchr/testify/assert"
23-
"io"
22+
"k8s.io/api/core/v1"
23+
appFramework "k8s.io/spark-on-k8s-operator/test/e2e/framework"
2424
"log"
25-
"os/exec"
2625
"strings"
2726
"testing"
2827
"time"
2928
)
3029

31-
func convertStdoutToString(stdout io.ReadCloser) string {
32-
buf := new(bytes.Buffer)
33-
buf.ReadFrom(stdout)
34-
return buf.String()
35-
}
36-
37-
func runCmdAndReturnStdout(t *testing.T, cmd string, cmdArgs ...string) string {
38-
statusCmd := exec.Command(cmd, cmdArgs...)
39-
stdout, err := statusCmd.StdoutPipe()
40-
assert.Equal(t, err, nil)
41-
err = statusCmd.Start()
42-
assert.Equal(t, err, nil)
43-
return convertStdoutToString(stdout)
44-
}
45-
46-
func getJobStatus(t *testing.T, statusStr string, result map[string]interface{}) string {
47-
json.Unmarshal([]byte(statusStr), &result)
48-
statusField := result["status"].(map[string]interface{})["applicationState"]
49-
errMsg := statusField.(map[string]interface{})["errorMessage"].(string)
50-
assert.Equal(t, errMsg, "")
51-
return statusField.(map[string]interface{})["state"].(string)
30+
func getJobStatus(t *testing.T) v1alpha1.ApplicationStateType {
31+
app, err := appFramework.GetSparkApplication(framework.SparkApplicationClient, "default", "spark-pi")
32+
assert.Equal(t, nil, err)
33+
return app.Status.AppState.State
5234
}
5335

5436
func TestSubmitSparkPiYaml(t *testing.T) {
@@ -57,33 +39,31 @@ func TestSubmitSparkPiYaml(t *testing.T) {
5739
// Wait for test job to finish. Time out after 90 seconds.
5840
timeout := 90
5941

60-
yamlPath := "../../examples/spark-pi.yaml"
61-
submitResult := runCmdAndReturnStdout(t, "kubectl", "apply", "-f", yamlPath)
62-
assert.Equal(t, "sparkapplication.sparkoperator.k8s.io/spark-pi created\n", submitResult)
42+
sa, err := appFramework.MakeSparkApplicationFromYaml("../../examples/spark-pi.yaml")
43+
assert.Equal(t, nil, err)
44+
err = appFramework.CreateSparkApplication(framework.SparkApplicationClient, "default", sa)
45+
assert.Equal(t, nil, err)
6346

64-
statusStr := runCmdAndReturnStdout(t, "kubectl", "get", "sparkapplication", "spark-pi", "-o", "json")
65-
var result map[string]interface{}
66-
status := getJobStatus(t, statusStr, result)
47+
status := getJobStatus(t)
6748

6849
timePassed := 0
6950
// Update job status every 5 seconds until job is done or timeout threshold is reached.
7051
for status != "COMPLETED" && timePassed <= timeout {
7152
log.Print("Waiting for the Spark job to finish...")
7253
time.Sleep(5 * time.Second)
7354
timePassed += 5
74-
statusStr = runCmdAndReturnStdout(t, "kubectl", "get", "sparkapplication", "spark-pi", "-o", "json")
75-
status = getJobStatus(t, statusStr, result)
55+
status = getJobStatus(t)
7656
}
7757
if timePassed > timeout {
7858
log.Fatalf("Time out waiting for Spark job to finish!")
7959
}
8060

81-
json.Unmarshal([]byte(statusStr), &result)
82-
driverInfo := result["status"].(map[string]interface{})["driverInfo"]
83-
podName := driverInfo.(map[string]interface{})["podName"].(string)
84-
logStr := runCmdAndReturnStdout(t, "kubectl", "logs", podName)
85-
assert.NotEqual(t, -1, strings.Index(logStr, "Pi is roughly 3"))
61+
app, _ := appFramework.GetSparkApplication(framework.SparkApplicationClient, "default", "spark-pi")
62+
podName := app.Status.DriverInfo.PodName
63+
rawLogs, err := framework.KubeClient.CoreV1().Pods("default").GetLogs(podName, &v1.PodLogOptions{}).Do().Raw()
64+
assert.Equal(t, nil, err)
65+
assert.NotEqual(t, -1, strings.Index(string(rawLogs), "Pi is roughly 3"))
8666

87-
logStr = runCmdAndReturnStdout(t, "kubectl", "delete", "sparkapplication", "spark-pi")
88-
assert.Equal(t, "sparkapplication.sparkoperator.k8s.io \"spark-pi\" deleted\n", logStr)
67+
err = appFramework.DeleteSparkApplication(framework.SparkApplicationClient, "default", "spark-pi")
68+
assert.Equal(t, nil, err)
8969
}

0 commit comments

Comments
 (0)