-
Notifications
You must be signed in to change notification settings - Fork 41
/
Copy pathintegration_pipeline.go
319 lines (263 loc) · 12.4 KB
/
integration_pipeline.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
/*
Copyright 2022 Red Hat Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tekton
import (
"encoding/json"
"fmt"
"strings"
"os"
"time"
"github.com/go-logr/logr"
applicationapiv1alpha1 "github.com/konflux-ci/application-api/api/v1alpha1"
"github.com/konflux-ci/integration-service/api/v1beta2"
"github.com/konflux-ci/operator-toolkit/metadata"
tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/controller/controllerutil"
)
const (
	// PipelinesLabelPrefix is the prefix of the pipelines label.
	PipelinesLabelPrefix = "pipelines.appstudio.openshift.io"
	// TestLabelPrefix contains the prefix applied to labels and annotations related to testing.
	TestLabelPrefix = "test.appstudio.openshift.io"
	// PipelinesAsCodePrefix contains the prefix applied to labels and annotations copied from Pipelines as Code resources.
	PipelinesAsCodePrefix = "pac.test.appstudio.openshift.io"
	// BuildPipelineRunPrefix contains the prefix of build pipeline run related labels and annotations.
	BuildPipelineRunPrefix = "build.appstudio"
	// CustomLabelPrefix contains the prefix applied to custom user-defined labels and annotations.
	CustomLabelPrefix = "custom.appstudio.openshift.io"
	// ResourceLabelSuffix is the shared suffix used to build the snapshot, application
	// and component resource labels (see the label variables below).
	ResourceLabelSuffix = "appstudio.openshift.io"
	// PipelineTypeTest is the type for PipelineRuns created to run an integration Pipeline.
	PipelineTypeTest = "test"
	// TektonResolverGit is the name of the Tekton resolver for git.
	TektonResolverGit = "git"
	// TektonResolverGitParamURL is the name of the Tekton git resolver "url" param.
	TektonResolverGitParamURL = "url"
	// TektonResolverGitParamRevision is the name of the Tekton git resolver "revision" param.
	TektonResolverGitParamRevision = "revision"
)
var (
	// PipelinesTypeLabel is the label used to describe the type of pipeline.
	PipelinesTypeLabel = fmt.Sprintf("%s/%s", PipelinesLabelPrefix, "type")
	// TestNameLabel is the label used to specify the name of the Test associated with the PipelineRun.
	TestNameLabel = fmt.Sprintf("%s/%s", TestLabelPrefix, "name")
	// ScenarioNameLabel is the label used to specify the name of the IntegrationTestScenario associated with the PipelineRun.
	ScenarioNameLabel = fmt.Sprintf("%s/%s", TestLabelPrefix, "scenario")
	// SnapshotNameLabel is the label specifying the name of the Snapshot associated with the PipelineRun.
	SnapshotNameLabel = fmt.Sprintf("%s/%s", ResourceLabelSuffix, "snapshot")
	// EnvironmentNameLabel is the label specifying the name of the Environment associated with the PipelineRun.
	EnvironmentNameLabel = fmt.Sprintf("%s/%s", ResourceLabelSuffix, "environment")
	// ApplicationNameLabel is the label specifying the name of the Application associated with the PipelineRun.
	ApplicationNameLabel = fmt.Sprintf("%s/%s", ResourceLabelSuffix, "application")
	// ComponentNameLabel is the label specifying the name of the Component associated with the PipelineRun.
	ComponentNameLabel = fmt.Sprintf("%s/%s", ResourceLabelSuffix, "component")
	// OptionalLabel is the label used to specify if an IntegrationTestScenario is allowed to fail.
	OptionalLabel = fmt.Sprintf("%s/%s", TestLabelPrefix, "optional")
)
// IntegrationPipelineRun is a PipelineRun alias, so we can add new methods to it in this file.
// The Tekton PipelineRun is embedded, so all of its fields and methods are promoted; the
// builder-style With* methods below mutate the receiver and return it for chaining.
type IntegrationPipelineRun struct {
	tektonv1.PipelineRun
}
// AsPipelineRun exposes the embedded Tekton PipelineRun so the value can be handed
// directly to the Kubernetes client, which expects the concrete Tekton type.
func (r *IntegrationPipelineRun) AsPipelineRun() *tektonv1.PipelineRun {
	pipelineRun := &r.PipelineRun
	return pipelineRun
}
// NewIntegrationPipelineRun creates an empty PipelineRun in the given namespace. The name will be
// autogenerated, using the prefix passed as an argument to the function. The PipelineRun references
// the pipeline through the resolver (and its params) declared on the IntegrationTestScenario.
func NewIntegrationPipelineRun(prefix, namespace string, integrationTestScenario v1beta2.IntegrationTestScenario) *IntegrationPipelineRun {
	// Pre-size the slice: the number of resolver params is known up front.
	resolverParams := make([]tektonv1.Param, 0, len(integrationTestScenario.Spec.ResolverRef.Params))
	for _, scenarioParam := range integrationTestScenario.Spec.ResolverRef.Params {
		// Scenario params are plain strings; mirror them as string-typed Tekton params.
		resolverParams = append(resolverParams, tektonv1.Param{
			Name: scenarioParam.Name,
			Value: tektonv1.ParamValue{
				Type:      tektonv1.ParamTypeString,
				StringVal: scenarioParam.Value,
			},
		})
	}
	pipelineRun := tektonv1.PipelineRun{
		ObjectMeta: metav1.ObjectMeta{
			// GenerateName lets the API server append a random suffix to the prefix.
			GenerateName: prefix + "-",
			Namespace:    namespace,
		},
		Spec: tektonv1.PipelineRunSpec{
			PipelineRef: &tektonv1.PipelineRef{
				ResolverRef: tektonv1.ResolverRef{
					Resolver: tektonv1.ResolverName(integrationTestScenario.Spec.ResolverRef.Resolver),
					Params:   resolverParams,
				},
			},
		},
	}
	return &IntegrationPipelineRun{pipelineRun}
}
// WithUpdatedTestsGitResolver updates git resolver parameter values with the values from the
// input map. It updates only existing parameters and doesn't create new ones. If the PipelineRun
// does not use the git resolver, it is returned unchanged.
func (iplr *IntegrationPipelineRun) WithUpdatedTestsGitResolver(params map[string]string) *IntegrationPipelineRun {
	if iplr.Spec.PipelineRef.ResolverRef.Resolver != TektonResolverGit {
		// if the resolver is not git-resolver, we cannot update the git ref
		return iplr
	}
	// Index into the slice directly so the update mutates the stored param
	// (a range value would be a copy), and look each name up only once.
	resolverParams := iplr.Spec.PipelineRef.ResolverRef.Params
	for i := range resolverParams {
		if newValue, ok := params[resolverParams[i].Name]; ok {
			resolverParams[i].Value.StringVal = newValue
		}
	}
	return iplr
}
// WithFinalizer adds the given finalizer to the Integration PipelineRun's metadata.
func (r *IntegrationPipelineRun) WithFinalizer(finalizer string) *IntegrationPipelineRun {
	controllerutil.AddFinalizer(r, finalizer)
	return r
}
// WithExtraParam adds an extra param to the Integration PipelineRun. If the parameter is not part
// of the Pipeline definition, it will be silently ignored.
func (r *IntegrationPipelineRun) WithExtraParam(name string, value tektonv1.ParamValue) *IntegrationPipelineRun {
	param := tektonv1.Param{Name: name, Value: value}
	r.Spec.Params = append(r.Spec.Params, param)
	return r
}
// WithExtraParams adds all provided parameters to the Integration PipelineRun.
// A parameter with a non-empty Value becomes a string param, one with Values becomes an
// array param; otherwise an (empty) object-typed param is added.
func (r *IntegrationPipelineRun) WithExtraParams(params []v1beta2.PipelineParameter) *IntegrationPipelineRun {
	for _, param := range params {
		// Default to an empty object-typed value when neither form is set.
		value := tektonv1.ParamValue{Type: tektonv1.ParamTypeObject}
		if param.Value != "" {
			value = tektonv1.ParamValue{
				Type:      tektonv1.ParamTypeString,
				StringVal: param.Value,
			}
		} else if len(param.Values) > 0 {
			value = tektonv1.ParamValue{
				Type:     tektonv1.ParamTypeArray,
				ArrayVal: param.Values,
			}
		}
		r.WithExtraParam(param.Name, value)
	}
	return r
}
// WithSnapshot adds a param containing the Snapshot as a json string to the integration
// PipelineRun. It also adds the Snapshot name label, copies the Component name label if it
// exists, and propagates PAC/build/test/custom prefixed labels and annotations from the Snapshot.
func (r *IntegrationPipelineRun) WithSnapshot(snapshot *applicationapiv1alpha1.Snapshot) *IntegrationPipelineRun {
	// Marshalling the spec of a CRD should not fail, so the error is deliberately dropped.
	// If that ever proves useful, errors would need to be threaded through the builder
	// chain, e.g. via a final Complete() step that returns the object and the error.
	specJSON, _ := json.Marshal(snapshot.Spec)
	r.WithExtraParam("SNAPSHOT", tektonv1.ParamValue{
		Type:      tektonv1.ParamTypeString,
		StringVal: string(specJSON),
	})
	if r.ObjectMeta.Labels == nil {
		r.ObjectMeta.Labels = make(map[string]string)
	}
	r.ObjectMeta.Labels[SnapshotNameLabel] = snapshot.Name
	if component, ok := snapshot.GetLabels()[ComponentNameLabel]; ok {
		r.ObjectMeta.Labels[ComponentNameLabel] = component
	}
	// Copy PipelineRun PAC, build, test and custom annotations/labels from the Snapshot
	// to the integration test PipelineRun; copy errors are deliberately ignored (best effort).
	for _, prefix := range []string{PipelinesAsCodePrefix, BuildPipelineRunPrefix, TestLabelPrefix, CustomLabelPrefix} {
		_ = metadata.CopyAnnotationsByPrefix(&snapshot.ObjectMeta, &r.ObjectMeta, prefix)
		_ = metadata.CopyLabelsByPrefix(&snapshot.ObjectMeta, &r.ObjectMeta, prefix)
	}
	return r
}
// WithIntegrationLabels adds the type, optional flag and IntegrationTestScenario name as labels
// to the Integration PipelineRun. The optional label is only copied when it is present on
// the scenario.
func (r *IntegrationPipelineRun) WithIntegrationLabels(scenario *v1beta2.IntegrationTestScenario) *IntegrationPipelineRun {
	if r.ObjectMeta.Labels == nil {
		r.ObjectMeta.Labels = make(map[string]string)
	}
	r.ObjectMeta.Labels[PipelinesTypeLabel] = PipelineTypeTest
	r.ObjectMeta.Labels[ScenarioNameLabel] = scenario.Name
	if metadata.HasLabel(scenario, OptionalLabel) {
		r.ObjectMeta.Labels[OptionalLabel] = scenario.Labels[OptionalLabel]
	}
	return r
}
// WithIntegrationAnnotations copies the App Studio annotations from the
// IntegrationTestScenario to the PipelineRun.
func (r *IntegrationPipelineRun) WithIntegrationAnnotations(its *v1beta2.IntegrationTestScenario) *IntegrationPipelineRun {
	for key, value := range its.GetAnnotations() {
		// NOTE(review): this is a substring match, not a prefix match, so keys under
		// nested domains (e.g. "pac.test.appstudio.openshift.io/...") also qualify —
		// presumably intentional; confirm before tightening to HasPrefix.
		if !strings.Contains(key, "appstudio.openshift.io/") {
			continue
		}
		if err := metadata.SetAnnotation(r, key, value); err != nil {
			// this will only happen if we pass IntegrationPipelineRun as nil
			panic(err)
		}
	}
	return r
}
// WithApplication adds the name of the Application as a label to the Integration PipelineRun.
func (r *IntegrationPipelineRun) WithApplication(application *applicationapiv1alpha1.Application) *IntegrationPipelineRun {
	labels := r.ObjectMeta.Labels
	if labels == nil {
		labels = make(map[string]string)
		r.ObjectMeta.Labels = labels
	}
	labels[ApplicationNameLabel] = application.Name
	return r
}
// WithIntegrationTimeouts fetches the Integration timeouts from either the integrationTestScenario annotations or
// the environment variables and adds them to the integration PipelineRun.
func (r *IntegrationPipelineRun) WithIntegrationTimeouts(integrationTestScenario *v1beta2.IntegrationTestScenario, logger logr.Logger) *IntegrationPipelineRun {
pipelineTimeoutStr := os.Getenv("PIPELINE_TIMEOUT")
if metadata.HasAnnotation(integrationTestScenario, v1beta2.PipelineTimeoutAnnotation) {
pipelineTimeoutStr = integrationTestScenario.Annotations[v1beta2.PipelineTimeoutAnnotation]
}
taskTimeoutStr := os.Getenv("TASKS_TIMEOUT")
if metadata.HasAnnotation(integrationTestScenario, v1beta2.TasksTimeoutAnnotation) {
taskTimeoutStr = integrationTestScenario.Annotations[v1beta2.TasksTimeoutAnnotation]
}
finallyTimeoutStr := os.Getenv("FINALLY_TIMEOUT")
if metadata.HasAnnotation(integrationTestScenario, v1beta2.FinallyTimeoutAnnotation) {
finallyTimeoutStr = integrationTestScenario.Annotations[v1beta2.FinallyTimeoutAnnotation]
}
r.Spec.Timeouts = &tektonv1.TimeoutFields{}
if pipelineTimeoutStr != "" {
pipelineRunTimeout, err := time.ParseDuration(pipelineTimeoutStr)
if err == nil {
r.Spec.Timeouts.Pipeline = &metav1.Duration{Duration: pipelineRunTimeout}
} else {
logger.Error(err, "failed to parse the PIPELINE_TIMEOUT")
}
}
if taskTimeoutStr != "" {
taskTimeout, err := time.ParseDuration(taskTimeoutStr)
if err == nil {
r.Spec.Timeouts.Tasks = &metav1.Duration{Duration: taskTimeout}
} else {
logger.Error(err, "failed to parse the TASKS_TIMEOUT")
}
}
if finallyTimeoutStr != "" {
finallyTimeout, err := time.ParseDuration(finallyTimeoutStr)
if err == nil {
r.Spec.Timeouts.Finally = &metav1.Duration{Duration: finallyTimeout}
} else {
logger.Error(err, "failed to parse the FINALLY_TIMEOUT")
}
}
// If the sum of tasks and finally timeout durations is greater than the pipeline timeout duration,
// increase the pipeline timeout to prevent a pipelineRun validation failure
if r.Spec.Timeouts.Tasks != nil && r.Spec.Timeouts.Finally != nil && r.Spec.Timeouts.Pipeline != nil &&
r.Spec.Timeouts.Tasks.Duration+r.Spec.Timeouts.Finally.Duration > r.Spec.Timeouts.Pipeline.Duration {
r.Spec.Timeouts.Pipeline = &metav1.Duration{Duration: r.Spec.Timeouts.Tasks.Duration + r.Spec.Timeouts.Finally.Duration}
logger.Info(fmt.Sprintf("Setting the pipeline timeout for %s to be the sum of tasks + finally: %.1f hours", r.Name,
r.Spec.Timeouts.Pipeline.Duration.Hours()))
}
return r
}