Skip to content

Commit

Permalink
changed complex fields to pointers
Browse files Browse the repository at this point in the history
  • Loading branch information
warber committed Sep 4, 2024
1 parent 34e22b2 commit d787492
Show file tree
Hide file tree
Showing 11 changed files with 109 additions and 43 deletions.
18 changes: 11 additions & 7 deletions dynatrace/api/openpipeline/settings/configuration.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ import (
)

// Configuration is the root OpenPipeline settings object. Complex
// sub-objects are held as pointers so an absent section can be nil.
// (Diff residue fixed here: the block contained both the old value-typed
// and the new pointer-typed field declarations — duplicate field names
// do not compile; only the pointer versions are kept.)
type Configuration struct {
	// Kind identifies the object; note the JSON attribute is "id".
	Kind string `json:"id"`
	// Editable indicates if the user is allowed to edit this object.
	Editable *bool `json:"editable,omitempty"`
	// Version of the configuration object.
	Version string `json:"version"`
	// CustomBasePath as delivered in the "customBasePath" attribute.
	CustomBasePath string `json:"customBasePath"`
	// Endpoints and Pipelines are (un)marshalled manually, hence the "-" tag.
	Endpoints *Endpoints `json:"-"`
	Pipelines *Pipelines `json:"-"`
	// Routing is the dynamic routing table.
	Routing *RoutingTable `json:"routing"`
}

func (d *Configuration) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -75,6 +75,8 @@ func (d *Configuration) MarshalHCL(properties hcl.Properties) error {
"editable": d.Editable,
"kind": d.Kind,
"version": d.Version,
"pipelines": d.Pipelines,
"routing": d.Routing,
}); err != nil {
return err
}
Expand All @@ -88,6 +90,8 @@ func (d *Configuration) UnmarshalHCL(decoder hcl.Decoder) error {
"editable": &d.Editable,
"kind": &d.Kind,
"version": &d.Version,
"pipelines": &d.Pipelines,
"routing": &d.Routing,
})
}

Expand Down
6 changes: 3 additions & 3 deletions dynatrace/api/openpipeline/settings/pipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ const (
)

// Pipelines wraps the list of configured pipelines. The diff residue
// (old `[]Pipeline` plus new `[]*Pipeline` declarations of the same
// field) is resolved by keeping the pointer-element version.
type Pipelines struct {
	Pipelines []*Pipeline
}

func (ep *Pipelines) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -269,8 +269,8 @@ func (p DefaultPipeline) MarshalJSON() ([]byte, error) {

// ClassicPipeline is a pipeline variant that carries a classic
// processing stage on top of the embedded BasePipeline fields.
// Duplicate old/new field declarations from the diff are removed;
// the pointer-typed versions are kept.
type ClassicPipeline struct {
	BasePipeline
	// Processing is the classic processing stage; nil is omitted from JSON.
	Processing *ClassicProcessingStage `json:"processing,omitempty"`
	// SettingsSchema names the settings schema — presumably of the classic
	// pipeline configuration; confirm against the API docs.
	SettingsSchema string `json:"settingsSchema,omitempty"`
}

func (p *ClassicPipeline) Schema() map[string]*schema.Schema {
Expand Down
8 changes: 4 additions & 4 deletions dynatrace/api/openpipeline/settings/processor.go
Original file line number Diff line number Diff line change
Expand Up @@ -328,8 +328,8 @@ func (ep *FieldExtraction) UnmarshalHCL(decoder hcl.Decoder) error {

// BizEventExtractionProcessor configures business-event extraction.
// The optional sub-objects are pointers so unset values are omitted
// from JSON. Duplicate pre-change value-typed fields removed.
type BizEventExtractionProcessor struct {
	Processor
	EventProvider   *ValueAssignment `json:"eventProvider,omitempty"`
	EventType       *ValueAssignment `json:"eventType,omitempty"`
	FieldExtraction *FieldExtraction `json:"fieldExtraction,omitempty"`
}

Expand Down Expand Up @@ -395,7 +395,7 @@ func (ep BizEventExtractionProcessor) MarshalJSON() ([]byte, error) {

// DavisEventExtractionProcessor configures Davis event extraction.
// Duplicate pre-change `[]DavisEventProperty` declaration removed; the
// pointer-element slice is kept.
type DavisEventExtractionProcessor struct {
	Processor
	// Properties lists the event properties; an empty list is omitted.
	Properties []*DavisEventProperty `json:"properties,omitempty"`
}

func (ep *DavisEventExtractionProcessor) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -741,7 +741,7 @@ func (ep NoStorageProcessor) MarshalJSON() ([]byte, error) {

// SecurityContextProcessor carries the value assignment for the
// security-context processor. Duplicate pre-change value-typed field
// removed; the pointer version is kept.
type SecurityContextProcessor struct {
	Processor
	Value *ValueAssignment `json:"value"`
}

func (p *SecurityContextProcessor) Schema() map[string]*schema.Schema {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@ import (

// Type discriminators for data-extraction processors, matched against
// the "type" attribute of the raw JSON payload. The diff residue left
// both the old value "bizEvent" and the new "bizevent" — duplicate
// const declarations do not compile; the post-commit value is kept.
const (
	DavisEventExtractionProcessorType = "davis"
	BizEventExtractionProcessorType   = "bizevent"
)

// DataExtractionProcessors wraps the polymorphic processors of the
// data-extraction stage; custom JSON (un)marshalling elsewhere handles
// the entries. Duplicate pre-change field removed.
type DataExtractionProcessors struct {
	Processors []*DataExtractionProcessor
}

func (ep *DataExtractionProcessors) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -63,7 +63,7 @@ func (ep *DataExtractionProcessors) UnmarshalJSON(b []byte) error {
return err
}

ep.Processors = append(ep.Processors, processor)
ep.Processors = append(ep.Processors, &processor)
}
return nil
}
Expand Down
4 changes: 2 additions & 2 deletions dynatrace/api/openpipeline/settings/processor_endpoint.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import (
)

// EndpointProcessors wraps the processors attached to an ingest
// endpoint. Duplicate pre-change field removed; pointer elements kept.
type EndpointProcessors struct {
	Processors []*EndpointProcessor
}

func (ep *EndpointProcessors) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -59,7 +59,7 @@ func (ep *EndpointProcessors) UnmarshalJSON(b []byte) error {
return err
}

ep.Processors = append(ep.Processors, processor)
ep.Processors = append(ep.Processors, &processor)
}
return nil
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ const (
)

// MetricExtractionProcessors wraps the processors of the
// metric-extraction stage. Duplicate pre-change field removed.
type MetricExtractionProcessors struct {
	Processors []*MetricExtractionProcessor
}

func (ep *MetricExtractionProcessors) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -63,7 +63,7 @@ func (ep *MetricExtractionProcessors) UnmarshalJSON(b []byte) error {
return err
}

ep.Processors = append(ep.Processors, processor)
ep.Processors = append(ep.Processors, &processor)
}
return nil
}
Expand Down
68 changes: 65 additions & 3 deletions dynatrace/api/openpipeline/settings/processor_processing.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,11 @@ import (

// Type discriminators for processing-stage processors, matched against
// the "type" attribute of the raw JSON payload. SqlxProcessorType is
// the discriminator of the SQLX processor used by
// ClassicProcessingStageProcessor.UnmarshalJSON below.
const (
	TechnologyProcessorType = "technology"
	SqlxProcessorType = "sqlx"
)

// ProcessingStageProcessors wraps the processors of the processing
// stage. Duplicate pre-change field removed; pointer elements kept.
type ProcessingStageProcessors struct {
	Processors []*ProcessingStageProcessor
}

func (ep *ProcessingStageProcessors) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -62,7 +63,7 @@ func (ep *ProcessingStageProcessors) UnmarshalJSON(b []byte) error {
return err
}

ep.Processors = append(ep.Processors, processor)
ep.Processors = append(ep.Processors, &processor)
}
return nil
}
Expand Down Expand Up @@ -208,7 +209,7 @@ func (ep *ProcessingStageProcessor) UnmarshalJSON(b []byte) error {
}

// ClassicProcessingStageProcessors wraps the processors of a classic
// processing stage. Duplicate pre-change field removed.
type ClassicProcessingStageProcessors struct {
	Processors []*ClassicProcessingStageProcessor
}

func (ep *ClassicProcessingStageProcessors) Schema() map[string]*schema.Schema {
Expand All @@ -230,6 +231,38 @@ func (ep *ClassicProcessingStageProcessors) UnmarshalHCL(decoder hcl.Decoder) er
return decoder.Decode("processors", &ep.Processors)
}

// MarshalJSON serializes the wrapped processors as a flat JSON array,
// delegating the encoding of each entry to the processor itself.
func (ep ClassicProcessingStageProcessors) MarshalJSON() ([]byte, error) {
	raw := make([]json.RawMessage, 0, len(ep.Processors))
	for _, p := range ep.Processors {
		msg, err := p.MarshalJSON()
		if err != nil {
			return nil, err
		}
		raw = append(raw, msg)
	}

	return json.Marshal(raw)
}

// UnmarshalJSON decodes a JSON array into the processor list, replacing
// any entries held before; each element is decoded by the processor's
// own UnmarshalJSON.
func (ep *ClassicProcessingStageProcessors) UnmarshalJSON(b []byte) error {
	var raw []json.RawMessage
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}

	ep.Processors = nil
	for _, msg := range raw {
		var p ClassicProcessingStageProcessor
		if err := json.Unmarshal(msg, &p); err != nil {
			return err
		}
		ep.Processors = append(ep.Processors, &p)
	}
	return nil
}

// ClassicProcessingStageProcessor is a tagged-union wrapper over the
// concrete processor variants of a classic processing stage; as coded
// here only the SQLX variant exists.
type ClassicProcessingStageProcessor struct {
	sqlxProcessor *SqlxProcessor
}
Expand Down Expand Up @@ -258,3 +291,32 @@ func (ep *ClassicProcessingStageProcessor) UnmarshalHCL(decoder hcl.Decoder) err
"sqlx_processor": ep.sqlxProcessor,
})
}

// MarshalJSON encodes the variant that is set; if no variant is
// present an error is returned.
func (ep ClassicProcessingStageProcessor) MarshalJSON() ([]byte, error) {
	if ep.sqlxProcessor == nil {
		return nil, errors.New("missing ClassicProcessingStageProcessor value")
	}

	return json.Marshal(ep.sqlxProcessor)
}

// UnmarshalJSON extracts the "type" discriminator from the raw payload
// and decodes the payload into the matching concrete variant.
func (ep *ClassicProcessingStageProcessor) UnmarshalJSON(b []byte) error {
	ttype, err := ExtractType(b)
	if err != nil {
		return err
	}

	switch ttype {
	// FIX: this case previously matched SecurityContextProcessorType,
	// an apparent copy-paste slip — the only variant this wrapper holds
	// is the SQLX processor, and the SqlxProcessorType const added in
	// this file was otherwise unused.
	case SqlxProcessorType:
		sqlxProcessor := SqlxProcessor{}
		if err := json.Unmarshal(b, &sqlxProcessor); err != nil {
			return err
		}
		ep.sqlxProcessor = &sqlxProcessor

	default:
		return fmt.Errorf("unknown ClassicProcessingStageProcessor type: %s", ttype)
	}

	return nil
}
4 changes: 2 additions & 2 deletions dynatrace/api/openpipeline/settings/processor_security.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ const (
)

// SecurityContextProcessors wraps the processors of the
// security-context stage. Duplicate pre-change field removed.
type SecurityContextProcessors struct {
	Processors []*SecContextProcessor
}

func (ep *SecurityContextProcessors) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -62,7 +62,7 @@ func (ep *SecurityContextProcessors) UnmarshalJSON(b []byte) error {
return err
}

ep.Processors = append(ep.Processors, processor)
ep.Processors = append(ep.Processors, &processor)
}
return nil
}
Expand Down
4 changes: 2 additions & 2 deletions dynatrace/api/openpipeline/settings/processor_storage.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ const (
)

// StorageStageProcessors wraps the processors of the storage stage.
// Duplicate pre-change field removed; pointer elements kept.
type StorageStageProcessors struct {
	Processors []*StorageStageProcessor
}

func (ep *StorageStageProcessors) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -63,7 +63,7 @@ func (ep *StorageStageProcessors) UnmarshalJSON(b []byte) error {
return err
}

ep.Processors = append(ep.Processors, processor)
ep.Processors = append(ep.Processors, &processor)
}
return nil
}
Expand Down
12 changes: 6 additions & 6 deletions dynatrace/api/openpipeline/settings/routing.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ import (

// RoutingTable holds the dynamic routing configuration. Diff residue
// fixed: the block contained both value-typed and pointer-typed
// declarations of CatchAllPipeline and Entries — duplicate field names
// do not compile; only the pointer versions are kept.
type RoutingTable struct {
	// CatchAllPipeline The default pipeline records are routed into if no dynamic routing entries apply.
	CatchAllPipeline *RoutingTableEntryTarget `json:"catchAllPipeline"`

	// Editable Indicates if the user is allowed to edit this object based on permissions and builtin property.
	Editable *bool `json:"editable,omitempty"`

	// Entries List of all dynamic routes; (un)marshalled manually, hence the "-" tag.
	Entries *RoutingTableEntries `json:"-"`
}

func (t *RoutingTable) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -54,9 +54,9 @@ func (t *RoutingTable) MarshalHCL(properties hcl.Properties) error {

// UnmarshalHCL populates the routing table from the HCL decoder,
// handing the decoder pointers to the fields so they can be set in
// place. Diff residue fixed: the map contained both the old value
// entries and the new &-entries — duplicate map keys do not compile;
// the pointer versions are kept.
func (t *RoutingTable) UnmarshalHCL(decoder hcl.Decoder) error {
	return decoder.DecodeAll(map[string]any{
		"catch_all_pipeline": &t.CatchAllPipeline,
		"editable":           &t.Editable,
		"entries":            &t.Entries,
	})
}

Expand Down Expand Up @@ -133,7 +133,7 @@ func (t *RoutingTableEntryTarget) UnmarshalHCL(decoder hcl.Decoder) error {
}

// RoutingTableEntries wraps the list of dynamic routing entries.
// Duplicate pre-change field removed; pointer elements kept.
type RoutingTableEntries struct {
	Entries []*RoutingTableEntry
}

func (e *RoutingTableEntries) Schema() map[string]*schema.Schema {
Expand Down
18 changes: 9 additions & 9 deletions dynatrace/api/openpipeline/settings/stage.go
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,8 @@ func (d *MetricExtractionStage) UnmarshalJSON(b []byte) error {
}

// DataExtractionStage models the data-extraction stage of a pipeline.
// Duplicate pre-change fields removed; pointer versions kept.
type DataExtractionStage struct {
	// Editable indicates if the user is allowed to edit this object.
	Editable *bool `json:"editable,omitempty"`
	// Processors is (un)marshalled manually from the raw payload, hence "-".
	Processors *DataExtractionProcessors `json:"-"`
}

func (f *DataExtractionStage) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -151,13 +151,13 @@ func (d *DataExtractionStage) UnmarshalJSON(b []byte) error {

*d = DataExtractionStage(dataExStage.dataExtractionStage)

d.Processors = DataExtractionProcessors{}
d.Processors = &DataExtractionProcessors{}
return json.Unmarshal(dataExStage.RawProcessors, &d.Processors)
}

// ProcessingStage models the processing stage of a pipeline.
// Duplicate pre-change fields removed; pointer versions kept.
type ProcessingStage struct {
	// Editable indicates if the user is allowed to edit this object.
	Editable *bool `json:"editable,omitempty"`
	// Processors is (un)marshalled manually from the raw payload, hence "-".
	Processors *ProcessingStageProcessors `json:"-"`
}

func (f *ProcessingStage) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -223,7 +223,7 @@ func (d *ProcessingStage) UnmarshalJSON(b []byte) error {

*d = ProcessingStage(processingSt.processingStage)

d.Processors = ProcessingStageProcessors{}
d.Processors = &ProcessingStageProcessors{}
return json.Unmarshal(processingSt.RawProcessors, &d.Processors)
}

Expand Down Expand Up @@ -381,8 +381,8 @@ func (d *SecurityContextStage) UnmarshalJSON(b []byte) error {
}

// ClassicProcessingStage models the classic processing stage of a
// pipeline. Duplicate pre-change fields removed; pointer versions kept.
type ClassicProcessingStage struct {
	// Editable indicates if the user is allowed to edit this object.
	Editable *bool `json:"editable,omitempty"`
	// Processors is (un)marshalled manually from the raw payload, hence "-".
	Processors *ClassicProcessingStageProcessors `json:"-"`
}

func (f *ClassicProcessingStage) Schema() map[string]*schema.Schema {
Expand Down Expand Up @@ -448,6 +448,6 @@ func (d *ClassicProcessingStage) UnmarshalJSON(b []byte) error {

*d = ClassicProcessingStage(classicProcessingSt.classicProcessingStage)

d.Processors = ClassicProcessingStageProcessors{}
d.Processors = &ClassicProcessingStageProcessors{}
return json.Unmarshal(classicProcessingSt.RawProcessors, &d.Processors)
}

0 comments on commit d787492

Please sign in to comment.