From 1db199e05eff53430296522522f351eb31c59fd7 Mon Sep 17 00:00:00 2001
From: Vladimir Lazarenko
Date: Tue, 23 Jan 2024 16:22:02 +0100
Subject: [PATCH] New datasources for `datafactory` (#24572)

* New datasources for `datafactory`

* `azurerm_data_factory_trigger_schedules`: Retrieve a list of all trigger schedules for a given Data Factory
* `azurerm_data_factory_trigger_schedule`: Retrieve a trigger schedule for a given Data Factory by name

* Handle error

* Update website/docs/d/data_factory_trigger_schedule.html.markdown

Co-authored-by: stephybun

* Update website/docs/d/data_factory_trigger_schedules.html.markdown

Co-authored-by: stephybun

* Update website/docs/d/data_factory_trigger_schedule.html.markdown

Co-authored-by: stephybun

* Update internal/services/datafactory/data_factory_trigger_schedules_data_source.go

Co-authored-by: stephybun

* Rework based on review feedback

---------

Co-authored-by: stephybun
---
 ...ta_factory_trigger_schedule_data_source.go | 318 ++++++++++++++++++
 ...ctory_trigger_schedule_data_source_test.go |  39 +++
 ...a_factory_trigger_schedules_data_source.go |  98 ++++++
 ...tory_trigger_schedules_data_source_test.go |  38 +++
 internal/services/datafactory/registration.go |   5 +-
 ...ata_factory_trigger_schedule.html.markdown |  86 +++++
 ...ta_factory_trigger_schedules.html.markdown |  43 +++
 7 files changed, 626 insertions(+), 1 deletion(-)
 create mode 100644 internal/services/datafactory/data_factory_trigger_schedule_data_source.go
 create mode 100644 internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go
 create mode 100644 internal/services/datafactory/data_factory_trigger_schedules_data_source.go
 create mode 100644 internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go
 create mode 100644 website/docs/d/data_factory_trigger_schedule.html.markdown
 create mode 100644 website/docs/d/data_factory_trigger_schedules.html.markdown

diff --git a/internal/services/datafactory/data_factory_trigger_schedule_data_source.go b/internal/services/datafactory/data_factory_trigger_schedule_data_source.go
new file mode 100644
index 000000000000..a9a5599658ad
--- /dev/null
+++ b/internal/services/datafactory/data_factory_trigger_schedule_data_source.go
@@ -0,0 +1,318 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0 + +package datafactory + +import ( + "context" + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" // nolint: staticcheck + "github.com/hashicorp/go-azure-sdk/resource-manager/datafactory/2018-06-01/factories" + "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/parse" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/validate" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type TriggerScheduleDataSource struct{} + +type TriggerScheduleDataSourceModel struct { + Name string `tfschema:"name"` + DataFactoryID string `tfschema:"data_factory_id"` + Description string `tfschema:"description"` + Schedule []TriggerSchedule `tfschema:"schedule"` + StartTime string `tfschema:"start_time"` + EndTime string `tfschema:"end_time"` + TimeZone string `tfschema:"time_zone"` + Frequency string `tfschema:"frequency"` + Interval int64 `tfschema:"interval"` + Activated bool `tfschema:"activated"` + PipelineName string `tfschema:"pipeline_name"` + Annotations []string `tfschema:"annotations"` +} + +type TriggerSchedule struct { + DaysOfMonth []int64 `tfschema:"days_of_month"` + DaysOfWeek []string `tfschema:"days_of_week"` + Hours []int64 `tfschema:"hours"` + Minutes []int64 `tfschema:"minutes"` + Monthly []TriggerScheduleScheduleMonthly `tfschema:"monthly"` +} + +type TriggerScheduleScheduleMonthly struct { + Weekday string `tfschema:"weekday"` + Week int64 `tfschema:"week"` +} + +var _ sdk.DataSource = TriggerScheduleDataSource{} + +func (d TriggerScheduleDataSource) ModelObject() interface{} { + return &TriggerScheduleDataSourceModel{} +} + +func (d TriggerScheduleDataSource) ResourceType() string { + return "azurerm_data_factory_trigger_schedule" +} + +func (d TriggerScheduleDataSource) Arguments() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validate.DataFactoryPipelineAndTriggerName(), + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: factories.ValidateFactoryID, + }, + } +} + +func (d TriggerScheduleDataSource) Attributes() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{ + "description": { + Type: pluginsdk.TypeString, + Computed: true, + }, + + "schedule": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "days_of_month": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeInt, + }, + }, + + "days_of_week": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "hours": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeInt, + }, + }, + + "minutes": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeInt, + }, + }, + + "monthly": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "weekday": { + Type: pluginsdk.TypeString, + Computed: true, + }, + + "week": { + Type: pluginsdk.TypeInt, + Computed: true, + }, + }, + }, + }, + }, + }, + }, + + "start_time": { + Type: pluginsdk.TypeString, + Computed: true, + }, 
+
+		"end_time": {
+			Type:     pluginsdk.TypeString,
+			Computed: true,
+		},
+
+		"time_zone": {
+			Type:     pluginsdk.TypeString,
+			Computed: true,
+		},
+
+		"frequency": {
+			Type:     pluginsdk.TypeString,
+			Computed: true,
+		},
+
+		"interval": {
+			Type:     pluginsdk.TypeInt,
+			Computed: true,
+		},
+
+		"activated": {
+			Type:     pluginsdk.TypeBool,
+			Computed: true,
+		},
+
+		"pipeline_name": {
+			Type:     pluginsdk.TypeString,
+			Computed: true,
+		},
+
+		"annotations": {
+			Type:     pluginsdk.TypeList,
+			Computed: true,
+			Elem: &pluginsdk.Schema{
+				Type: pluginsdk.TypeString,
+			},
+		},
+	}
+}
+
+func (d TriggerScheduleDataSource) Read() sdk.ResourceFunc {
+	return sdk.ResourceFunc{
+		Timeout: 5 * time.Minute,
+		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+			var model TriggerScheduleDataSourceModel
+			if err := metadata.Decode(&model); err != nil {
+				return err
+			}
+
+			subscriptionId := metadata.Client.Account.SubscriptionId
+			client := metadata.Client.DataFactory.TriggersClient
+
+			dataFactoryId, err := factories.ParseFactoryID(model.DataFactoryID)
+			if err != nil {
+				return err
+			}
+
+			id := parse.NewTriggerID(subscriptionId, dataFactoryId.ResourceGroupName, dataFactoryId.FactoryName, model.Name)
+			if err != nil {
+				return err
+			}
+
+			existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "")
+			if err != nil {
+				if utils.ResponseWasNotFound(existing.Response) {
+					return fmt.Errorf("%s was not found", id)
+				}
+				return fmt.Errorf("retrieving %s: %+v", id, err)
+			}
+
+			metadata.SetID(id)
+
+			model.Name = *existing.Name
+			model.DataFactoryID = dataFactoryId.ID()
+
+			scheduleTriggerProps, ok := existing.Properties.AsScheduleTrigger()
+			if !ok {
+				return fmt.Errorf("classifying %s: Expected: %q Received: %q", id.ID(), datafactory.TypeBasicTriggerTypeScheduleTrigger, *existing.Type)
+			}
+
+			if scheduleTriggerProps != nil {
+				model.Activated = scheduleTriggerProps.RuntimeState == datafactory.TriggerRuntimeStateStarted
+
+				if recurrence := scheduleTriggerProps.Recurrence; recurrence != nil {
+					if v := recurrence.StartTime; v != nil {
+						model.StartTime = v.Format(time.RFC3339)
+					}
+					if v := recurrence.EndTime; v != nil {
+						model.EndTime = v.Format(time.RFC3339)
+					}
+					model.Frequency = string(recurrence.Frequency)
+					model.Interval = int64(*recurrence.Interval)
+					model.TimeZone = *recurrence.TimeZone
+
+					if schedule := recurrence.Schedule; schedule != nil {
+						model.Schedule = flattenDataFactoryScheduleModel(schedule)
+					}
+				}
+
+				if pipelines := scheduleTriggerProps.Pipelines; pipelines != nil {
+					if len(*pipelines) > 0 {
+						pipeline := *pipelines
+						if reference := pipeline[0].PipelineReference; reference != nil {
+							model.PipelineName = *reference.ReferenceName
+						}
+					}
+				}
+
+				model.Annotations = flattenDataFactoryAnnotations(scheduleTriggerProps.Annotations)
+
+				if scheduleTriggerProps.Description != nil {
+					model.Description = *scheduleTriggerProps.Description
+				}
+
+			}
+			if err := metadata.Encode(&model); err != nil {
+				return fmt.Errorf("encoding: %+v", err)
+			}
+
+			return nil
+		},
+	}
+}
+
+func flattenDataFactoryScheduleModel(schedule *datafactory.RecurrenceSchedule) []TriggerSchedule {
+	if schedule == nil {
+		return []TriggerSchedule{}
+	}
+
+	result := TriggerSchedule{}
+	results := []TriggerSchedule{}
+
+	if schedule.Hours != nil {
+		for _, v := range *schedule.Hours {
+			result.Hours = append(result.Hours, int64(v))
+		}
+	}
+
+	if schedule.Minutes != nil {
+		for _, v := range *schedule.Minutes {
+			result.Minutes = append(result.Minutes, int64(v))
+		}
+	}
+
+ if schedule.MonthDays != nil { + for _, v := range *schedule.MonthDays { + result.DaysOfMonth = append(result.DaysOfMonth, int64(v)) + } + } + + if schedule.WeekDays != nil { + weekDays := make([]string, 0) + for _, v := range *schedule.WeekDays { + weekDays = append(weekDays, string(v)) + } + result.DaysOfWeek = weekDays + } + + if schedule.MonthlyOccurrences != nil { + var monthlyOccurrences []TriggerScheduleScheduleMonthly + for _, v := range *schedule.MonthlyOccurrences { + occurrence := TriggerScheduleScheduleMonthly{} + occurrence.Weekday = string(v.Day) + if v.Occurrence != nil { + occurrence.Week = int64(*v.Occurrence) + } + monthlyOccurrences = append(monthlyOccurrences, occurrence) + } + result.Monthly = monthlyOccurrences + } + results = append(results, result) + return results +} diff --git a/internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go b/internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go new file mode 100644 index 000000000000..485527917ce8 --- /dev/null +++ b/internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go @@ -0,0 +1,39 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package datafactory_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" +) + +type DataFactoryTriggerScheduleDataSource struct{} + +func TestAccDataFactoryTriggerScheduleDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_factory_trigger_schedule", "test") + r := DataFactoryTriggerScheduleDataSource{} + + data.DataSourceTest(t, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + }) +} + +func (DataFactoryTriggerScheduleDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_factory_trigger_schedule" "test" { + name = azurerm_data_factory_trigger_schedule.test.name + data_factory_id = azurerm_data_factory.test.id +} +`, TriggerScheduleResource{}.basic(data)) +} diff --git a/internal/services/datafactory/data_factory_trigger_schedules_data_source.go b/internal/services/datafactory/data_factory_trigger_schedules_data_source.go new file mode 100644 index 000000000000..5654690f3498 --- /dev/null +++ b/internal/services/datafactory/data_factory_trigger_schedules_data_source.go @@ -0,0 +1,98 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package datafactory + +import ( + "context" + "fmt" + "time" + + "github.com/Azure/go-autorest/autorest" + "github.com/hashicorp/go-azure-sdk/resource-manager/datafactory/2018-06-01/factories" + "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type TriggerSchedulesDataSource struct{} + +type TriggerSchedulesDataSourceModel struct { + DataFactoryID string `tfschema:"data_factory_id"` + Items []string `tfschema:"items"` +} + +func (d TriggerSchedulesDataSource) Arguments() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{ + "data_factory_id": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: factories.ValidateFactoryID, + }, + } +} + +func (d TriggerSchedulesDataSource) Attributes() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{ + "items": { + Type: pluginsdk.TypeList, + Computed: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + } +} + +func (d TriggerSchedulesDataSource) ModelObject() interface{} { + return &TriggerSchedulesDataSourceModel{} +} + +func (d TriggerSchedulesDataSource) ResourceType() string { + return "azurerm_data_factory_trigger_schedules" +} + +func (d TriggerSchedulesDataSource) Read() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 5 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + var model TriggerSchedulesDataSourceModel + if err := metadata.Decode(&model); err != nil { + return err + } + + client := metadata.Client.DataFactory.TriggersClient + + dataFactoryId, err := factories.ParseFactoryID(model.DataFactoryID) + if err != nil { + return err + } + + iter, err := client.ListByFactoryComplete(ctx, dataFactoryId.ResourceGroupName, dataFactoryId.FactoryName) + if err != nil { + if v, ok := err.(autorest.DetailedError); ok { + if utils.ResponseWasNotFound(autorest.Response{Response: v.Response}) { + return fmt.Errorf("fetching triggers list for %s", dataFactoryId) + } + } else { + return fmt.Errorf("fetching triggers list for %s: %+v", dataFactoryId, err) + } + return fmt.Errorf("fetching triggers list for %s: %+v", dataFactoryId, err) + } + + triggers := []string{} + for iter.NotDone() { + trigger := iter.Value() + triggers = append(triggers, *trigger.Name) + if err := iter.NextWithContext(ctx); err != nil { + return fmt.Errorf("fetching triggers list from Azure Data Factory %q, advancing iterator failed: %+v", dataFactoryId.ID(), err) + } + } + + metadata.SetID(dataFactoryId) + model.Items = triggers + + return metadata.Encode(&model) + }, + } +} diff --git a/internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go b/internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go new file mode 100644 index 000000000000..afb65185e550 --- /dev/null +++ b/internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go @@ -0,0 +1,38 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package datafactory_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" +) + +type DataFactoryTriggerSchedulesDataSource struct{} + +func TestAccDataFactoryTriggerSchedulesDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_factory_trigger_schedules", "test") + r := DataFactoryTriggerSchedulesDataSource{} + + data.DataSourceTest(t, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("items.#").IsNotEmpty(), + ), + }, + }) +} + +func (DataFactoryTriggerSchedulesDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_factory_trigger_schedules" "test" { + data_factory_id = azurerm_data_factory.test.id +} +`, TriggerScheduleResource{}.basic(data)) +} diff --git a/internal/services/datafactory/registration.go b/internal/services/datafactory/registration.go index 794ccb721fed..6871909efb4d 100644 --- a/internal/services/datafactory/registration.go +++ b/internal/services/datafactory/registration.go @@ -30,7 +30,10 @@ func (r Registration) WebsiteCategories() []string { } func (Registration) DataSources() []sdk.DataSource { - return []sdk.DataSource{} + return []sdk.DataSource{ + TriggerScheduleDataSource{}, + TriggerSchedulesDataSource{}, + } } func (Registration) Resources() []sdk.Resource { diff --git a/website/docs/d/data_factory_trigger_schedule.html.markdown b/website/docs/d/data_factory_trigger_schedule.html.markdown new file mode 100644 index 000000000000..4b25468f5c20 --- /dev/null +++ b/website/docs/d/data_factory_trigger_schedule.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Data Factory" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_factory_trigger_schedule" +description: |- + Gets information about a trigger schedule in Azure Data Factory. +--- + +# Data Source: azurerm_data_factory_trigger_schedule + +Use this data source to access information about a trigger schedule in Azure Data Factory. + +## Example Usage + +```hcl +data "azurerm_data_factory_trigger_schedule" "example" { + name = "example_trigger" + data_factory_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1" +} + +output "id" { + value = data.azurerm_data_factory_trigger_schedule.example.id +} +``` + +## Arguments Reference + +The following arguments are supported: + +- `name` - (Required) The name of the trigger schedule. + +- `data_factory_id` - (Required) The ID of the Azure Data Factory to fetch trigger schedule from. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +- `id` - The ID of the Azure Data Factory trigger schedule. + +* `description` - The Schedule Trigger's description. + +* `schedule` - A `schedule` block as described below, which further specifies the recurrence schedule for the trigger. + +* `start_time` - The time the Schedule Trigger will start. The time will be represented in UTC. + +* `time_zone` - The timezone of the start/end time. + +* `end_time` - The time the Schedule Trigger should end. The time will be represented in UTC. + +* `interval` - The interval for how often the trigger occurs. + +* `frequency` - The trigger frequency. 
+ +* `activated` - Specifies if the Data Factory Schedule Trigger is activated. + +* `pipeline_name` - The Data Factory Pipeline name that the trigger will act on. + +* `annotations` - List of tags that can be used for describing the Data Factory Schedule Trigger. + +--- + +A `schedule` block exports the following: + +* `days_of_month` - Day(s) of the month on which the trigger is scheduled. + +* `days_of_week` - Day(s) of the week on which the trigger is scheduled. + +* `hours` - Hours of the day on which the trigger is scheduled. + +* `minutes` - Minutes of the hour on which the trigger is scheduled. + +* `monthly` - A `monthly` block as documented below, which specifies the days of the month on which the trigger is scheduled. + +--- + +A `monthly` block exports the following: + +* `weekday` - The day of the week on which the trigger runs. + +* `week` - The occurrence of the specified day during the month. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions: + +- `read` - (Defaults to 5 minutes) Used when retrieving the Azure Data Factory trigger schedule. diff --git a/website/docs/d/data_factory_trigger_schedules.html.markdown b/website/docs/d/data_factory_trigger_schedules.html.markdown new file mode 100644 index 000000000000..26d0133c4712 --- /dev/null +++ b/website/docs/d/data_factory_trigger_schedules.html.markdown @@ -0,0 +1,43 @@ +--- +subcategory: "Data Factory" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_factory_trigger_schedules" +description: |- + Gets information about all existing trigger schedules in Azure Data Factory. +--- + +# Data Source: azurerm_data_factory_trigger_schedules + +Use this data source to access information about all existing trigger schedules in Azure Data Factory. + +## Example Usage + +```hcl +data "azurerm_data_factory_trigger_schedules" "example" { + data_factory_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1" +} + +output "items" { + value = data.azurerm_data_factory_trigger_schedules.example.items +} +``` + +## Arguments Reference + +The following arguments are supported: + +- `data_factory_id` - (Required) The ID of the Azure Data Factory to fetch trigger schedules from. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +- `id` - The ID of the Azure Data Factory. + +- `items` - A list of trigger schedule names available in this Azure Data Factory. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions: + +- `read` - (Defaults to 5 minutes) Used when retrieving the Azure Data Factory trigger schedules.
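Taken together, the two data sources introduced by this patch compose naturally: `azurerm_data_factory_trigger_schedules` lists trigger names, and each name can then be passed to `azurerm_data_factory_trigger_schedule` to read that trigger's details. The following is a minimal usage sketch (the Data Factory ID is a placeholder, not taken from the patch):

```hcl
data "azurerm_data_factory_trigger_schedules" "example" {
  data_factory_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1"
}

# Look up each trigger schedule returned above by name.
data "azurerm_data_factory_trigger_schedule" "example" {
  for_each = toset(data.azurerm_data_factory_trigger_schedules.example.items)

  name            = each.value
  data_factory_id = data.azurerm_data_factory_trigger_schedules.example.data_factory_id
}

# Map of trigger name => whether that trigger is currently started.
output "activated_triggers" {
  value = { for name, trigger in data.azurerm_data_factory_trigger_schedule.example : name => trigger.activated }
}
```

Because `items` carries only names, per-trigger attributes such as `frequency`, `schedule` and `activated` still come from the singular data source; iterating with `for_each` keeps both lookups declarative and avoids hard-coding trigger names.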