draft: investigating max files per commit. #159

Draft · wants to merge 8 commits into base: main
74 changes: 0 additions & 74 deletions .github/workflows/ci.yml
@@ -52,77 +52,3 @@ jobs:
module-change-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/**
module-asset-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/**
use-ssh-source-format: true

- name: Test Action Outputs
id: test-outputs
run: |
echo "Testing action outputs..."

# Test if outputs are set
if [[ -n "${{ steps.test-action.outputs.changed-module-names }}" ]]; then
echo "✅ Output 'changed-modules-names': ${{ steps.test-action.outputs.changed-module-names }}"
else
echo "❌ No changed module names found"
fi

if [[ -n "${{ steps.test-action.outputs.changed-module-paths }}" ]]; then
echo "✅ Output 'changed-module-paths': ${{ steps.test-action.outputs.changed-module-paths }}"
else
echo "❌ No changed module paths found"
fi

if [[ -n "${{ steps.test-action.outputs.changed-modules-map }}" ]]; then
echo "✅ Output 'changed-modules-map':"
echo '${{ steps.test-action.outputs.changed-modules-map }}' | jq -r '
"Found \(length) changed module(s):",
(to_entries[] |
"• \(.key):",
" - Path: \(.value.path)",
" - Current Tag: \(.value.currentTag)",
" - Next Tag: \(.value.nextTag)",
" - Release Type: \(.value.releaseType)"
)
'
else
echo "❌ No changed modules map found"
fi

# Silently validate JSON structure is an object
echo '${{ steps.test-action.outputs.changed-modules-map }}' | jq -e 'type == "object"' > /dev/null || {
echo "❌ Expected object type in changed-modules-map"
exit 1
}

# Test new outputs for all modules
if [[ -n "${{ steps.test-action.outputs.all-module-names }}" ]]; then
echo "✅ Output 'all-module-names': ${{ steps.test-action.outputs.all-module-names }}"
else
echo "❌ No all module names found"
fi

if [[ -n "${{ steps.test-action.outputs.all-module-paths }}" ]]; then
echo "✅ Output 'all-module-paths': ${{ steps.test-action.outputs.all-module-paths }}"
else
echo "❌ No all module paths found"
fi

if [[ -n "${{ steps.test-action.outputs.all-modules-map }}" ]]; then
echo "✅ Output 'all-modules-map':"
echo '${{ steps.test-action.outputs.all-modules-map }}' | jq -r '
"Found \(length) total module(s):",
(to_entries[] |
"• \(.key):",
" - Path: \(.value.path)",
" - Latest Tag: \(.value.latestTag)",
" - Latest Tag Version: \(.value.latestTagVersion)"
)
'
else
echo "❌ No all modules map found"
fi

# Silently validate JSON structure is an object
echo '${{ steps.test-action.outputs.all-modules-map }}' | jq -e 'type == "object"' > /dev/null || {
echo "❌ Expected object type in all-modules-map"
exit 1
}
Empty file added screenshots/test copy 2/1
Empty file.
Empty file added screenshots/test copy 3/1
Empty file.
Empty file added screenshots/test copy 4/1
Empty file.
Empty file added screenshots/test copy 5/1
Empty file.
Empty file added screenshots/test copy/1
Empty file.
Empty file added screenshots/test copy/1 copy
Empty file.
Empty file added screenshots/test copy/test/1
Empty file.
Empty file added screenshots/test/1
Empty file.
Empty file added screenshots/test/1 copy
Empty file.
Empty file added screenshots/test/1 copy 10
Empty file.
Empty file added screenshots/test/1 copy 11
Empty file.
Empty file added screenshots/test/1 copy 12
Empty file.
Empty file added screenshots/test/1 copy 13
Empty file.
Empty file added screenshots/test/1 copy 14
Empty file.
Empty file added screenshots/test/1 copy 15
Empty file.
Empty file added screenshots/test/1 copy 16
Empty file.
Empty file added screenshots/test/1 copy 17
Empty file.
Empty file added screenshots/test/1 copy 18
Empty file.
Empty file added screenshots/test/1 copy 19
Empty file.
Empty file added screenshots/test/1 copy 2
Empty file.
Empty file added screenshots/test/1 copy 20
Empty file.
Empty file added screenshots/test/1 copy 21
Empty file.
Empty file added screenshots/test/1 copy 22
Empty file.
Empty file added screenshots/test/1 copy 23
Empty file.
Empty file added screenshots/test/1 copy 24
Empty file.
Empty file added screenshots/test/1 copy 25
Empty file.
Empty file added screenshots/test/1 copy 26
Empty file.
Empty file added screenshots/test/1 copy 27
Empty file.
Empty file added screenshots/test/1 copy 28
Empty file.
Empty file added screenshots/test/1 copy 29
Empty file.
Empty file added screenshots/test/1 copy 3
Empty file.
Empty file added screenshots/test/1 copy 30
Empty file.
Empty file added screenshots/test/1 copy 31
Empty file.
Empty file added screenshots/test/1 copy 32
Empty file.
Empty file added screenshots/test/1 copy 33
Empty file.
Empty file added screenshots/test/1 copy 34
Empty file.
Empty file added screenshots/test/1 copy 35
Empty file.
Empty file added screenshots/test/1 copy 4
Empty file.
Empty file added screenshots/test/1 copy 5
Empty file.
Empty file added screenshots/test/1 copy 6
Empty file.
Empty file added screenshots/test/1 copy 7
Empty file.
Empty file added screenshots/test/1 copy 8
Empty file.
Empty file added screenshots/test/1 copy 9
Empty file.
3 changes: 2 additions & 1 deletion src/main.ts
@@ -128,6 +128,7 @@ export async function run(): Promise<void> {
const terraformChangedModules = getTerraformChangedModules(terraformModules);
const terraformModuleNamesToRemove = getTerraformModulesToRemove(allTags, terraformModules);


if (!context.isPrMergeEvent) {
await handleReleasePlanComment(config, terraformChangedModules, terraformModuleNamesToRemove);
} else {
@@ -185,7 +186,7 @@ export async function run(): Promise<void> {
setOutput('changed-modules-map', changedModulesMap);
setOutput('all-module-names', allModuleNames);
setOutput('all-module-paths', allModulePaths);
setOutput('all-modules-map', allModulesMap);
setOutput('all-modules-map', allModulesMap);
} catch (error) {
if (error instanceof Error) {
setFailed(error.message);
5 changes: 5 additions & 0 deletions tf-modules2/s3-bucket-object copy 10/README.md
@@ -0,0 +1,5 @@
# AWS S3 Bucket Object Terraform Example Module

Creates S3 bucket objects with different configurations.

## Usage
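
For orientation, a minimal invocation of this module might look like the sketch below. It is not part of this PR's diff; the module path, bucket name, and object key are assumptions, and the inputs correspond to the variables declared in the module's variables.tf later in this diff.

```hcl
# Illustrative sketch only; not part of this PR.
# Assumes the module is consumed from its in-repo path and that the
# bucket named below already exists.
module "s3_object" {
  source = "./tf-modules2/s3-bucket-object copy 10"

  create  = true
  bucket  = "example-bucket"               # hypothetical bucket name
  key     = "config/app.json"              # hypothetical object key
  content = jsonencode({ enabled = true }) # inline body instead of a file

  tags = {
    Environment = "test"
  }
}
```

Passing `content` rather than `file_source` keeps the sketch self-contained; per the variable descriptions further down, they are alternative ways to supply the object body.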
49 changes: 49 additions & 0 deletions tf-modules2/s3-bucket-object copy 10/main.tf
@@ -0,0 +1,49 @@
resource "aws_s3_object" "this" {
count = var.create ? 1 : 0

bucket = var.bucket
key = var.key
force_destroy = var.force_destroy

acl = var.acl
storage_class = try(upper(var.storage_class), var.storage_class)

source = var.file_source
content = var.content
content_base64 = var.content_base64
etag = var.etag

cache_control = var.cache_control
content_disposition = var.content_disposition
content_encoding = var.content_encoding
content_language = var.content_language
content_type = var.content_type
website_redirect = var.website_redirect
metadata = var.metadata

server_side_encryption = var.server_side_encryption
kms_key_id = var.kms_key_id
bucket_key_enabled = var.bucket_key_enabled

object_lock_legal_hold_status = try(tobool(var.object_lock_legal_hold_status) ? "ON" : upper(var.object_lock_legal_hold_status), var.object_lock_legal_hold_status)
object_lock_mode = try(upper(var.object_lock_mode), var.object_lock_mode)
object_lock_retain_until_date = var.object_lock_retain_until_date

source_hash = var.source_hash

tags = var.tags

dynamic "override_provider" {
for_each = var.override_default_tags ? [true] : []

content {
default_tags {
tags = {}
}
}
}

lifecycle {
ignore_changes = [object_lock_retain_until_date]
}
}
14 changes: 14 additions & 0 deletions tf-modules2/s3-bucket-object copy 10/outputs.tf
@@ -0,0 +1,14 @@
output "s3_object_id" {
description = "The key of S3 object"
value = try(aws_s3_object.this[0].id, "")
}

output "s3_object_etag" {
description = "The ETag generated for the object (an MD5 sum of the object content)."
value = try(aws_s3_object.this[0].etag, "")
}

output "s3_object_version_id" {
description = "A unique version ID value for the object, if bucket versioning is enabled."
value = try(aws_s3_object.this[0].version_id, "")
}
Empty file.
155 changes: 155 additions & 0 deletions tf-modules2/s3-bucket-object copy 10/variables.tf
@@ -0,0 +1,155 @@
variable "create" {
description = "Whether to create this resource or not?"
type = bool
default = true
}

variable "bucket" {
description = "The name of the bucket to put the file in. Alternatively, an S3 access point ARN can be specified."
type = string
default = ""
}

variable "key" {
description = "The name of the object once it is in the bucket."
type = string
default = ""
}

variable "file_source" {
description = "The path to a file that will be read and uploaded as raw bytes for the object content."
type = string
default = null
}

variable "content" {
description = "Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text."
type = string
default = null
}

variable "content_base64" {
description = "Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the gzipbase64 function with small text strings. For larger objects, use source to stream the content from a disk file."
type = string
default = null
}

variable "acl" {
description = "The canned ACL to apply. Valid values are private, public-read, public-read-write, aws-exec-read, authenticated-read, bucket-owner-read, and bucket-owner-full-control. Defaults to private."
type = string
default = null
}

variable "cache_control" {
description = "Specifies caching behavior along the request/reply chain."
type = string # map?
default = null
}

variable "content_disposition" {
description = "Specifies presentational information for the object."
type = string # map?
default = null
}

variable "content_encoding" {
description = "Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field."
type = string
default = null
}

variable "content_language" {
description = "The language the content is in e.g. en-US or en-GB."
type = string
default = null
}

variable "content_type" {
description = "A standard MIME type describing the format of the object data, e.g. application/octet-stream. All Valid MIME Types are valid for this input."
type = string
default = null
}

variable "website_redirect" {
description = "Specifies a target URL for website redirect."
type = string
default = null
}

variable "storage_class" {
description = "Specifies the desired Storage Class for the object. Can be either STANDARD, REDUCED_REDUNDANCY, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER, DEEP_ARCHIVE, or STANDARD_IA. Defaults to STANDARD."
type = string
default = null
}

variable "etag" {
description = "Used to trigger updates. This attribute is not compatible with KMS encryption, kms_key_id or server_side_encryption = \"aws:kms\"."
type = string
default = null
}

variable "server_side_encryption" {
description = "Specifies server-side encryption of the object in S3. Valid values are \"AES256\" and \"aws:kms\"."
type = string
default = null
}

variable "kms_key_id" {
description = "Amazon Resource Name (ARN) of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the aws_kms_key resource, use the arn attribute. If referencing the aws_kms_alias data source or resource, use the target_key_arn attribute. Terraform will only perform drift detection if a configuration value is provided."
type = string
default = null
}

variable "bucket_key_enabled" {
description = "Whether or not to use Amazon S3 Bucket Keys for SSE-KMS."
type = bool
default = null
}

variable "metadata" {
description = "A map of keys/values to provision metadata (will be automatically prefixed by x-amz-meta-, note that only lowercase label are currently supported by the AWS Go API)."
type = map(string)
default = {}
}

variable "tags" {
description = "A map of tags to assign to the object."
type = map(string)
default = {}
}

variable "force_destroy" {
description = "Allow the object to be deleted by removing any legal hold on any object version. Default is false. This value should be set to true only if the bucket has S3 object lock enabled."
type = bool
default = false
}

variable "object_lock_legal_hold_status" {
description = "The legal hold status that you want to apply to the specified object. Valid values are ON and OFF."
type = string
default = null
}

variable "object_lock_mode" {
description = "The object lock retention mode that you want to apply to this object. Valid values are GOVERNANCE and COMPLIANCE."
type = string
default = null
}

variable "object_lock_retain_until_date" {
description = "The date and time, in RFC3339 format, when this object's object lock will expire."
type = string
default = null
}

variable "source_hash" {
description = "Triggers updates like etag but useful to address etag encryption limitations. Set using filemd5(\"path/to/source\") (Terraform 0.11.12 or later). (The value is only stored in state and not saved by AWS.)"
type = string
default = null
}

variable "override_default_tags" {
description = "Ignore provider default_tags. S3 objects support a maximum of 10 tags."
type = bool
default = false
}
10 changes: 10 additions & 0 deletions tf-modules2/s3-bucket-object copy 10/versions.tf
@@ -0,0 +1,10 @@
terraform {
required_version = ">= 1.0"

required_providers {
aws = {
source = "hashicorp/aws"
version = ">= 5.24"
}
}
}
5 changes: 5 additions & 0 deletions tf-modules2/s3-bucket-object copy 11/README.md
@@ -0,0 +1,5 @@
# AWS S3 Bucket Object Terraform Example Module

Creates S3 bucket objects with different configurations.

## Usage
49 changes: 49 additions & 0 deletions tf-modules2/s3-bucket-object copy 11/main.tf
@@ -0,0 +1,49 @@
resource "aws_s3_object" "this" {
count = var.create ? 1 : 0

bucket = var.bucket
key = var.key
force_destroy = var.force_destroy

acl = var.acl
storage_class = try(upper(var.storage_class), var.storage_class)

source = var.file_source
content = var.content
content_base64 = var.content_base64
etag = var.etag

cache_control = var.cache_control
content_disposition = var.content_disposition
content_encoding = var.content_encoding
content_language = var.content_language
content_type = var.content_type
website_redirect = var.website_redirect
metadata = var.metadata

server_side_encryption = var.server_side_encryption
kms_key_id = var.kms_key_id
bucket_key_enabled = var.bucket_key_enabled

object_lock_legal_hold_status = try(tobool(var.object_lock_legal_hold_status) ? "ON" : upper(var.object_lock_legal_hold_status), var.object_lock_legal_hold_status)
object_lock_mode = try(upper(var.object_lock_mode), var.object_lock_mode)
object_lock_retain_until_date = var.object_lock_retain_until_date

source_hash = var.source_hash

tags = var.tags

dynamic "override_provider" {
for_each = var.override_default_tags ? [true] : []

content {
default_tags {
tags = {}
}
}
}

lifecycle {
ignore_changes = [object_lock_retain_until_date]
}
}