diff --git a/.github/scripts/changelog.js b/.github/scripts/changelog.js index 73d21f5..3372552 100644 --- a/.github/scripts/changelog.js +++ b/.github/scripts/changelog.js @@ -19,7 +19,7 @@ import { execFileSync } from 'node:child_process'; import https from 'node:https'; -const OPENAI_MODEL = 'gpt-4-turbo-2024-04-09'; +const OPENAI_MODEL = 'gpt-4o-mini-2024-07-18'; const PROMPT = ` You're the head of developer relations at a SaaS. Write a concise, professional, and fun changelog, prioritizing important changes. @@ -33,7 +33,7 @@ For each commit, use this format: - **Bold 3-5 word Summary** {optional related GitHub emoji}: Continuation with 1-3 sentence description. @author (optional #PR) - Sub-bullets for key details (include only if necessary) -Place PR/issue numbers matching the exact pattern #\d+ (e.g., #123) at the end of the section in parentheses. +Place PR/issue numbers matching the exact pattern #\\d+ (e.g., #123) at the end of the section in parentheses. Do not use commit hashes as PR numbers. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f8ad5bd..869d029 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -49,7 +49,9 @@ jobs: disable-wiki: false wiki-sidebar-changelog-max: 10 delete-legacy-tags: false # Note: We don't want to delete tags in this repository - module-change-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/** + terraform-docs-version: v0.19.0 + module-path-ignore: tf-modules/kms/examples/complete + module-change-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/**,examples/** module-asset-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/** use-ssh-source-format: true diff --git a/README.md b/README.md index a7d076d..26da14b 100644 --- a/README.md +++ b/README.md @@ -17,31 +17,24 @@ documentation. 
[4]: https://github.com/techpivot/terraform-module-releaser/actions/workflows/codeql-analysis.yml [5]: https://sonarcloud.io/summary/new_code?id=terraform-module-releaser -Simplify the management of Terraform modules in your monorepo with this **GitHub Action**, designed to automate -module-specific versioning and releases. By streamlining the Terraform module release process, this action allows you to -manage multiple modules in a single repository while still maintaining independence and flexibility. Additionally, it -generates a beautifully crafted wiki for each module, complete with readme information, usage examples, Terraform-docs -details, and a full changelog. - -## Key Features - -- **Efficient Module Tagging**: Module tags are specifically designed to only include the current Terraform module - directory (and nothing else), thereby dramatically decreasing the size and improving Terraform performance. -- **Automated Release Management**: Identifies Terraform modules affected by changes in a pull request and determines - the necessary release type (major, minor, or patch) based on commit messages. -- **Versioning and Tagging**: Calculates the next version tag for each module and commits, tags, and pushes new versions - for each module individually. -- **Release Notes and Comments**: Generates a pull request comment summarizing module changes and release types, and - creates a GitHub release for each module with a dynamically generated description. -- **Wiki Integration**: Updates the wiki with new release information, including: - - README.md information for each module - - Beautifully crafted module usage examples - - `terraform-docs` details for each module - - Full changelog for each module -- **Deletes Synced**: Automatically removes tags from deleted Terraform modules, keeping your repository organized and - up-to-date. 
-- **Flexible Configuration**: Offers advanced input options for customization, allowing you to tailor the action to your - specific needs. +Simplify the management of Terraform modules in your monorepo with this **GitHub Action**. It automates module-specific +versioning and releases by creating proper Git tags and GitHub releases based on your commit messages. Each module +maintains independence while living in the same repository, with proper isolation for clean dependency management. +Additionally, the action generates a beautifully crafted wiki for each module, complete with readme information, usage +examples, Terraform-docs details, and a full changelog. + +## 🚀 Features + +- **Efficient Module Tagging** – Only includes module directory content, dramatically improving Terraform performance. +- **Smart Versioning** – Automatically determines release types (major, minor, patch) based on commit messages. +- **Comprehensive Wiki** – Generates beautiful documentation with usage examples, terraform-docs output, and full + changelogs. +- **Release Automation** – Creates GitHub releases, pull request comments, and version tags with minimal effort. +- **Self-Maintaining** – Automatically removes tags from deleted modules, keeping your repository clean and organized. +- **100% GitHub Native** – No external dependencies or services required for modules or operation, everything stays + within your GitHub ecosystem. +- **Zero Configuration** – Works out-of-the-box with sensible defaults for immediate productivity. +- **Flexible & Extensible** – Customizable settings to precisely match your team's specific workflow requirements. ## Demo @@ -180,10 +173,38 @@ configuring the following optional input parameters as needed. 
| `disable-wiki` | Whether to disable wiki generation for Terraform modules | `false` | | `wiki-sidebar-changelog-max` | An integer that specifies how many changelog entries are displayed in the sidebar per module | `5` | | `disable-branding` | Controls whether a small branding link to the action's repository is added to PR comments. Recommended to leave enabled to support OSS. | `false` | +| `module-path-ignore` | Comma separated list of module paths to completely ignore (relative to working directory). This will prevent any versioning, release, or documentation for these modules. | `` (empty) | | `module-change-exclude-patterns` | A comma-separated list of file patterns to exclude from triggering version changes in Terraform modules. Patterns follow glob syntax (e.g., `.gitignore,_.md`) and are relative to each Terraform module directory. Files matching these patterns will not affect version changes. **WARNING**: Avoid excluding '`_.tf`' files, as they are essential for module detection and versioning processes. | `.gitignore, *.md, *.tftest.hcl, tests/**` | | `module-asset-exclude-patterns` | A comma-separated list of file patterns to exclude when bundling a Terraform module for tag/release. Patterns follow glob syntax (e.g., `tests/\*\*`) and are relative to each Terraform module directory. Files matching these patterns will be excluded from the bundled output. | `.gitignore, *.md, *.tftest.hcl, tests/**` | | `use-ssh-source-format` | If enabled, all links to source code in generated Wiki documentation will use SSH standard format (e.g., `git::ssh://git@github.com/owner/repo.git`) instead of HTTPS format (`git::https://github.com/owner/repo.git`) | `false` | +### Understanding the filtering options + +- **`module-path-ignore`**: Completely ignores specified module paths. Any module whose path matches any pattern in this + list will not be processed at all by the action. 
This is useful for: + + - Excluding example modules (e.g., `**/examples/**`) + - Skipping test modules (e.g., `**/test/**`) + - Ignoring documentation-focused modules (e.g., `**/docs/**`) + - Excluding entire directories or paths that contain Terraform files but shouldn't be versioned as modules + + Example: + + ```yaml + module-path-ignore: "**/examples/**,**/test/**,root-modules" + ``` + +- **`module-change-exclude-patterns`**: These patterns determine which file changes are _ignored_ when checking if a + module needs a new release. For example, changes to documentation, examples, or workflow files typically don't require + a new module release. +- **`module-asset-exclude-patterns`**: When building a release asset for a module, these patterns determine which files + are _excluded_ from the asset. This helps reduce the asset size by omitting test files, examples, documentation, etc. + +All pattern matching is implemented using [minimatch](https://github.com/isaacs/minimatch), which supports glob patterns +similar to those used in `.gitignore` files. For more details on the pattern matching implementation, see our +[source code](https://github.com/techpivot/terraform-module-releaser/blob/main/src/utils/file.ts) or visit the +[minimatch documentation](https://github.com/isaacs/minimatch). 
+ ### Example Usage with Inputs ```yml @@ -219,6 +240,7 @@ jobs: module-change-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/** module-asset-exclude-patterns: .gitignore,*.md,*.tftest.hcl,tests/** use-ssh-source-format: false + module-path-ignore: path/to/ignore1,path/to/ignore2 ``` ## Outputs diff --git a/__mocks__/config.ts b/__mocks__/config.ts index 4743323..815a2fc 100644 --- a/__mocks__/config.ts +++ b/__mocks__/config.ts @@ -21,6 +21,7 @@ const defaultConfig: Config = { disableWiki: false, wikiSidebarChangelogMax: 10, disableBranding: false, + modulePathIgnore: ['tf-modules/kms/examples/complete'], moduleChangeExcludePatterns: ['.gitignore', '*.md'], moduleAssetExcludePatterns: ['tests/**', 'examples/**'], githubToken: 'ghp_test_token_2c6912E7710c838347Ae178B4', @@ -40,6 +41,7 @@ const validConfigKeys = [ 'disableWiki', 'wikiSidebarChangelogMax', 'disableBranding', + 'modulePathIgnore', 'moduleChangeExcludePatterns', 'moduleAssetExcludePatterns', 'githubToken', diff --git a/__tests__/config.test.ts b/__tests__/config.test.ts index 2389646..be14bc2 100644 --- a/__tests__/config.test.ts +++ b/__tests__/config.test.ts @@ -1,5 +1,15 @@ import { clearConfigForTesting, config, getConfig } from '@/config'; -import { booleanConfigKeys, booleanInputs, requiredInputs, stubInputEnv } from '@/tests/helpers/inputs'; +import { + arrayInputs, + booleanInputs, + inputToConfigKey, + inputToConfigKeyMap, + optionalInputs, + requiredInputs, + stringInputs, + stubInputEnv, +} from '@/tests/helpers/inputs'; +import type { Config } from '@/types'; import { endGroup, getBooleanInput, getInput, info, startGroup } from '@actions/core'; import { beforeAll, beforeEach, describe, expect, it, vi } from 'vitest'; @@ -26,6 +36,30 @@ describe('config', () => { }); } + for (const input of optionalInputs) { + it(`should handle optional input "${input}" when not present`, () => { + stubInputEnv({ [input]: null }); + // Simply verify it doesn't throw without the specific error object + 
expect(() => getConfig()).not.toThrow(); + + // Get the config and check the actual value + const config = getConfig(); + // Get the config key using the mapping directly if possible + const configKey = inputToConfigKeyMap[input] || inputToConfigKey(input); + + // Type-safe access using the mapping + if (arrayInputs.includes(input)) { + // Cast configKey to keyof Config to ensure type safety + expect(config[configKey as keyof Config]).toEqual([]); + } + if (stringInputs.includes(input)) { + expect(config[configKey as keyof Config]).toEqual(''); + } + + expect(getInput).toHaveBeenCalled(); + }); + } + for (const input of booleanInputs) { it(`should throw error when input "${input}" has an invalid boolean value`, () => { stubInputEnv({ [input]: 'invalid-boolean' }); @@ -70,8 +104,10 @@ describe('config', () => { // Check the boolean conversion for each key in booleanInputs const config = getConfig(); - for (const inputKey of booleanConfigKeys) { - expect(config[inputKey]).toBe(booleanValue.toLowerCase() === 'true'); + for (const booleanInput of booleanInputs) { + // Get config key from the mapping, which is already typed as keyof Config + const configKey = inputToConfigKeyMap[booleanInput]; + expect(config[configKey]).toBe(booleanValue.toLowerCase() === 'true'); } } }); @@ -117,11 +153,11 @@ describe('config', () => { expect(config.githubToken).toBe('ghp_test_token_2c6912E7710c838347Ae178B4'); expect(config.moduleChangeExcludePatterns).toEqual(['.gitignore', '*.md']); expect(config.moduleAssetExcludePatterns).toEqual(['tests/**', 'examples/**']); + expect(config.modulePathIgnore).toEqual(['tf-modules/kms/examples/complete']); expect(config.useSSHSourceFormat).toBe(false); expect(startGroup).toHaveBeenCalledWith('Initializing Config'); expect(startGroup).toHaveBeenCalledTimes(1); expect(endGroup).toHaveBeenCalledTimes(1); - expect(info).toHaveBeenCalledTimes(11); expect(vi.mocked(info).mock.calls).toEqual([ ['Major Keywords: MAJOR CHANGE, BREAKING CHANGE, !'], ['Minor 
Keywords: feat, feature'], @@ -131,6 +167,7 @@ describe('config', () => { ['Delete Legacy Tags: false'], ['Disable Wiki: false'], ['Wiki Sidebar Changelog Max: 10'], + ['Module Paths to Ignore: tf-modules/kms/examples/complete'], ['Module Change Exclude Patterns: .gitignore, *.md'], ['Module Asset Exclude Patterns: tests/**, examples/**'], ['Use SSH Source Format: false'], @@ -180,5 +217,11 @@ describe('config', () => { expect(config.majorKeywords).toEqual(['BREAKING CHANGE', '!']); expect(config.moduleChangeExcludePatterns).toEqual(['.gitignore', '*.md']); }); + + it('should handle empty modulePathIgnore', () => { + stubInputEnv({ 'module-path-ignore': '' }); + const config = getConfig(); + expect(config.modulePathIgnore).toEqual([]); + }); }); }); diff --git a/__tests__/fixtures/Home.md b/__tests__/fixtures/Home.md index dc73fb8..0a31d2b 100644 --- a/__tests__/fixtures/Home.md +++ b/__tests__/fixtures/Home.md @@ -7,6 +7,8 @@ providing an overview of their functionality and the latest versions. | Module Name | Latest Version | | -- | -- | +| [kms](/techpivot/terraform-module-releaser/wiki/kms) | null | +| [kms/examples/complete](/techpivot/terraform-module-releaser/wiki/kms∕examples∕complete) | null | | [s3-bucket-object](/techpivot/terraform-module-releaser/wiki/s3‒bucket‒object) | null | | [vpc-endpoint](/techpivot/terraform-module-releaser/wiki/vpc‒endpoint) | v1.0.0 | diff --git a/__tests__/fixtures/_Sidebar.md b/__tests__/fixtures/_Sidebar.md index fb2f4bb..c633141 100644 --- a/__tests__/fixtures/_Sidebar.md +++ b/__tests__/fixtures/_Sidebar.md @@ -3,6 +3,26 @@ ## Terraform Modules
// -> /\n if (!this.preserveMultipleSlashes) {\n for (let i = 1; i < parts.length - 1; i++) {\n const p = parts[i];\n // don't squeeze out UNC patterns\n if (i === 1 && p === '' && parts[0] === '')\n continue;\n if (p === '.' || p === '') {\n didSomething = true;\n parts.splice(i, 1);\n i--;\n }\n }\n if (parts[0] === '.' &&\n parts.length === 2 &&\n (parts[1] === '.' || parts[1] === '')) {\n didSomething = true;\n parts.pop();\n }\n }\n // //../
-> /\n let dd = 0;\n while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n const p = parts[dd - 1];\n if (p && p !== '.' && p !== '..' && p !== '**') {\n didSomething = true;\n parts.splice(dd - 1, 2);\n dd -= 2;\n }\n }\n } while (didSomething);\n return parts.length === 0 ? [''] : parts;\n }\n // First phase: single-pattern processing\n // is 1 or more portions\n //is 1 or more portions\n // is any portion other than ., .., '', or **\n //
is . or ''\n //\n // **/.. is *brutal* for filesystem walking performance, because\n // it effectively resets the recursive walk each time it occurs,\n // and ** cannot be reduced out by a .. pattern part like a regexp\n // or most strings (other than .., ., and '') can be.\n //\n // /**/..//
/
-> { /..//
/
, /**//
/
}\n // // -> /\n // //../
-> /\n // **/**/ -> **/ \n //\n // **/*/ -> */**/ <== not valid because ** doesn't follow\n // this WOULD be allowed if ** did follow symlinks, or * didn't\n firstPhasePreProcess(globParts) {\n let didSomething = false;\n do {\n didSomething = false;\n // /**/..//
/
-> { /..//
/
, /**//
/
}\n for (let parts of globParts) {\n let gs = -1;\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let gss = gs;\n while (parts[gss + 1] === '**') {\n // /**/**/-> /**/\n gss++;\n }\n // eg, if gs is 2 and gss is 4, that means we have 3 **\n // parts, and can remove 2 of them.\n if (gss > gs) {\n parts.splice(gs + 1, gss - gs);\n }\n let next = parts[gs + 1];\n const p = parts[gs + 2];\n const p2 = parts[gs + 3];\n if (next !== '..')\n continue;\n if (!p ||\n p === '.' ||\n p === '..' ||\n !p2 ||\n p2 === '.' ||\n p2 === '..') {\n continue;\n }\n didSomething = true;\n // edit parts in place, and push the new one\n parts.splice(gs, 1);\n const other = parts.slice(0);\n other[gs] = '**';\n globParts.push(other);\n gs--;\n }\n // // -> /\n if (!this.preserveMultipleSlashes) {\n for (let i = 1; i < parts.length - 1; i++) {\n const p = parts[i];\n // don't squeeze out UNC patterns\n if (i === 1 && p === '' && parts[0] === '')\n continue;\n if (p === '.' || p === '') {\n didSomething = true;\n parts.splice(i, 1);\n i--;\n }\n }\n if (parts[0] === '.' &&\n parts.length === 2 &&\n (parts[1] === '.' || parts[1] === '')) {\n didSomething = true;\n parts.pop();\n }\n }\n // //../
-> /\n let dd = 0;\n while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n const p = parts[dd - 1];\n if (p && p !== '.' && p !== '..' && p !== '**') {\n didSomething = true;\n const needDot = dd === 1 && parts[dd + 1] === '**';\n const splin = needDot ? ['.'] : [];\n parts.splice(dd - 1, 2, ...splin);\n if (parts.length === 0)\n parts.push('');\n dd -= 2;\n }\n }\n }\n } while (didSomething);\n return globParts;\n }\n // second phase: multi-pattern dedupes\n // { /*/, //
} -> /*/\n // { /, /} -> /\n // { /**/, /} -> /**/\n //\n // { /**/, /**//
} -> /**/\n // ^-- not valid because ** doens't follow symlinks\n secondPhasePreProcess(globParts) {\n for (let i = 0; i < globParts.length - 1; i++) {\n for (let j = i + 1; j < globParts.length; j++) {\n const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);\n if (matched) {\n globParts[i] = [];\n globParts[j] = matched;\n break;\n }\n }\n }\n return globParts.filter(gs => gs.length);\n }\n partsMatch(a, b, emptyGSMatch = false) {\n let ai = 0;\n let bi = 0;\n let result = [];\n let which = '';\n while (ai < a.length && bi < b.length) {\n if (a[ai] === b[bi]) {\n result.push(which === 'b' ? b[bi] : a[ai]);\n ai++;\n bi++;\n }\n else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {\n result.push(a[ai]);\n ai++;\n }\n else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {\n result.push(b[bi]);\n bi++;\n }\n else if (a[ai] === '*' &&\n b[bi] &&\n (this.options.dot || !b[bi].startsWith('.')) &&\n b[bi] !== '**') {\n if (which === 'b')\n return false;\n which = 'a';\n result.push(a[ai]);\n ai++;\n bi++;\n }\n else if (b[bi] === '*' &&\n a[ai] &&\n (this.options.dot || !a[ai].startsWith('.')) &&\n a[ai] !== '**') {\n if (which === 'a')\n return false;\n which = 'b';\n result.push(b[bi]);\n ai++;\n bi++;\n }\n else {\n return false;\n }\n }\n // if we fall out of the loop, it means they two are identical\n // as long as their lengths match\n return a.length === b.length && result;\n }\n parseNegate() {\n if (this.nonegate)\n return;\n const pattern = this.pattern;\n let negate = false;\n let negateOffset = 0;\n for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {\n negate = !negate;\n negateOffset++;\n }\n if (negateOffset)\n this.pattern = pattern.slice(negateOffset);\n this.negate = negate;\n }\n // set partial to true to test if, for example,\n // \"/a/b\" matches the start of \"/*/b/*/d\"\n // Partial means, if you run out of file before you run\n // out of pattern, then that's fine, as 
long as all\n // the parts match.\n matchOne(file, pattern, partial = false) {\n const options = this.options;\n // UNC paths like //?/X:/... can match X:/... and vice versa\n // Drive letters in absolute drive or unc paths are always compared\n // case-insensitively.\n if (this.isWindows) {\n const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);\n const fileUNC = !fileDrive &&\n file[0] === '' &&\n file[1] === '' &&\n file[2] === '?' &&\n /^[a-z]:$/i.test(file[3]);\n const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);\n const patternUNC = !patternDrive &&\n pattern[0] === '' &&\n pattern[1] === '' &&\n pattern[2] === '?' &&\n typeof pattern[3] === 'string' &&\n /^[a-z]:$/i.test(pattern[3]);\n const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;\n const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;\n if (typeof fdi === 'number' && typeof pdi === 'number') {\n const [fd, pd] = [file[fdi], pattern[pdi]];\n if (fd.toLowerCase() === pd.toLowerCase()) {\n pattern[pdi] = fd;\n if (pdi > fdi) {\n pattern = pattern.slice(pdi);\n }\n else if (fdi > pdi) {\n file = file.slice(fdi);\n }\n }\n }\n }\n // resolve and reduce . and .. 
portions in the file as well.\n // dont' need to do the second phase, because it's only one string[]\n const { optimizationLevel = 1 } = this.options;\n if (optimizationLevel >= 2) {\n file = this.levelTwoFileOptimize(file);\n }\n this.debug('matchOne', this, { file, pattern });\n this.debug('matchOne', file.length, pattern.length);\n for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {\n this.debug('matchOne loop');\n var p = pattern[pi];\n var f = file[fi];\n this.debug(pattern, p, f);\n // should be impossible.\n // some invalid regexp stuff in the set.\n /* c8 ignore start */\n if (p === false) {\n return false;\n }\n /* c8 ignore stop */\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f]);\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi;\n var pr = pi + 1;\n if (pr === pl) {\n this.debug('** at the end');\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' ||\n file[fi] === '..' 
||\n (!options.dot && file[fi].charAt(0) === '.'))\n return false;\n }\n return true;\n }\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr];\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee);\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee);\n // found a match.\n return true;\n }\n else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' ||\n swallowee === '..' ||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr);\n break;\n }\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue');\n fr++;\n }\n }\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n /* c8 ignore start */\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr);\n if (fr === fl) {\n return true;\n }\n }\n /* c8 ignore stop */\n return false;\n }\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n let hit;\n if (typeof p === 'string') {\n hit = f === p;\n this.debug('string match', p, f, hit);\n }\n else {\n hit = p.test(f);\n this.debug('pattern match', p, f, hit);\n }\n if (!hit)\n return false;\n }\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? 
pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true;\n }\n else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial;\n }\n else if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n return fi === fl - 1 && file[fi] === '';\n /* c8 ignore start */\n }\n else {\n // should be unreachable.\n throw new Error('wtf?');\n }\n /* c8 ignore stop */\n }\n braceExpand() {\n return braceExpand(this.pattern, this.options);\n }\n parse(pattern) {\n assertValidPattern(pattern);\n const options = this.options;\n // shortcuts\n if (pattern === '**')\n return GLOBSTAR;\n if (pattern === '')\n return '';\n // far and away, the most common glob pattern parts are\n // *, *.*, and *. Add a fast check method for those.\n let m;\n let fastTest = null;\n if ((m = pattern.match(starRE))) {\n fastTest = options.dot ? starTestDot : starTest;\n }\n else if ((m = pattern.match(starDotExtRE))) {\n fastTest = (options.nocase\n ? options.dot\n ? starDotExtTestNocaseDot\n : starDotExtTestNocase\n : options.dot\n ? starDotExtTestDot\n : starDotExtTest)(m[1]);\n }\n else if ((m = pattern.match(qmarksRE))) {\n fastTest = (options.nocase\n ? options.dot\n ? qmarksTestNocaseDot\n : qmarksTestNocase\n : options.dot\n ? qmarksTestDot\n : qmarksTest)(m);\n }\n else if ((m = pattern.match(starDotStarRE))) {\n fastTest = options.dot ? 
starDotStarTestDot : starDotStarTest;\n }\n else if ((m = pattern.match(dotStarRE))) {\n fastTest = dotStarTest;\n }\n const re = AST.fromGlob(pattern, this.options).toMMPattern();\n if (fastTest && typeof re === 'object') {\n // Avoids overriding in frozen environments\n Reflect.defineProperty(re, 'test', { value: fastTest });\n }\n return re;\n }\n makeRe() {\n if (this.regexp || this.regexp === false)\n return this.regexp;\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n const set = this.set;\n if (!set.length) {\n this.regexp = false;\n return this.regexp;\n }\n const options = this.options;\n const twoStar = options.noglobstar\n ? star\n : options.dot\n ? twoStarDot\n : twoStarNoDot;\n const flags = new Set(options.nocase ? ['i'] : []);\n // regexpify non-globstar patterns\n // if ** is only item, then we just do one twoStar\n // if ** is first, and there are more, prepend (\\/|twoStar\\/)? to next\n // if ** is last, append (\\/twoStar|) to previous\n // if ** is in the middle, append (\\/|\\/twoStar\\/) to previous\n // then filter out GLOBSTAR symbols\n let re = set\n .map(pattern => {\n const pp = pattern.map(p => {\n if (p instanceof RegExp) {\n for (const f of p.flags.split(''))\n flags.add(f);\n }\n return typeof p === 'string'\n ? regExpEscape(p)\n : p === GLOBSTAR\n ? GLOBSTAR\n : p._src;\n });\n pp.forEach((p, i) => {\n const next = pp[i + 1];\n const prev = pp[i - 1];\n if (p !== GLOBSTAR || prev === GLOBSTAR) {\n return;\n }\n if (prev === undefined) {\n if (next !== undefined && next !== GLOBSTAR) {\n pp[i + 1] = '(?:\\\\/|' + twoStar + '\\\\/)?' 
+ next;\n }\n else {\n pp[i] = twoStar;\n }\n }\n else if (next === undefined) {\n pp[i - 1] = prev + '(?:\\\\/|' + twoStar + ')?';\n }\n else if (next !== GLOBSTAR) {\n pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + '\\\\/)' + next;\n pp[i + 1] = GLOBSTAR;\n }\n });\n return pp.filter(p => p !== GLOBSTAR).join('/');\n })\n .join('|');\n // need to wrap in parens if we had more than one thing with |,\n // otherwise only the first will be anchored to ^ and the last to $\n const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^' + open + re + close + '$';\n // can match anything, as long as it's not this.\n if (this.negate)\n re = '^(?!' + re + ').+$';\n try {\n this.regexp = new RegExp(re, [...flags].join(''));\n /* c8 ignore start */\n }\n catch (ex) {\n // should be impossible\n this.regexp = false;\n }\n /* c8 ignore stop */\n return this.regexp;\n }\n slashSplit(p) {\n // if p starts with // on windows, we preserve that\n // so that UNC paths aren't broken. 
Otherwise, any number of\n // / characters are coalesced into one, unless\n // preserveMultipleSlashes is set to true.\n if (this.preserveMultipleSlashes) {\n return p.split('/');\n }\n else if (this.isWindows && /^\\/\\/[^\\/]+/.test(p)) {\n // add an extra '' for the one we lose\n return ['', ...p.split(/\\/+/)];\n }\n else {\n return p.split(/\\/+/);\n }\n }\n match(f, partial = this.partial) {\n this.debug('match', f, this.pattern);\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) {\n return false;\n }\n if (this.empty) {\n return f === '';\n }\n if (f === '/' && partial) {\n return true;\n }\n const options = this.options;\n // windows: need to use /, not \\\n if (this.isWindows) {\n f = f.split('\\\\').join('/');\n }\n // treat the test path as a set of pathparts.\n const ff = this.slashSplit(f);\n this.debug(this.pattern, 'split', ff);\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n const set = this.set;\n this.debug(this.pattern, 'set', set);\n // Find the basename of the path by looking for the last non-empty segment\n let filename = ff[ff.length - 1];\n if (!filename) {\n for (let i = ff.length - 2; !filename && i >= 0; i--) {\n filename = ff[i];\n }\n }\n for (let i = 0; i < set.length; i++) {\n const pattern = set[i];\n let file = ff;\n if (options.matchBase && pattern.length === 1) {\n file = [filename];\n }\n const hit = this.matchOne(file, pattern, partial);\n if (hit) {\n if (options.flipNegate) {\n return true;\n }\n return !this.negate;\n }\n }\n // didn't get any hits. 
this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) {\n return false;\n }\n return this.negate;\n }\n static defaults(def) {\n return minimatch.defaults(def).Minimatch;\n }\n}\n/* c8 ignore start */\nexport { AST } from './ast.js';\nexport { escape } from './escape.js';\nexport { unescape } from './unescape.js';\n/* c8 ignore stop */\nminimatch.AST = AST;\nminimatch.Minimatch = Minimatch;\nminimatch.escape = escape;\nminimatch.unescape = unescape;\n//# sourceMappingURL=index.js.map","import { copyFileSync, existsSync, mkdirSync, readdirSync, rmSync, statSync } from 'node:fs';\nimport { extname, join, relative } from 'node:path';\nimport { info } from '@actions/core';\nimport { minimatch } from 'minimatch';\n\n/**\n * Checks if a directory contains any Terraform (.tf) files.\n *\n * @param {string} dirPath - The path of the directory to check.\n * @returns {boolean} True if the directory contains at least one .tf file, otherwise false.\n */\nexport function isTerraformDirectory(dirPath: string): boolean {\n return existsSync(dirPath) && readdirSync(dirPath).some((file) => extname(file) === '.tf');\n}\n\n/**\n * Checks if a file should be excluded from matching based on the defined exclude patterns\n * and relative paths from the base directory.\n *\n * @param {string} baseDirectory - The base directory to resolve relative paths against.\n * @param {string} filePath - The path of the file to check.\n * @param {string[]} excludePatterns - An array of patterns to match against for exclusion.\n * @returns {boolean} True if the file should be excluded, false otherwise.\n */\nexport function shouldExcludeFile(baseDirectory: string, filePath: string, excludePatterns: string[]): boolean {\n const relativePath = relative(baseDirectory, filePath);\n\n // Expand patterns to include both directories and their contents, then remove duplicates\n const expandedPatterns = Array.from(\n new Set(\n excludePatterns.flatMap((pattern) => [\n 
pattern, // Original pattern\n pattern.replace(/\\/(?:\\*\\*)?$/, ''), // Match directories themselves, like `tests2/`\n ]),\n ),\n );\n\n return expandedPatterns.some((pattern: string) => minimatch(relativePath, pattern, { matchBase: true }));\n}\n\n/**\n * Recursively copies the contents of a directory to a temporary directory,\n * excluding files that match specified patterns.\n *\n * @param {string} directory - The directory to copy from.\n * @param {string} tmpDir - The temporary directory to copy to.\n * @param {string[]} excludePatterns - An array of patterns to match against for exclusion.\n * @param {string} [baseDirectory] - The base directory for exclusion pattern matching.\n * Defaults to the source directory if not provided.\n */\nexport function copyModuleContents(\n directory: string,\n tmpDir: string,\n excludePatterns: string[],\n baseDirectory?: string,\n) {\n const baseDir = baseDirectory ?? directory;\n\n // Read the directory contents\n const filesToCopy = readdirSync(directory);\n\n info(`Copying \"${directory}\" to directory: ${tmpDir}`);\n for (const file of filesToCopy) {\n const filePath = join(directory, file);\n const stats = statSync(filePath);\n\n if (stats.isDirectory()) {\n // If the item is a directory, create the directory in tmpDir and copy its contents\n const newDir = join(tmpDir, file);\n mkdirSync(newDir, { recursive: true });\n // Note: Important we pass the original base directory.\n copyModuleContents(filePath, newDir, excludePatterns, baseDir); // Recursion for directory contents\n } else if (!shouldExcludeFile(baseDir, filePath, excludePatterns)) {\n // Handle file copying\n copyFileSync(filePath, join(tmpDir, file));\n } else {\n info(`Excluding file: ${filePath}`);\n }\n }\n}\n\n/**\n * Removes all contents of a specified directory except for specified items to preserve.\n *\n * @param directory - The path of the directory to clear.\n * @param exceptions - An array of filenames or directory names to preserve within the 
directory.\n *\n * This function removes all files and subdirectories within the specified directory while\n * retaining any items listed in the `exceptions` array. The names in `exceptions` should be\n * relative to the `directory` (e.g., `['.git', 'README.md']`), referring to items within the\n * directory you want to keep.\n *\n * ### Example Usage:\n *\n * Suppose you have a directory structure:\n * ```\n * /example-directory/\n * ├── .git/\n * ├── config.json\n * ├── temp/\n * └── README.md\n * ```\n *\n * Using `removeDirectoryContents('/example-directory', ['.git', 'README.md'])` will:\n * - Remove `config.json` and the `temp` folder.\n * - Preserve the `.git` directory and `README.md` file within `/example-directory`.\n *\n * **Note:**\n * - Items in `exceptions` are matched only by their names relative to the given `directory`.\n * - If the `.git` directory or `README.md` file were in a nested subdirectory within `/example-directory`,\n * you would need to adjust the `exceptions` parameter accordingly to reflect the correct relative path.\n *\n * @example\n * removeDirectoryContents('/home/user/project', ['.git', 'important-file.txt']);\n * // This would remove all contents inside `/home/user/project`, except for the `.git` directory\n * // and the `important-file.txt` file.\n */\nexport function removeDirectoryContents(directory: string, exceptions: string[] = []): void {\n if (!existsSync(directory)) {\n return;\n }\n\n for (const item of readdirSync(directory)) {\n const itemPath = join(directory, item);\n\n // Skip removal for items listed in the exceptions array\n if (!shouldExcludeFile(directory, itemPath, exceptions)) {\n //if (!exceptions.includes(item)) {\n rmSync(itemPath, { recursive: true, force: true });\n }\n }\n info(`Removed contents of directory [${directory}], preserving items: ${exceptions.join(', ')}`);\n}\n","/*\nHow it works:\n`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of 
`Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.\n*/\n\nclass Node {\n\tvalue;\n\tnext;\n\n\tconstructor(value) {\n\t\tthis.value = value;\n\t}\n}\n\nexport default class Queue {\n\t#head;\n\t#tail;\n\t#size;\n\n\tconstructor() {\n\t\tthis.clear();\n\t}\n\n\tenqueue(value) {\n\t\tconst node = new Node(value);\n\n\t\tif (this.#head) {\n\t\t\tthis.#tail.next = node;\n\t\t\tthis.#tail = node;\n\t\t} else {\n\t\t\tthis.#head = node;\n\t\t\tthis.#tail = node;\n\t\t}\n\n\t\tthis.#size++;\n\t}\n\n\tdequeue() {\n\t\tconst current = this.#head;\n\t\tif (!current) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis.#head = this.#head.next;\n\t\tthis.#size--;\n\t\treturn current.value;\n\t}\n\n\tpeek() {\n\t\tif (!this.#head) {\n\t\t\treturn;\n\t\t}\n\n\t\treturn this.#head.value;\n\n\t\t// TODO: Node.js 18.\n\t\t// return this.#head?.value;\n\t}\n\n\tclear() {\n\t\tthis.#head = undefined;\n\t\tthis.#tail = undefined;\n\t\tthis.#size = 0;\n\t}\n\n\tget size() {\n\t\treturn this.#size;\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tlet current = this.#head;\n\n\t\twhile (current) {\n\t\t\tyield current.value;\n\t\t\tcurrent = current.next;\n\t\t}\n\t}\n}\n","import Queue from 'yocto-queue';\n\nexport default function pLimit(concurrency) {\n\tvalidateConcurrency(concurrency);\n\n\tconst queue = new Queue();\n\tlet activeCount = 0;\n\n\tconst resumeNext = () => {\n\t\tif (activeCount < concurrency && queue.size > 0) {\n\t\t\tqueue.dequeue()();\n\t\t\t// Since `pendingCount` has been decreased by one, increase `activeCount` by one.\n\t\t\tactiveCount++;\n\t\t}\n\t};\n\n\tconst next = () => {\n\t\tactiveCount--;\n\n\t\tresumeNext();\n\t};\n\n\tconst run = async (function_, resolve, 
arguments_) => {\n\t\tconst result = (async () => function_(...arguments_))();\n\n\t\tresolve(result);\n\n\t\ttry {\n\t\t\tawait result;\n\t\t} catch {}\n\n\t\tnext();\n\t};\n\n\tconst enqueue = (function_, resolve, arguments_) => {\n\t\t// Queue `internalResolve` instead of the `run` function\n\t\t// to preserve asynchronous context.\n\t\tnew Promise(internalResolve => {\n\t\t\tqueue.enqueue(internalResolve);\n\t\t}).then(\n\t\t\trun.bind(undefined, function_, resolve, arguments_),\n\t\t);\n\n\t\t(async () => {\n\t\t\t// This function needs to wait until the next microtask before comparing\n\t\t\t// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously\n\t\t\t// after the `internalResolve` function is dequeued and called. The comparison in the if-statement\n\t\t\t// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.\n\t\t\tawait Promise.resolve();\n\n\t\t\tif (activeCount < concurrency) {\n\t\t\t\tresumeNext();\n\t\t\t}\n\t\t})();\n\t};\n\n\tconst generator = (function_, ...arguments_) => new Promise(resolve => {\n\t\tenqueue(function_, resolve, arguments_);\n\t});\n\n\tObject.defineProperties(generator, {\n\t\tactiveCount: {\n\t\t\tget: () => activeCount,\n\t\t},\n\t\tpendingCount: {\n\t\t\tget: () => queue.size,\n\t\t},\n\t\tclearQueue: {\n\t\t\tvalue() {\n\t\t\t\tqueue.clear();\n\t\t\t},\n\t\t},\n\t\tconcurrency: {\n\t\t\tget: () => concurrency,\n\n\t\t\tset(newConcurrency) {\n\t\t\t\tvalidateConcurrency(newConcurrency);\n\t\t\t\tconcurrency = newConcurrency;\n\n\t\t\t\tqueueMicrotask(() => {\n\t\t\t\t\t// eslint-disable-next-line no-unmodified-loop-condition\n\t\t\t\t\twhile (activeCount < concurrency && queue.size > 0) {\n\t\t\t\t\t\tresumeNext();\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t},\n\t\t},\n\t});\n\n\treturn generator;\n}\n\nexport function limitFunction(function_, option) {\n\tconst {concurrency} = option;\n\tconst limit = pLimit(concurrency);\n\n\treturn (...arguments_) => limit(() => 
function_(...arguments_));\n}\n\nfunction validateConcurrency(concurrency) {\n\tif (!((Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY) && concurrency > 0)) {\n\t\tthrow new TypeError('Expected `concurrency` to be a number from 1 and up');\n\t}\n}\n","import { execFileSync } from 'node:child_process';\nimport type { ExecSyncOptions } from 'node:child_process';\nimport { existsSync, mkdirSync } from 'node:fs';\nimport * as fsp from 'node:fs/promises';\nimport { cpus } from 'node:os';\nimport { join, resolve } from 'node:path';\nimport { getModuleReleaseChangelog } from '@/changelog';\nimport { config } from '@/config';\nimport { context } from '@/context';\nimport { generateTerraformDocs } from '@/terraform-docs';\nimport type { ExecSyncError, TerraformModule } from '@/types';\nimport {\n BRANDING_WIKI,\n GITHUB_ACTIONS_BOT_EMAIL,\n GITHUB_ACTIONS_BOT_NAME,\n PROJECT_URL,\n WIKI_TITLE_REPLACEMENTS,\n} from '@/utils/constants';\nimport { removeDirectoryContents } from '@/utils/file';\nimport { endGroup, info, startGroup } from '@actions/core';\nimport pLimit from 'p-limit';\nimport which from 'which';\n\nexport enum WikiStatus {\n SUCCESS = 'SUCCESS',\n FAILURE = 'FAILURE',\n DISABLED = 'DISABLED',\n}\n\n// Special subdirectory inside the primary repository where the wiki is checked out.\nconst WIKI_SUBDIRECTORY_NAME = '.wiki';\n\n/**\n * Clones the wiki repository for the current GitHub repository into a specified subdirectory.\n *\n * This function constructs the wiki Git URL using the current repository context and executes\n * a `git clone` command with a depth of 1 to fetch only the latest commit. The subdirectory\n * for the wiki is created if it doesn't already exist. If the wiki does not exist or is not enabled,\n * an error will be caught and logged.\n *\n * Note: It's important we clone via SSH and not HTTPS. 
Will likely need to test cloning this for\n * self-hosted GitHub enterprise on custom domain as this hasn't been done.\n *\n * @throws {Error} If the `git clone` command fails due to issues such as the wiki not existing.\n */\nexport function checkoutWiki(): void {\n const wikiHtmlUrl = `${context.repoUrl}.wiki`;\n const wikiDirectory = resolve(context.workspaceDir, WIKI_SUBDIRECTORY_NAME);\n const execWikiOpts: ExecSyncOptions = { cwd: wikiDirectory, stdio: 'inherit' };\n\n startGroup(`Checking out wiki repository [${wikiHtmlUrl}]`);\n\n const gitPath = which.sync('git');\n\n info('Adding repository directory to the temporary git global config as a safe directory');\n execFileSync(gitPath, ['config', '--global', '--add', 'safe.directory', wikiDirectory], { stdio: 'inherit' });\n\n // Create directory if it doesn't exist\n if (!existsSync(wikiDirectory)) {\n mkdirSync(wikiDirectory);\n }\n\n // Initialize repository if needed\n const isExistingRepo = existsSync(join(wikiDirectory, '.git'));\n if (!isExistingRepo) {\n info('Initializing new repository');\n execFileSync(gitPath, ['init', '--initial-branch=master', wikiDirectory], execWikiOpts);\n }\n\n // Set or update the remote URL\n info('Configuring remote URL');\n const remoteOutput = execFileSync(gitPath, ['remote'], { cwd: wikiDirectory, encoding: 'utf8' }) || '';\n const hasRemote = remoteOutput.includes('origin');\n if (hasRemote) {\n execFileSync(gitPath, ['remote', 'set-url', 'origin', wikiHtmlUrl], execWikiOpts);\n } else {\n execFileSync(gitPath, ['remote', 'add', 'origin', wikiHtmlUrl], execWikiOpts);\n }\n\n info('Configuring authentication');\n const basicCredential = Buffer.from(`x-access-token:${config.githubToken}`, 'utf8').toString('base64');\n try {\n execFileSync(gitPath, ['config', '--local', '--unset-all', 'http.https://github.com/.extraheader'], execWikiOpts);\n } catch (error) {\n // Type guard to ensure we're handling the correct error type\n // Only ignore specific status code if needed\n 
if (error instanceof Error && (error as unknown as ExecSyncError).status !== 5) {\n throw error;\n }\n }\n\n execFileSync(\n gitPath,\n ['config', '--local', 'http.https://github.com/.extraheader', `Authorization: Basic ${basicCredential}`],\n execWikiOpts,\n );\n\n try {\n info('Fetching the repository');\n execFileSync(\n gitPath,\n [\n 'fetch',\n '--no-tags',\n '--prune',\n '--no-recurse-submodules',\n '--depth=1',\n 'origin',\n '+refs/heads/master*:refs/remotes/origin/master*',\n '+refs/tags/master*:refs/tags/master*',\n ],\n execWikiOpts,\n );\n\n execFileSync(gitPath, ['checkout', 'master'], execWikiOpts);\n\n info('Successfully checked out wiki repository');\n } finally {\n endGroup();\n }\n}\n\n/**\n * Generates a sanitized slug for a GitHub Wiki title by replacing specific characters in the\n * provided module name with visually similar substitutes to avoid path conflicts and improve display.\n * This function dynamically creates a regular expression from the keys in the `WIKI_TITLE_REPLACEMENTS`\n * map, ensuring any added replacements in the map will be automatically accounted for in future\n * conversions.\n *\n * **Important**: Refer to `WIKI_TITLE_REPLACEMENTS` in `constants.ts` to add or update replacement mappings.\n *\n * @param {string} moduleName - The original module name to be transformed into a GitHub Wiki-compatible slug.\n * @returns {string} - The modified module name, with specified characters replaced by corresponding entries\n * in the `WIKI_TITLE_REPLACEMENTS` map.\n *\n * @example\n * // Example usage:\n * // Assuming WIKI_TITLE_REPLACEMENTS = { '/': '∕', '-': '‒' }\n * const moduleName = 'my-module/name';\n * const wikiSlug = getWikiSlug(moduleName);\n * // Returns: \"my‒module∕name\"\n *\n * @remarks\n * This function avoids manual regex maintenance by dynamically building a character class from the keys in\n * `WIKI_TITLE_REPLACEMENTS`. 
To handle special characters in these keys, the `escapeForRegex` helper function\n * escapes regex metacharacters as needed.\n *\n * The `escapeForRegex` helper:\n * - Escapes metacharacters (e.g., `*`, `.`, `+`, `?`, `^`, `$`, `{`, `}`, `(`, `)`, `|`, `[`, `]`, `\\`)\n * to ensure they are interpreted literally within the regular expression.\n *\n * Dynamic regex creation:\n * - `Object.keys(WIKI_TITLE_REPLACEMENTS).map(escapeForRegex).join('')` generates an escaped sequence\n * of characters for replacement and constructs a character class for the `pattern` regex.\n *\n * Replacement logic:\n * - `moduleName.replace(pattern, match => WIKI_TITLE_REPLACEMENTS[match])` matches each specified character\n * in `moduleName` and replaces it with the mapped character from `WIKI_TITLE_REPLACEMENTS`.\n */\nfunction getWikiSlug(moduleName: string): string {\n const escapeForRegex = (char: string): string => {\n return char.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&'); // Escape special characters for regex\n };\n\n const pattern = new RegExp(`[${Object.keys(WIKI_TITLE_REPLACEMENTS).map(escapeForRegex).join('')}]`, 'g');\n\n return moduleName.replace(pattern, (match) => WIKI_TITLE_REPLACEMENTS[match]);\n}\n\n/**\n * Generates a URL to the wiki page for a given Terraform module.\n *\n * @param {string} moduleName - The name of the Terraform module. 
The function extracts the base name and\n * removes the file extension (if any) to form the slug.\n * @param {boolean} [relative=true] - A flag indicating whether to return a relative URL\n * (default) or an absolute URL.\n * - If `true`, returns a relative URL based on the repository owner and name.\n * - If `false`, uses the full repository URL from `context.repoUrl`.\n * @returns {string} - The full wiki link for the module based on the provided module name and URL\n * type (relative or absolute).\n *\n * @example\n * // Returns a relative URL for a module\n * getWikiLink('terraform-aws-vpc'); // \"/owner/repo/wiki/terraform-aws-vpc\"\n *\n * @example\n * // Returns an absolute URL for a module\n * getWikiLink('aws/terraform-aws-vpc', false); // \"https://github.com/owner/repo/wiki/terraform-aws-vpc\"\n */\nexport function getWikiLink(moduleName: string, relative = true): string {\n let baseUrl: string;\n if (relative) {\n baseUrl = `/${context.repo.owner}/${context.repo.repo}`;\n } else {\n baseUrl = context.repoUrl;\n }\n\n return `${baseUrl}/wiki/${getWikiSlug(moduleName)}`;\n}\n\n/**\n * Formats the module source URL based on configuration settings.\n *\n * @param repoUrl - The repository URL\n * @param useSSH - Whether to use SSH format\n * @returns The formatted source URL for the module\n */\nfunction formatModuleSource(repoUrl: string, useSSH: boolean): string {\n if (useSSH) {\n // Convert HTTPS URL to SSH format\n // From: https://github.com/owner/repo\n // To: ssh://git@github.com/owner/repo\n return `ssh://${repoUrl.replace(/^https:\\/\\/github\\.com/, 'git@github.com')}.git`;\n }\n return `${repoUrl}.git`;\n}\n\n/**\n * Generates the wiki file associated with the specified Terraform module.\n * Ensures that the directory structure is created if it doesn't exist and handles overwriting\n * the existing wiki file.\n *\n * @param {string} moduleName - The name of the Terraform module.\n * @param {string} content - The markdown content to write to the 
wiki file.\n * @returns {Promise } The path to the wiki file that was written.\n * @throws Will throw an error if the file cannot be written.\n */\nasync function generateWikiModule(terraformModule: TerraformModule): Promise {\n const { moduleName, latestTag } = terraformModule;\n\n const wikiSlugFile = `${getWikiSlug(moduleName)}.md`;\n const wikiFile = join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME, wikiSlugFile);\n\n // Generate a module changelog\n const changelog = getModuleReleaseChangelog(terraformModule);\n const tfDocs = await generateTerraformDocs(terraformModule);\n const moduleSource = formatModuleSource(context.repoUrl, config.useSSHSourceFormat);\n const wikiContent = [\n '# Usage\\n',\n 'To use this module in your Terraform, refer to the below module example:\\n',\n '```hcl',\n `module \"${moduleName.replace(/[^a-zA-Z0-9]/g, '_').toLowerCase()}\" {`,\n ` source = \"git::${moduleSource}?ref=${latestTag}\"`,\n '\\n # See inputs below for additional required parameters',\n '}',\n '```',\n '\\n# Attributes\\n',\n '',\n tfDocs,\n '',\n '\\n# Changelog\\n',\n changelog,\n ].join('\\n');\n\n // Write the markdown content to the wiki file, overwriting if it exists\n await fsp.writeFile(wikiFile, wikiContent, 'utf8');\n\n info(`Generated: ${wikiSlugFile}`);\n\n return wikiFile;\n}\n\n/**\n * Generates the Wiki sidebar with a list of Terraform modules, including changelog entries for each.\n *\n * This function generates a dynamic sidebar for the GitHub Wiki by iterating over the provided\n * Terraform modules, extracting their changelog content, and formatting it into a nested list\n * with relevant links to sections within each module's Wiki page (e.g., \"Usage\", \"Attributes\",\n * and \"Changelog\"). The generated content is then written to the `_Sidebar.md` file.\n *\n * @param {TerraformModule[]} terraformModules - An array of Terraform modules for which the Wiki\n * sidebar will be updated. 
Each module contains the `moduleName`, and its changelog is fetched\n * to generate sidebar entries.\n * @returns {Promise } - A promise that resolves with the path of the sidebar file once it has been\n * successfully updated and written.\n *\n * Function Details:\n * - Uses the `context.repo` object to get the repository owner and name for building links.\n * - The sidebar file is located in the `WIKI_DIRECTORY` and is named `_Sidebar.md`.\n * - For each module, it uses `getWikiLink()` to create the base link and `getModuleReleaseChangelog()`\n * to extract relevant changelog headings (matching `##` or `###`).\n * - Headings are converted into valid HTML IDs and displayed as linked list items (` - `),\n * limiting the number of changelog entries based on the configuration\n * (`config.wikiSidebarChangelogMax`).\n * - Writes the final content, including links to Home and the module Wiki pages, to the sidebar file.\n *\n * Example Sidebar Entry:\n * ```\n *
- \n *
\n * ```\n */\nasync function generateWikiSidebar(terraformModules: TerraformModule[]): Promise\n *\n *null/random
\n *\n *
\n *- Usage
\n *- Attributes
\n *- Changelog\n *
\n *\n *
\n *- v1.2.0 (2024-10-15)
\n *- v1.1.0 (2024-10-11)
\n *- v1.0.0 (2024-10-10)
\n *{\n const sidebarFile = join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME, '_Sidebar.md');\n const { owner, repo } = context.repo;\n const repoBaseUrl = `/${owner}/${repo}`;\n let moduleSidebarContent = '';\n\n for (const module of terraformModules) {\n const { moduleName } = module;\n\n // Get the baselink which is used throughout the sidebar\n const baselink = getWikiLink(moduleName, true);\n\n // Generate module changelog string by limiting to wikiSidebarChangelogMax\n const changelogContent = getModuleReleaseChangelog(module);\n\n // Regex to capture all headings starting with '## ' on a single line\n // Note: Use ([^\\n]+) Instead of (.+):\n // The pattern [^\\n]+ matches one or more characters that are not a newline. This restricts matches\n // to a single line and reduces backtracking possibilities since it won't consume any newlines.\n const headingRegex = /^(?:#{2,3})\\s+([^\\n]+)/gm; // Matches '##' or '###' headings\n\n // Initialize changelog entries\n const changelogEntries = [];\n let headingMatch = null;\n do {\n // If a match is found, process it\n if (headingMatch) {\n const heading = headingMatch[1].trim();\n\n // Convert heading into a valid ID string (keep only [a-zA-Z0-9-_]) But we need spaces to go to a '-'\n const idString = heading.replace(/ +/g, '-').replace(/[^a-zA-Z0-9-_]/g, '');\n\n // Append the entry to changelogEntries\n changelogEntries.push(\n ` - ${heading.replace(/`/g, '')}
`,\n );\n }\n\n // Execute the regex again for the next match\n headingMatch = headingRegex.exec(changelogContent);\n } while (headingMatch);\n\n // Limit to the maximum number of changelog entries defined in config\n const limitedChangelogEntries = changelogEntries.slice(0, config.wikiSidebarChangelogMax).join('\\n');\n\n // Wrap changelog inif it's not empty\n let changelog = '
// -> /\n if (!this.preserveMultipleSlashes) {\n for (let i = 1; i < parts.length - 1; i++) {\n const p = parts[i];\n // don't squeeze out UNC patterns\n if (i === 1 && p === '' && parts[0] === '')\n continue;\n if (p === '.' || p === '') {\n didSomething = true;\n parts.splice(i, 1);\n i--;\n }\n }\n if (parts[0] === '.' &&\n parts.length === 2 &&\n (parts[1] === '.' || parts[1] === '')) {\n didSomething = true;\n parts.pop();\n }\n }\n // //../
-> /\n let dd = 0;\n while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n const p = parts[dd - 1];\n if (p && p !== '.' && p !== '..' && p !== '**') {\n didSomething = true;\n parts.splice(dd - 1, 2);\n dd -= 2;\n }\n }\n } while (didSomething);\n return parts.length === 0 ? [''] : parts;\n }\n // First phase: single-pattern processing\n // is 1 or more portions\n //is 1 or more portions\n // is any portion other than ., .., '', or **\n //
is . or ''\n //\n // **/.. is *brutal* for filesystem walking performance, because\n // it effectively resets the recursive walk each time it occurs,\n // and ** cannot be reduced out by a .. pattern part like a regexp\n // or most strings (other than .., ., and '') can be.\n //\n // /**/..//
/
-> { /..//
/
, /**//
/
}\n // // -> /\n // //../
-> /\n // **/**/ -> **/ \n //\n // **/*/ -> */**/ <== not valid because ** doesn't follow\n // this WOULD be allowed if ** did follow symlinks, or * didn't\n firstPhasePreProcess(globParts) {\n let didSomething = false;\n do {\n didSomething = false;\n // /**/..//
/
-> { /..//
/
, /**//
/
}\n for (let parts of globParts) {\n let gs = -1;\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let gss = gs;\n while (parts[gss + 1] === '**') {\n // /**/**/-> /**/\n gss++;\n }\n // eg, if gs is 2 and gss is 4, that means we have 3 **\n // parts, and can remove 2 of them.\n if (gss > gs) {\n parts.splice(gs + 1, gss - gs);\n }\n let next = parts[gs + 1];\n const p = parts[gs + 2];\n const p2 = parts[gs + 3];\n if (next !== '..')\n continue;\n if (!p ||\n p === '.' ||\n p === '..' ||\n !p2 ||\n p2 === '.' ||\n p2 === '..') {\n continue;\n }\n didSomething = true;\n // edit parts in place, and push the new one\n parts.splice(gs, 1);\n const other = parts.slice(0);\n other[gs] = '**';\n globParts.push(other);\n gs--;\n }\n // // -> /\n if (!this.preserveMultipleSlashes) {\n for (let i = 1; i < parts.length - 1; i++) {\n const p = parts[i];\n // don't squeeze out UNC patterns\n if (i === 1 && p === '' && parts[0] === '')\n continue;\n if (p === '.' || p === '') {\n didSomething = true;\n parts.splice(i, 1);\n i--;\n }\n }\n if (parts[0] === '.' &&\n parts.length === 2 &&\n (parts[1] === '.' || parts[1] === '')) {\n didSomething = true;\n parts.pop();\n }\n }\n // //../
-> /\n let dd = 0;\n while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n const p = parts[dd - 1];\n if (p && p !== '.' && p !== '..' && p !== '**') {\n didSomething = true;\n const needDot = dd === 1 && parts[dd + 1] === '**';\n const splin = needDot ? ['.'] : [];\n parts.splice(dd - 1, 2, ...splin);\n if (parts.length === 0)\n parts.push('');\n dd -= 2;\n }\n }\n }\n } while (didSomething);\n return globParts;\n }\n // second phase: multi-pattern dedupes\n // { /*/, //
} -> /*/\n // { /, /} -> /\n // { /**/, /} -> /**/\n //\n // { /**/, /**//
} -> /**/\n // ^-- not valid because ** doens't follow symlinks\n secondPhasePreProcess(globParts) {\n for (let i = 0; i < globParts.length - 1; i++) {\n for (let j = i + 1; j < globParts.length; j++) {\n const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);\n if (matched) {\n globParts[i] = [];\n globParts[j] = matched;\n break;\n }\n }\n }\n return globParts.filter(gs => gs.length);\n }\n partsMatch(a, b, emptyGSMatch = false) {\n let ai = 0;\n let bi = 0;\n let result = [];\n let which = '';\n while (ai < a.length && bi < b.length) {\n if (a[ai] === b[bi]) {\n result.push(which === 'b' ? b[bi] : a[ai]);\n ai++;\n bi++;\n }\n else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {\n result.push(a[ai]);\n ai++;\n }\n else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {\n result.push(b[bi]);\n bi++;\n }\n else if (a[ai] === '*' &&\n b[bi] &&\n (this.options.dot || !b[bi].startsWith('.')) &&\n b[bi] !== '**') {\n if (which === 'b')\n return false;\n which = 'a';\n result.push(a[ai]);\n ai++;\n bi++;\n }\n else if (b[bi] === '*' &&\n a[ai] &&\n (this.options.dot || !a[ai].startsWith('.')) &&\n a[ai] !== '**') {\n if (which === 'a')\n return false;\n which = 'b';\n result.push(b[bi]);\n ai++;\n bi++;\n }\n else {\n return false;\n }\n }\n // if we fall out of the loop, it means they two are identical\n // as long as their lengths match\n return a.length === b.length && result;\n }\n parseNegate() {\n if (this.nonegate)\n return;\n const pattern = this.pattern;\n let negate = false;\n let negateOffset = 0;\n for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {\n negate = !negate;\n negateOffset++;\n }\n if (negateOffset)\n this.pattern = pattern.slice(negateOffset);\n this.negate = negate;\n }\n // set partial to true to test if, for example,\n // \"/a/b\" matches the start of \"/*/b/*/d\"\n // Partial means, if you run out of file before you run\n // out of pattern, then that's fine, as 
long as all\n // the parts match.\n matchOne(file, pattern, partial = false) {\n const options = this.options;\n // UNC paths like //?/X:/... can match X:/... and vice versa\n // Drive letters in absolute drive or unc paths are always compared\n // case-insensitively.\n if (this.isWindows) {\n const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);\n const fileUNC = !fileDrive &&\n file[0] === '' &&\n file[1] === '' &&\n file[2] === '?' &&\n /^[a-z]:$/i.test(file[3]);\n const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);\n const patternUNC = !patternDrive &&\n pattern[0] === '' &&\n pattern[1] === '' &&\n pattern[2] === '?' &&\n typeof pattern[3] === 'string' &&\n /^[a-z]:$/i.test(pattern[3]);\n const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;\n const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;\n if (typeof fdi === 'number' && typeof pdi === 'number') {\n const [fd, pd] = [file[fdi], pattern[pdi]];\n if (fd.toLowerCase() === pd.toLowerCase()) {\n pattern[pdi] = fd;\n if (pdi > fdi) {\n pattern = pattern.slice(pdi);\n }\n else if (fdi > pdi) {\n file = file.slice(fdi);\n }\n }\n }\n }\n // resolve and reduce . and .. 
portions in the file as well.\n // dont' need to do the second phase, because it's only one string[]\n const { optimizationLevel = 1 } = this.options;\n if (optimizationLevel >= 2) {\n file = this.levelTwoFileOptimize(file);\n }\n this.debug('matchOne', this, { file, pattern });\n this.debug('matchOne', file.length, pattern.length);\n for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {\n this.debug('matchOne loop');\n var p = pattern[pi];\n var f = file[fi];\n this.debug(pattern, p, f);\n // should be impossible.\n // some invalid regexp stuff in the set.\n /* c8 ignore start */\n if (p === false) {\n return false;\n }\n /* c8 ignore stop */\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f]);\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi;\n var pr = pi + 1;\n if (pr === pl) {\n this.debug('** at the end');\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' ||\n file[fi] === '..' 
||\n (!options.dot && file[fi].charAt(0) === '.'))\n return false;\n }\n return true;\n }\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr];\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee);\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee);\n // found a match.\n return true;\n }\n else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' ||\n swallowee === '..' ||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr);\n break;\n }\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue');\n fr++;\n }\n }\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n /* c8 ignore start */\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr);\n if (fr === fl) {\n return true;\n }\n }\n /* c8 ignore stop */\n return false;\n }\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n let hit;\n if (typeof p === 'string') {\n hit = f === p;\n this.debug('string match', p, f, hit);\n }\n else {\n hit = p.test(f);\n this.debug('pattern match', p, f, hit);\n }\n if (!hit)\n return false;\n }\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? 
pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true;\n }\n else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial;\n }\n else if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n return fi === fl - 1 && file[fi] === '';\n /* c8 ignore start */\n }\n else {\n // should be unreachable.\n throw new Error('wtf?');\n }\n /* c8 ignore stop */\n }\n braceExpand() {\n return braceExpand(this.pattern, this.options);\n }\n parse(pattern) {\n assertValidPattern(pattern);\n const options = this.options;\n // shortcuts\n if (pattern === '**')\n return GLOBSTAR;\n if (pattern === '')\n return '';\n // far and away, the most common glob pattern parts are\n // *, *.*, and *. Add a fast check method for those.\n let m;\n let fastTest = null;\n if ((m = pattern.match(starRE))) {\n fastTest = options.dot ? starTestDot : starTest;\n }\n else if ((m = pattern.match(starDotExtRE))) {\n fastTest = (options.nocase\n ? options.dot\n ? starDotExtTestNocaseDot\n : starDotExtTestNocase\n : options.dot\n ? starDotExtTestDot\n : starDotExtTest)(m[1]);\n }\n else if ((m = pattern.match(qmarksRE))) {\n fastTest = (options.nocase\n ? options.dot\n ? qmarksTestNocaseDot\n : qmarksTestNocase\n : options.dot\n ? qmarksTestDot\n : qmarksTest)(m);\n }\n else if ((m = pattern.match(starDotStarRE))) {\n fastTest = options.dot ? 
starDotStarTestDot : starDotStarTest;\n }\n else if ((m = pattern.match(dotStarRE))) {\n fastTest = dotStarTest;\n }\n const re = AST.fromGlob(pattern, this.options).toMMPattern();\n if (fastTest && typeof re === 'object') {\n // Avoids overriding in frozen environments\n Reflect.defineProperty(re, 'test', { value: fastTest });\n }\n return re;\n }\n makeRe() {\n if (this.regexp || this.regexp === false)\n return this.regexp;\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n const set = this.set;\n if (!set.length) {\n this.regexp = false;\n return this.regexp;\n }\n const options = this.options;\n const twoStar = options.noglobstar\n ? star\n : options.dot\n ? twoStarDot\n : twoStarNoDot;\n const flags = new Set(options.nocase ? ['i'] : []);\n // regexpify non-globstar patterns\n // if ** is only item, then we just do one twoStar\n // if ** is first, and there are more, prepend (\\/|twoStar\\/)? to next\n // if ** is last, append (\\/twoStar|) to previous\n // if ** is in the middle, append (\\/|\\/twoStar\\/) to previous\n // then filter out GLOBSTAR symbols\n let re = set\n .map(pattern => {\n const pp = pattern.map(p => {\n if (p instanceof RegExp) {\n for (const f of p.flags.split(''))\n flags.add(f);\n }\n return typeof p === 'string'\n ? regExpEscape(p)\n : p === GLOBSTAR\n ? GLOBSTAR\n : p._src;\n });\n pp.forEach((p, i) => {\n const next = pp[i + 1];\n const prev = pp[i - 1];\n if (p !== GLOBSTAR || prev === GLOBSTAR) {\n return;\n }\n if (prev === undefined) {\n if (next !== undefined && next !== GLOBSTAR) {\n pp[i + 1] = '(?:\\\\/|' + twoStar + '\\\\/)?' 
+ next;\n }\n else {\n pp[i] = twoStar;\n }\n }\n else if (next === undefined) {\n pp[i - 1] = prev + '(?:\\\\/|' + twoStar + ')?';\n }\n else if (next !== GLOBSTAR) {\n pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + '\\\\/)' + next;\n pp[i + 1] = GLOBSTAR;\n }\n });\n return pp.filter(p => p !== GLOBSTAR).join('/');\n })\n .join('|');\n // need to wrap in parens if we had more than one thing with |,\n // otherwise only the first will be anchored to ^ and the last to $\n const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^' + open + re + close + '$';\n // can match anything, as long as it's not this.\n if (this.negate)\n re = '^(?!' + re + ').+$';\n try {\n this.regexp = new RegExp(re, [...flags].join(''));\n /* c8 ignore start */\n }\n catch (ex) {\n // should be impossible\n this.regexp = false;\n }\n /* c8 ignore stop */\n return this.regexp;\n }\n slashSplit(p) {\n // if p starts with // on windows, we preserve that\n // so that UNC paths aren't broken. 
Otherwise, any number of\n // / characters are coalesced into one, unless\n // preserveMultipleSlashes is set to true.\n if (this.preserveMultipleSlashes) {\n return p.split('/');\n }\n else if (this.isWindows && /^\\/\\/[^\\/]+/.test(p)) {\n // add an extra '' for the one we lose\n return ['', ...p.split(/\\/+/)];\n }\n else {\n return p.split(/\\/+/);\n }\n }\n match(f, partial = this.partial) {\n this.debug('match', f, this.pattern);\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) {\n return false;\n }\n if (this.empty) {\n return f === '';\n }\n if (f === '/' && partial) {\n return true;\n }\n const options = this.options;\n // windows: need to use /, not \\\n if (this.isWindows) {\n f = f.split('\\\\').join('/');\n }\n // treat the test path as a set of pathparts.\n const ff = this.slashSplit(f);\n this.debug(this.pattern, 'split', ff);\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n const set = this.set;\n this.debug(this.pattern, 'set', set);\n // Find the basename of the path by looking for the last non-empty segment\n let filename = ff[ff.length - 1];\n if (!filename) {\n for (let i = ff.length - 2; !filename && i >= 0; i--) {\n filename = ff[i];\n }\n }\n for (let i = 0; i < set.length; i++) {\n const pattern = set[i];\n let file = ff;\n if (options.matchBase && pattern.length === 1) {\n file = [filename];\n }\n const hit = this.matchOne(file, pattern, partial);\n if (hit) {\n if (options.flipNegate) {\n return true;\n }\n return !this.negate;\n }\n }\n // didn't get any hits. 
this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) {\n return false;\n }\n return this.negate;\n }\n static defaults(def) {\n return minimatch.defaults(def).Minimatch;\n }\n}\n/* c8 ignore start */\nexport { AST } from './ast.js';\nexport { escape } from './escape.js';\nexport { unescape } from './unescape.js';\n/* c8 ignore stop */\nminimatch.AST = AST;\nminimatch.Minimatch = Minimatch;\nminimatch.escape = escape;\nminimatch.unescape = unescape;\n//# sourceMappingURL=index.js.map","import { copyFileSync, existsSync, mkdirSync, readdirSync, rmSync, statSync } from 'node:fs';\nimport { extname, join, relative } from 'node:path';\nimport { info } from '@actions/core';\nimport { minimatch } from 'minimatch';\n\n/**\n * Checks if a directory contains any Terraform (.tf) files.\n *\n * @param {string} dirPath - The path of the directory to check.\n * @returns {boolean} True if the directory contains at least one .tf file, otherwise false.\n */\nexport function isTerraformDirectory(dirPath: string): boolean {\n return existsSync(dirPath) && readdirSync(dirPath).some((file) => extname(file) === '.tf');\n}\n\n/**\n * Checks if a module path should be ignored based on provided ignore patterns.\n *\n * This function evaluates whether a given module path matches any of the specified ignore patterns\n * using the minimatch library for glob pattern matching.\n *\n * @remarks\n * Important pattern matching behavior notes:\n * - A pattern like \"dir/**\" will match files/directories INSIDE \"dir\" but NOT \"dir\" itself\n * - To match both a directory and its contents, you must include both patterns:\n * [\"dir\", \"dir/**\"]\n * - The function uses matchBase: false for precise path structure matching\n *\n * @example\n * // Will return false (doesn't match the directory itself)\n * shouldIgnoreModulePath('tf-modules/kms/examples/complete', ['tf-modules/kms/examples/complete/**']);\n *\n * @example\n * // Will return true (matches the 
exact path)\n * shouldIgnoreModulePath('tf-modules/kms/examples/complete', ['tf-modules/kms/examples/complete']);\n *\n * @param {string} modulePath - The path of the module to check.\n * @param {string[]} ignorePatterns - Array of path patterns to ignore.\n * @returns {boolean} True if the module should be ignored, false otherwise.\n */\nexport function shouldIgnoreModulePath(modulePath: string, ignorePatterns: string[]): boolean {\n if (!ignorePatterns || ignorePatterns.length === 0) {\n return false;\n }\n\n return ignorePatterns.some((pattern: string) => minimatch(modulePath, pattern, { matchBase: false }));\n}\n\n/**\n * Checks if a file should be excluded from matching based on the defined exclude patterns\n * and relative paths from the base directory.\n *\n * @param {string} baseDirectory - The base directory to resolve relative paths against.\n * @param {string} filePath - The path of the file to check.\n * @param {string[]} excludePatterns - An array of patterns to match against for exclusion.\n * @returns {boolean} True if the file should be excluded, false otherwise.\n */\nexport function shouldExcludeFile(baseDirectory: string, filePath: string, excludePatterns: string[]): boolean {\n const relativePath = relative(baseDirectory, filePath);\n\n // Expand patterns to include both directories and their contents, then remove duplicates\n const expandedPatterns = Array.from(\n new Set(\n excludePatterns.flatMap((pattern) => [\n pattern, // Original pattern\n pattern.replace(/\\/(?:\\*\\*)?$/, ''), // Match directories themselves, like `tests2/`\n ]),\n ),\n );\n\n return expandedPatterns.some((pattern: string) => minimatch(relativePath, pattern, { matchBase: true }));\n}\n\n/**\n * Recursively copies the contents of a directory to a temporary directory,\n * excluding files that match specified patterns.\n *\n * @param {string} directory - The directory to copy from.\n * @param {string} tmpDir - The temporary directory to copy to.\n * @param {string[]} 
excludePatterns - An array of patterns to match against for exclusion.\n * @param {string} [baseDirectory] - The base directory for exclusion pattern matching.\n * Defaults to the source directory if not provided.\n */\nexport function copyModuleContents(\n directory: string,\n tmpDir: string,\n excludePatterns: string[],\n baseDirectory?: string,\n) {\n const baseDir = baseDirectory ?? directory;\n\n // Read the directory contents\n const filesToCopy = readdirSync(directory);\n\n info(`Copying \"${directory}\" to directory: ${tmpDir}`);\n for (const file of filesToCopy) {\n const filePath = join(directory, file);\n const stats = statSync(filePath);\n\n if (stats.isDirectory()) {\n // If the item is a directory, create the directory in tmpDir and copy its contents\n const newDir = join(tmpDir, file);\n mkdirSync(newDir, { recursive: true });\n // Note: Important we pass the original base directory.\n copyModuleContents(filePath, newDir, excludePatterns, baseDir); // Recursion for directory contents\n } else if (!shouldExcludeFile(baseDir, filePath, excludePatterns)) {\n // Handle file copying\n copyFileSync(filePath, join(tmpDir, file));\n } else {\n info(`Excluding file: ${filePath}`);\n }\n }\n}\n\n/**\n * Removes all contents of a specified directory except for specified items to preserve.\n *\n * @param directory - The path of the directory to clear.\n * @param exceptions - An array of filenames or directory names to preserve within the directory.\n *\n * This function removes all files and subdirectories within the specified directory while\n * retaining any items listed in the `exceptions` array. 
The names in `exceptions` should be\n * relative to the `directory` (e.g., `['.git', 'README.md']`), referring to items within the\n * directory you want to keep.\n *\n * ### Example Usage:\n *\n * Suppose you have a directory structure:\n * ```\n * /example-directory/\n * ├── .git/\n * ├── config.json\n * ├── temp/\n * └── README.md\n * ```\n *\n * Using `removeDirectoryContents('/example-directory', ['.git', 'README.md'])` will:\n * - Remove `config.json` and the `temp` folder.\n * - Preserve the `.git` directory and `README.md` file within `/example-directory`.\n *\n * **Note:**\n * - Items in `exceptions` are matched only by their names relative to the given `directory`.\n * - If the `.git` directory or `README.md` file were in a nested subdirectory within `/example-directory`,\n * you would need to adjust the `exceptions` parameter accordingly to reflect the correct relative path.\n *\n * @example\n * removeDirectoryContents('/home/user/project', ['.git', 'important-file.txt']);\n * // This would remove all contents inside `/home/user/project`, except for the `.git` directory\n * // and the `important-file.txt` file.\n */\nexport function removeDirectoryContents(directory: string, exceptions: string[] = []): void {\n if (!existsSync(directory)) {\n return;\n }\n\n for (const item of readdirSync(directory)) {\n const itemPath = join(directory, item);\n\n // Skip removal for items listed in the exceptions array\n if (!shouldExcludeFile(directory, itemPath, exceptions)) {\n //if (!exceptions.includes(item)) {\n rmSync(itemPath, { recursive: true, force: true });\n }\n }\n info(`Removed contents of directory [${directory}], preserving items: ${exceptions.join(', ')}`);\n}\n","/*\nHow it works:\n`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. 
When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.\n*/\n\nclass Node {\n\tvalue;\n\tnext;\n\n\tconstructor(value) {\n\t\tthis.value = value;\n\t}\n}\n\nexport default class Queue {\n\t#head;\n\t#tail;\n\t#size;\n\n\tconstructor() {\n\t\tthis.clear();\n\t}\n\n\tenqueue(value) {\n\t\tconst node = new Node(value);\n\n\t\tif (this.#head) {\n\t\t\tthis.#tail.next = node;\n\t\t\tthis.#tail = node;\n\t\t} else {\n\t\t\tthis.#head = node;\n\t\t\tthis.#tail = node;\n\t\t}\n\n\t\tthis.#size++;\n\t}\n\n\tdequeue() {\n\t\tconst current = this.#head;\n\t\tif (!current) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis.#head = this.#head.next;\n\t\tthis.#size--;\n\t\treturn current.value;\n\t}\n\n\tpeek() {\n\t\tif (!this.#head) {\n\t\t\treturn;\n\t\t}\n\n\t\treturn this.#head.value;\n\n\t\t// TODO: Node.js 18.\n\t\t// return this.#head?.value;\n\t}\n\n\tclear() {\n\t\tthis.#head = undefined;\n\t\tthis.#tail = undefined;\n\t\tthis.#size = 0;\n\t}\n\n\tget size() {\n\t\treturn this.#size;\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tlet current = this.#head;\n\n\t\twhile (current) {\n\t\t\tyield current.value;\n\t\t\tcurrent = current.next;\n\t\t}\n\t}\n\n\t* drain() {\n\t\tlet current;\n\t\twhile ((current = this.dequeue()) !== undefined) {\n\t\t\tyield current;\n\t\t}\n\t}\n}\n","import Queue from 'yocto-queue';\n\nexport default function pLimit(concurrency) {\n\tvalidateConcurrency(concurrency);\n\n\tconst queue = new Queue();\n\tlet activeCount = 0;\n\n\tconst resumeNext = () => {\n\t\tif (activeCount < concurrency && queue.size > 0) {\n\t\t\tqueue.dequeue()();\n\t\t\t// Since `pendingCount` has been decreased by one, increase `activeCount` by one.\n\t\t\tactiveCount++;\n\t\t}\n\t};\n\n\tconst next = () => 
{\n\t\tactiveCount--;\n\n\t\tresumeNext();\n\t};\n\n\tconst run = async (function_, resolve, arguments_) => {\n\t\tconst result = (async () => function_(...arguments_))();\n\n\t\tresolve(result);\n\n\t\ttry {\n\t\t\tawait result;\n\t\t} catch {}\n\n\t\tnext();\n\t};\n\n\tconst enqueue = (function_, resolve, arguments_) => {\n\t\t// Queue `internalResolve` instead of the `run` function\n\t\t// to preserve asynchronous context.\n\t\tnew Promise(internalResolve => {\n\t\t\tqueue.enqueue(internalResolve);\n\t\t}).then(\n\t\t\trun.bind(undefined, function_, resolve, arguments_),\n\t\t);\n\n\t\t(async () => {\n\t\t\t// This function needs to wait until the next microtask before comparing\n\t\t\t// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously\n\t\t\t// after the `internalResolve` function is dequeued and called. The comparison in the if-statement\n\t\t\t// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.\n\t\t\tawait Promise.resolve();\n\n\t\t\tif (activeCount < concurrency) {\n\t\t\t\tresumeNext();\n\t\t\t}\n\t\t})();\n\t};\n\n\tconst generator = (function_, ...arguments_) => new Promise(resolve => {\n\t\tenqueue(function_, resolve, arguments_);\n\t});\n\n\tObject.defineProperties(generator, {\n\t\tactiveCount: {\n\t\t\tget: () => activeCount,\n\t\t},\n\t\tpendingCount: {\n\t\t\tget: () => queue.size,\n\t\t},\n\t\tclearQueue: {\n\t\t\tvalue() {\n\t\t\t\tqueue.clear();\n\t\t\t},\n\t\t},\n\t\tconcurrency: {\n\t\t\tget: () => concurrency,\n\n\t\t\tset(newConcurrency) {\n\t\t\t\tvalidateConcurrency(newConcurrency);\n\t\t\t\tconcurrency = newConcurrency;\n\n\t\t\t\tqueueMicrotask(() => {\n\t\t\t\t\t// eslint-disable-next-line no-unmodified-loop-condition\n\t\t\t\t\twhile (activeCount < concurrency && queue.size > 0) {\n\t\t\t\t\t\tresumeNext();\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t},\n\t\t},\n\t});\n\n\treturn generator;\n}\n\nexport function limitFunction(function_, option) {\n\tconst {concurrency} = 
option;\n\tconst limit = pLimit(concurrency);\n\n\treturn (...arguments_) => limit(() => function_(...arguments_));\n}\n\nfunction validateConcurrency(concurrency) {\n\tif (!((Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY) && concurrency > 0)) {\n\t\tthrow new TypeError('Expected `concurrency` to be a number from 1 and up');\n\t}\n}\n","import { execFileSync } from 'node:child_process';\nimport type { ExecSyncOptions } from 'node:child_process';\nimport { existsSync, mkdirSync } from 'node:fs';\nimport * as fsp from 'node:fs/promises';\nimport { cpus } from 'node:os';\nimport { join, resolve } from 'node:path';\nimport { getModuleReleaseChangelog } from '@/changelog';\nimport { config } from '@/config';\nimport { context } from '@/context';\nimport { generateTerraformDocs } from '@/terraform-docs';\nimport type { ExecSyncError, TerraformModule } from '@/types';\nimport {\n BRANDING_WIKI,\n GITHUB_ACTIONS_BOT_EMAIL,\n GITHUB_ACTIONS_BOT_NAME,\n PROJECT_URL,\n WIKI_TITLE_REPLACEMENTS,\n} from '@/utils/constants';\nimport { removeDirectoryContents } from '@/utils/file';\nimport { endGroup, info, startGroup } from '@actions/core';\nimport pLimit from 'p-limit';\nimport which from 'which';\n\nexport enum WikiStatus {\n SUCCESS = 'SUCCESS',\n FAILURE = 'FAILURE',\n DISABLED = 'DISABLED',\n}\n\n// Special subdirectory inside the primary repository where the wiki is checked out.\nconst WIKI_SUBDIRECTORY_NAME = '.wiki';\n\n/**\n * Clones the wiki repository for the current GitHub repository into a specified subdirectory.\n *\n * This function constructs the wiki Git URL using the current repository context and executes\n * a `git clone` command with a depth of 1 to fetch only the latest commit. The subdirectory\n * for the wiki is created if it doesn't already exist. If the wiki does not exist or is not enabled,\n * an error will be caught and logged.\n *\n * Note: It's important we clone via SSH and not HTTPS. 
Will likely need to test cloning this for\n * self-hosted GitHub enterprise on custom domain as this hasn't been done.\n *\n * @throws {Error} If the `git clone` command fails due to issues such as the wiki not existing.\n */\nexport function checkoutWiki(): void {\n const wikiHtmlUrl = `${context.repoUrl}.wiki`;\n const wikiDirectory = resolve(context.workspaceDir, WIKI_SUBDIRECTORY_NAME);\n const execWikiOpts: ExecSyncOptions = { cwd: wikiDirectory, stdio: 'inherit' };\n\n startGroup(`Checking out wiki repository [${wikiHtmlUrl}]`);\n\n const gitPath = which.sync('git');\n\n info('Adding repository directory to the temporary git global config as a safe directory');\n execFileSync(gitPath, ['config', '--global', '--add', 'safe.directory', wikiDirectory], { stdio: 'inherit' });\n\n // Create directory if it doesn't exist\n if (!existsSync(wikiDirectory)) {\n mkdirSync(wikiDirectory);\n }\n\n // Initialize repository if needed\n const isExistingRepo = existsSync(join(wikiDirectory, '.git'));\n if (!isExistingRepo) {\n info('Initializing new repository');\n execFileSync(gitPath, ['init', '--initial-branch=master', wikiDirectory], execWikiOpts);\n }\n\n // Set or update the remote URL\n info('Configuring remote URL');\n const remoteOutput = execFileSync(gitPath, ['remote'], { cwd: wikiDirectory, encoding: 'utf8' }) || '';\n const hasRemote = remoteOutput.includes('origin');\n if (hasRemote) {\n execFileSync(gitPath, ['remote', 'set-url', 'origin', wikiHtmlUrl], execWikiOpts);\n } else {\n execFileSync(gitPath, ['remote', 'add', 'origin', wikiHtmlUrl], execWikiOpts);\n }\n\n info('Configuring authentication');\n const basicCredential = Buffer.from(`x-access-token:${config.githubToken}`, 'utf8').toString('base64');\n try {\n execFileSync(gitPath, ['config', '--local', '--unset-all', 'http.https://github.com/.extraheader'], execWikiOpts);\n } catch (error) {\n // Type guard to ensure we're handling the correct error type\n // Only ignore specific status code if needed\n 
if (error instanceof Error && (error as unknown as ExecSyncError).status !== 5) {\n throw error;\n }\n }\n\n execFileSync(\n gitPath,\n ['config', '--local', 'http.https://github.com/.extraheader', `Authorization: Basic ${basicCredential}`],\n execWikiOpts,\n );\n\n try {\n info('Fetching the repository');\n execFileSync(\n gitPath,\n [\n 'fetch',\n '--no-tags',\n '--prune',\n '--no-recurse-submodules',\n '--depth=1',\n 'origin',\n '+refs/heads/master*:refs/remotes/origin/master*',\n '+refs/tags/master*:refs/tags/master*',\n ],\n execWikiOpts,\n );\n\n execFileSync(gitPath, ['checkout', 'master'], execWikiOpts);\n\n info('Successfully checked out wiki repository');\n } finally {\n endGroup();\n }\n}\n\n/**\n * Generates a sanitized slug for a GitHub Wiki title by replacing specific characters in the\n * provided module name with visually similar substitutes to avoid path conflicts and improve display.\n * This function dynamically creates a regular expression from the keys in the `WIKI_TITLE_REPLACEMENTS`\n * map, ensuring any added replacements in the map will be automatically accounted for in future\n * conversions.\n *\n * **Important**: Refer to `WIKI_TITLE_REPLACEMENTS` in `constants.ts` to add or update replacement mappings.\n *\n * @param {string} moduleName - The original module name to be transformed into a GitHub Wiki-compatible slug.\n * @returns {string} - The modified module name, with specified characters replaced by corresponding entries\n * in the `WIKI_TITLE_REPLACEMENTS` map.\n *\n * @example\n * // Example usage:\n * // Assuming WIKI_TITLE_REPLACEMENTS = { '/': '∕', '-': '‒' }\n * const moduleName = 'my-module/name';\n * const wikiSlug = getWikiSlug(moduleName);\n * // Returns: \"my‒module∕name\"\n *\n * @remarks\n * This function avoids manual regex maintenance by dynamically building a character class from the keys in\n * `WIKI_TITLE_REPLACEMENTS`. 
To handle special characters in these keys, the `escapeForRegex` helper function\n * escapes regex metacharacters as needed.\n *\n * The `escapeForRegex` helper:\n * - Escapes metacharacters (e.g., `*`, `.`, `+`, `?`, `^`, `$`, `{`, `}`, `(`, `)`, `|`, `[`, `]`, `\\`)\n * to ensure they are interpreted literally within the regular expression.\n *\n * Dynamic regex creation:\n * - `Object.keys(WIKI_TITLE_REPLACEMENTS).map(escapeForRegex).join('')` generates an escaped sequence\n * of characters for replacement and constructs a character class for the `pattern` regex.\n *\n * Replacement logic:\n * - `moduleName.replace(pattern, match => WIKI_TITLE_REPLACEMENTS[match])` matches each specified character\n * in `moduleName` and replaces it with the mapped character from `WIKI_TITLE_REPLACEMENTS`.\n */\nfunction getWikiSlug(moduleName: string): string {\n const escapeForRegex = (char: string): string => {\n return char.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&'); // Escape special characters for regex\n };\n\n const pattern = new RegExp(`[${Object.keys(WIKI_TITLE_REPLACEMENTS).map(escapeForRegex).join('')}]`, 'g');\n\n return moduleName.replace(pattern, (match) => WIKI_TITLE_REPLACEMENTS[match]);\n}\n\n/**\n * Generates a URL to the wiki page for a given Terraform module.\n *\n * @param {string} moduleName - The name of the Terraform module. 
The function extracts the base name and\n * removes the file extension (if any) to form the slug.\n * @param {boolean} [relative=true] - A flag indicating whether to return a relative URL\n * (default) or an absolute URL.\n * - If `true`, returns a relative URL based on the repository owner and name.\n * - If `false`, uses the full repository URL from `context.repoUrl`.\n * @returns {string} - The full wiki link for the module based on the provided module name and URL\n * type (relative or absolute).\n *\n * @example\n * // Returns a relative URL for a module\n * getWikiLink('terraform-aws-vpc'); // \"/owner/repo/wiki/terraform-aws-vpc\"\n *\n * @example\n * // Returns an absolute URL for a module\n * getWikiLink('aws/terraform-aws-vpc', false); // \"https://github.com/owner/repo/wiki/terraform-aws-vpc\"\n */\nexport function getWikiLink(moduleName: string, relative = true): string {\n let baseUrl: string;\n if (relative) {\n baseUrl = `/${context.repo.owner}/${context.repo.repo}`;\n } else {\n baseUrl = context.repoUrl;\n }\n\n return `${baseUrl}/wiki/${getWikiSlug(moduleName)}`;\n}\n\n/**\n * Formats the module source URL based on configuration settings.\n *\n * @param repoUrl - The repository URL\n * @param useSSH - Whether to use SSH format\n * @returns The formatted source URL for the module\n */\nfunction formatModuleSource(repoUrl: string, useSSH: boolean): string {\n if (useSSH) {\n // Convert HTTPS URL to SSH format\n // From: https://github.com/owner/repo\n // To: ssh://git@github.com/owner/repo\n return `ssh://${repoUrl.replace(/^https:\\/\\/github\\.com/, 'git@github.com')}.git`;\n }\n return `${repoUrl}.git`;\n}\n\n/**\n * Generates the wiki file associated with the specified Terraform module.\n * Ensures that the directory structure is created if it doesn't exist and handles overwriting\n * the existing wiki file.\n *\n * @param {string} moduleName - The name of the Terraform module.\n * @param {string} content - The markdown content to write to the 
wiki file.\n * @returns {Promise } The path to the wiki file that was written.\n * @throws Will throw an error if the file cannot be written.\n */\nasync function generateWikiModule(terraformModule: TerraformModule): Promise {\n const { moduleName, latestTag } = terraformModule;\n\n const wikiSlugFile = `${getWikiSlug(moduleName)}.md`;\n const wikiFile = join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME, wikiSlugFile);\n\n // Generate a module changelog\n const changelog = getModuleReleaseChangelog(terraformModule);\n const tfDocs = await generateTerraformDocs(terraformModule);\n const moduleSource = formatModuleSource(context.repoUrl, config.useSSHSourceFormat);\n const wikiContent = [\n '# Usage\\n',\n 'To use this module in your Terraform, refer to the below module example:\\n',\n '```hcl',\n `module \"${moduleName.replace(/[^a-zA-Z0-9]/g, '_').toLowerCase()}\" {`,\n ` source = \"git::${moduleSource}?ref=${latestTag}\"`,\n '\\n # See inputs below for additional required parameters',\n '}',\n '```',\n '\\n# Attributes\\n',\n '',\n tfDocs,\n '',\n '\\n# Changelog\\n',\n changelog,\n ].join('\\n');\n\n // Write the markdown content to the wiki file, overwriting if it exists\n await fsp.writeFile(wikiFile, wikiContent, 'utf8');\n\n info(`Generated: ${wikiSlugFile}`);\n\n return wikiFile;\n}\n\n/**\n * Generates the Wiki sidebar with a list of Terraform modules, including changelog entries for each.\n *\n * This function generates a dynamic sidebar for the GitHub Wiki by iterating over the provided\n * Terraform modules, extracting their changelog content, and formatting it into a nested list\n * with relevant links to sections within each module's Wiki page (e.g., \"Usage\", \"Attributes\",\n * and \"Changelog\"). The generated content is then written to the `_Sidebar.md` file.\n *\n * @param {TerraformModule[]} terraformModules - An array of Terraform modules for which the Wiki\n * sidebar will be updated. 
Each module contains the `moduleName`, and its changelog is fetched\n * to generate sidebar entries.\n * @returns {Promise } - A promise that resolves with the path of the sidebar file once it has been\n * successfully updated and written.\n *\n * Function Details:\n * - Uses the `context.repo` object to get the repository owner and name for building links.\n * - The sidebar file is located in the `WIKI_DIRECTORY` and is named `_Sidebar.md`.\n * - For each module, it uses `getWikiLink()` to create the base link and `getModuleReleaseChangelog()`\n * to extract relevant changelog headings (matching `##` or `###`).\n * - Headings are converted into valid HTML IDs and displayed as linked list items (` - `),\n * limiting the number of changelog entries based on the configuration\n * (`config.wikiSidebarChangelogMax`).\n * - Writes the final content, including links to Home and the module Wiki pages, to the sidebar file.\n *\n * Example Sidebar Entry:\n * ```\n *
- \n *
\n * ```\n */\nasync function generateWikiSidebar(terraformModules: TerraformModule[]): Promise\n *\n *null/random
\n *\n *
\n *- Usage
\n *- Attributes
\n *- Changelog\n *
\n *\n *
\n *- v1.2.0 (2024-10-15)
\n *- v1.1.0 (2024-10-11)
\n *- v1.0.0 (2024-10-10)
\n *{\n const sidebarFile = join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME, '_Sidebar.md');\n const { owner, repo } = context.repo;\n const repoBaseUrl = `/${owner}/${repo}`;\n let moduleSidebarContent = '';\n\n for (const module of terraformModules) {\n const { moduleName } = module;\n\n // Get the baselink which is used throughout the sidebar\n const baselink = getWikiLink(moduleName, true);\n\n // Generate module changelog string by limiting to wikiSidebarChangelogMax\n const changelogContent = getModuleReleaseChangelog(module);\n\n // Regex to capture all headings starting with '## ' on a single line\n // Note: Use ([^\\n]+) Instead of (.+):\n // The pattern [^\\n]+ matches one or more characters that are not a newline. This restricts matches\n // to a single line and reduces backtracking possibilities since it won't consume any newlines.\n const headingRegex = /^(?:#{2,3})\\s+([^\\n]+)/gm; // Matches '##' or '###' headings\n\n // Initialize changelog entries\n const changelogEntries = [];\n let headingMatch = null;\n do {\n // If a match is found, process it\n if (headingMatch) {\n const heading = headingMatch[1].trim();\n\n // Convert heading into a valid ID string (keep only [a-zA-Z0-9-_]) But we need spaces to go to a '-'\n const idString = heading.replace(/ +/g, '-').replace(/[^a-zA-Z0-9-_]/g, '');\n\n // Append the entry to changelogEntries\n changelogEntries.push(\n ` - ${heading.replace(/`/g, '')}
`,\n );\n }\n\n // Execute the regex again for the next match\n headingMatch = headingRegex.exec(changelogContent);\n } while (headingMatch);\n\n // Limit to the maximum number of changelog entries defined in config\n const limitedChangelogEntries = changelogEntries.slice(0, config.wikiSidebarChangelogMax).join('\\n');\n\n // Wrap changelog inif it's not empty\n let changelog = '';\n if (limitedChangelogEntries.length > 0) {\n changelog = `\\n
\\n${limitedChangelogEntries}\\n
\\n `;\n }\n\n moduleSidebarContent += [\n '\\n- ',\n '
',\n ].join('\\n');\n }\n\n const content = `[Home](${repoBaseUrl}/wiki/Home)\\n\\n## Terraform Modules\\n\\n',\n `',\n '${moduleName}
`,\n '',\n `
',\n '- Usage
`,\n `- Attributes
`,\n `- Changelog${changelog}`,\n '
${moduleSidebarContent}\\n
`;\n\n await fsp.writeFile(sidebarFile, content, 'utf8');\n\n info('Generated: _Sidebar.md');\n\n return sidebarFile;\n}\n\n/**\n * Generates the `_Footer.md` file in the wiki directory to maintain consistent branding content.\n *\n * This function checks whether branding is enabled:\n * - If branding is disabled, the function exits early without making any changes.\n * - If branding is enabled, it creates or updates the `_Footer.md` file with the specified branding content.\n *\n * @returns {Promise} A promise that resolves to the footer file path if updated, or undefined if no update is necessary.\n * @throws {Error} Logs an error if the file creation or update fails.\n */\nasync function generateWikiFooter(): Promise {\n if (config.disableBranding) {\n info('Skipping footer generation as branding is disabled');\n return;\n }\n\n const footerFile = join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME, '_Footer.md');\n await fsp.writeFile(footerFile, BRANDING_WIKI, 'utf8');\n info('Generated: _Footer.md');\n return footerFile;\n}\n\n/**\n * Generates the Home.md file for the Terraform Modules Wiki.\n *\n * This function creates a Markdown file that serves as an index for all available Terraform modules,\n * providing an overview of their functionality and the latest versions. It includes sections for current\n * modules, usage instructions, and contribution guidelines.\n *\n * @param {TerraformModule[]} terraformModules - An array of TerraformModule objects containing the\n * names and latest version tags of the modules.\n * @returns {Promise } A promise that resolves to the path of the generated Home.md file.\n * @throws {Error} Throws an error if the file writing operation fails.\n */\nasync function generateWikiHome(terraformModules: TerraformModule[]): Promise {\n const homeFile = join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME, 'Home.md');\n\n const content = [\n '# Terraform Modules Home',\n '\\nWelcome to the Terraform Modules Wiki! 
This page serves as an index for all the available Terraform modules,',\n 'providing an overview of their functionality and the latest versions.',\n '\\n## Current Terraform Modules',\n '\\n| Module Name | Latest Version |',\n '| -- | -- |',\n terraformModules\n .map(\n ({ moduleName, latestTagVersion }) =>\n `| [${moduleName}](${getWikiLink(moduleName, true)}) | ${latestTagVersion} |`,\n )\n .join('\\n'),\n '\\n## How to Use',\n '\\nEach module listed above can be imported into your Terraform configurations. For detailed instructions on',\n 'usage and examples, refer to the documentation links provided in the table.',\n '\\n## Contributing',\n 'If you would like to contribute to these modules or report issues, please visit the ',\n `[GitHub Repository](${context.repoUrl}) for more information.`,\n '\\n---',\n `\\n*This wiki is automatically generated as part of the [Terraform Module Releaser](${PROJECT_URL}) project.`,\n 'For the latest updates, please refer to the individual module documentation links above.*',\n ].join('\\n');\n\n await fsp.writeFile(homeFile, content, 'utf8');\n info('Generated: Home.md');\n\n return homeFile;\n}\n\n/**\n * Updates the wiki documentation for a list of Terraform modules.\n *\n * This function generates markdown content for each Terraform module by calling\n * `getWikiFileMarkdown` and appending its associated changelog, then writes the\n * content to the wiki. 
It commits and pushes the changes to the wiki repository.\n *\n * The function limits the number of concurrent wiki updates by using `pLimit`.\n * Once all wiki files are updated, it commits and pushes the changes to the repository.\n *\n * @param {TerraformModule[]} terraformModules - A list of Terraform modules to update in the wiki.\n *\n * @returns {Promise } A promise that resolves to a list of file paths of the updated wiki files.\n */\nexport async function generateWikiFiles(terraformModules: TerraformModule[]): Promise {\n startGroup('Generating wiki ...');\n\n // Clears the contents of the Wiki directory to ensure no stale content remains,\n // as the Wiki is fully regenerated during each run.\n //\n // This process:\n // - Logs the cleanup action for tracking purposes.\n // - Removes all files and directories within `WIKI_DIRECTORY` except `.git`,\n // which is preserved to maintain version control and Git history.\n //\n // This approach supports:\n // - Ensuring the Wiki remains up-to-date without leftover or outdated files.\n // - Avoiding conflicts or unexpected results due to stale data.\n info('Removing existing wiki files...');\n removeDirectoryContents(join(context.workspaceDir, WIKI_SUBDIRECTORY_NAME), ['.git']);\n\n const parallelism = cpus().length + 2;\n\n info(`Using parallelism: ${parallelism}`);\n\n const limit = pLimit(parallelism);\n const updatedFiles: string[] = [];\n const tasks = terraformModules.map((module) => {\n return limit(async () => {\n updatedFiles.push(await generateWikiModule(module));\n });\n });\n await Promise.all(tasks);\n\n updatedFiles.push(await generateWikiHome(terraformModules));\n updatedFiles.push(await generateWikiSidebar(terraformModules));\n const footerFile = await generateWikiFooter();\n if (footerFile) {\n updatedFiles.push(footerFile);\n }\n\n info('Wiki files generated:');\n console.log(updatedFiles);\n endGroup();\n\n return updatedFiles;\n}\n\n/**\n * Commits and pushes changes to the wiki repository.\n 
*\n * This function checks for any changes in the wiki directory, and if there are changes,\n * it commits and pushes them using the provided commit message.\n *\n * @returns {void}\n */\nexport function commitAndPushWikiChanges(): void {\n startGroup('Committing and pushing changes to wiki');\n\n try {\n const { prNumber, prTitle } = context;\n\n // Note: We originally used the PR title and PR body to create the commit message; however, due to the way\n // GitHub formats the commits/revision history for the Wiki it's designed to be smaller and thus we use\n // the PR title for now.\n // Ref: https://github.com/techpivot/terraform-modules-demo/wiki/aws%E2%88%95s3%E2%80%92bucket%E2%80%92object/_history\n // Ref: https://github.com/techpivot/terraform-module-releaser/issues/95\n const commitMessage = `PR #${prNumber} - ${prTitle}`.trim();\n const wikiDirectory = resolve(context.workspaceDir, WIKI_SUBDIRECTORY_NAME);\n const execWikiOpts: ExecSyncOptions = { cwd: wikiDirectory, stdio: 'inherit' };\n const gitPath = which.sync('git');\n\n // Check if there are any changes (otherwise add/commit/push will error)\n info('Checking for changes in wiki repository');\n const status = execFileSync(gitPath, ['status', '--porcelain'], { cwd: wikiDirectory });\n info(`git status output: ${status.toString().trim()}`);\n\n if (status !== null && status.toString().trim() !== '') {\n // There are changes, commit and push\n for (const cmd of [\n ['config', '--local', 'user.name', GITHUB_ACTIONS_BOT_NAME],\n ['config', '--local', 'user.email', GITHUB_ACTIONS_BOT_EMAIL],\n ['add', '.'],\n ['commit', '-m', commitMessage.trim()],\n ['push', 'origin'],\n ]) {\n execFileSync(gitPath, cmd, execWikiOpts);\n }\n\n info('Changes committed and pushed to wiki repository');\n } else {\n info('No changes detected, skipping commit and push');\n }\n } finally {\n endGroup();\n }\n}\n","import { getPullRequestChangelog } from '@/changelog';\nimport { config } from '@/config';\nimport { context } from 
'@/context';\nimport type { CommitDetails, GitHubRelease, ReleasePlanCommentOptions, TerraformChangedModule } from '@/types';\nimport { BRANDING_COMMENT, GITHUB_ACTIONS_BOT_USER_ID, PR_RELEASE_MARKER, PR_SUMMARY_MARKER } from '@/utils/constants';\nimport { WikiStatus, getWikiLink } from '@/wiki';\nimport { debug, endGroup, info, startGroup } from '@actions/core';\nimport { RequestError } from '@octokit/request-error';\n\n/**\n * Checks whether the pull request already has a comment containing the release marker.\n *\n * @returns {Promise } - Returns true if a comment with the release marker is found, false otherwise.\n */\nexport async function hasReleaseComment(): Promise {\n try {\n const {\n octokit,\n repo: { owner, repo },\n issueNumber: issue_number,\n } = context;\n\n // Fetch all comments on the pull request\n const iterator = octokit.paginate.iterator(octokit.rest.issues.listComments, {\n owner,\n repo,\n issue_number,\n });\n\n for await (const { data } of iterator) {\n for (const comment of data) {\n if (comment.user?.id === GITHUB_ACTIONS_BOT_USER_ID && comment.body?.includes(PR_RELEASE_MARKER)) {\n return true;\n }\n }\n }\n\n return false;\n } catch (error) {\n const requestError = error as RequestError;\n // If we got a 403 because the pull request doesn't have permissions. Let's really help wrap this error\n // and make it clear to the consumer what actions need to be taken.\n if (requestError.status === 403) {\n throw new Error(\n `Unable to read and write pull requests due to insufficient permissions. Ensure the workflow permissions.pull-requests is set to \"write\".\\n${requestError.message}`,\n { cause: error },\n );\n }\n\n throw new Error(`Error checking PR comments: ${error instanceof Error ? 
error.message : String(error)}`, {\n cause: error,\n });\n }\n}\n\n/**\n * Retrieves the list of changed files in the pull request and returns them as a Set.\n *\n * @returns {Promise >} A promise that resolves to a Set of filenames representing the changed files.\n * @throws {RequestError} Throws an error if the request to fetch files fails or if permissions are insufficient.\n */\nasync function getChangedFilesInPullRequest(): Promise > {\n try {\n const {\n octokit,\n repo: { owner, repo },\n prNumber: pull_number,\n } = context;\n\n const iterator = octokit.paginate.iterator(octokit.rest.pulls.listFiles, { owner, repo, pull_number });\n\n const changedFiles = new Set ();\n for await (const { data } of iterator) {\n for (const file of data) {\n changedFiles.add(file.filename);\n }\n }\n\n return changedFiles;\n } catch (error) {\n const requestError = error as RequestError;\n // Handle 403 error specifically for permission issues\n if (requestError.status === 403) {\n throw new Error(\n `Unable to read and write pull requests due to insufficient permissions. Ensure the workflow permissions.pull-requests is set to \"write\".\\n${requestError.message}`,\n { cause: error },\n );\n }\n\n throw new Error(`Error getting changed files in PR: ${error instanceof Error ? error.message : String(error)}`, {\n cause: error,\n });\n }\n}\n\n/**\n * Retrieves the commits associated with a specific pull request, ensuring that only true, effective file changes are tracked.\n *\n * This function first queries the entire set of changed files within the pull request, which includes files modified across\n * all commits within the PR. It then filters and processes the changes to ensure that modifications reverted by subsequent\n * commits are not tracked as effective changes. 
This approach helps avoid tracking transient changes that cancel each other out.\n *\n * If a pull request contains two commits, where one modifies a Terraform module and a subsequent commit reverts that modification,\n * both commits would normally be detected as changes to the module. However, the final result may not reflect any actual changes\n * if the second commit effectively reverts the first.\n *\n * To address this, we ensure that only effective file changes are tracked—ignoring changes that cancel each other out.\n *\n * First observed in this Pull Request where earlier commits triggered changes to a test Terraform module and later commits\n * reverted it: #21\n *\n * @returns {Promise } A promise that resolves to an array of commit details,\n * each containing the message, SHA, and associated file paths.\n * @throws {RequestError} Throws an error if the request to fetch commits fails or if permissions\n * are insufficient to read the pull request.\n */\nexport async function getPullRequestCommits(): Promise {\n console.time('Elapsed time fetching commits');\n startGroup('Fetching pull request commits');\n\n try {\n const {\n octokit,\n repo: { owner, repo },\n prNumber: pull_number,\n } = context;\n\n const prChangedFiles = await getChangedFilesInPullRequest();\n info(`Found ${prChangedFiles.size} file${prChangedFiles.size !== 1 ? 
's' : ''} changed in pull request.`);\n info(JSON.stringify(Array.from(prChangedFiles), null, 2));\n\n const iterator = octokit.paginate.iterator(octokit.rest.pulls.listCommits, { owner, repo, pull_number });\n\n // Iterate over the fetched commits to retrieve details and files\n const commits = [];\n for await (const { data } of iterator) {\n for (const commit of data) {\n const commitDetailsResponse = await octokit.rest.repos.getCommit({\n owner,\n repo,\n ref: commit.sha,\n });\n\n // Filter files to only include those that are part of prChangedFiles\n const files =\n commitDetailsResponse.data.files\n ?.map((file) => file.filename)\n .filter((filename) => prChangedFiles.has(filename)) ?? [];\n\n commits.push({\n message: commit.commit.message,\n sha: commit.sha,\n files,\n });\n }\n }\n\n info(`Found ${commits.length} commit${commits.length !== 1 ? 's' : ''}.`);\n debug(JSON.stringify(commits, null, 2));\n\n return commits;\n } catch (error) {\n const requestError = error as RequestError;\n\n if (requestError.status === 403) {\n throw new Error(\n `Unable to read and write pull requests due to insufficient permissions. Ensure the workflow permissions.pull-requests is set to \"write\".\\n${requestError.message}`,\n { cause: error },\n );\n }\n throw error;\n /* c8 ignore next */\n } finally {\n console.timeEnd('Elapsed time fetching commits');\n endGroup();\n }\n}\n\n/**\n * Comments on a pull request with a summary of the changes made to Terraform modules,\n * including details about the release plan and any modules that will be removed from the Wiki.\n *\n * This function constructs a markdown table displaying the release plan for changed Terraform modules,\n * noting their release types and versions. It also handles modules that are no longer present in the source\n * and will be removed from the Wiki upon release.\n *\n * @param {TerraformChangedModule[]} terraformChangedModules - An array of objects representing the\n * changed Terraform modules. 
Each object should contain the following properties:\n * - {string} moduleName - The name of the Terraform module.\n * - {string | null} currentTagVersion - The previous version of the module (or null if this is the initial release).\n * - {string} nextTagVersion - The new version of the module to be released.\n * - {string} releaseType - The type of release (e.g., major, minor, patch).\n *\n * @param {string[]} terraformModuleNamesToRemove - An array of module names that should be removed if\n * specified to remove via config.\n *\n * @param {WikiStatus} wikiStatus - The status of the Wiki check (success, failure, or disabled) and\n * any relevant error messages if applicable.\n *\n * @returns {Promise } A promise that resolves when the comment has been posted and previous\n * comments have been deleted.\n *\n * @throws {Error} Throws an error if the GitHub API call to create a comment or delete existing comments fails.\n */\nexport async function addReleasePlanComment(\n terraformChangedModules: TerraformChangedModule[],\n terraformModuleNamesToRemove: string[],\n wikiStatus: ReleasePlanCommentOptions,\n): Promise {\n console.time('Elapsed time commenting on pull request');\n startGroup('Adding pull request release plan comment');\n\n try {\n const {\n octokit,\n repo: { owner, repo },\n issueNumber: issue_number,\n } = context;\n\n // Initialize the comment body as an array of strings\n const commentBody: string[] = [PR_SUMMARY_MARKER, '\\n# Release Plan\\n'];\n\n // Changed Modules\n if (terraformChangedModules.length === 0) {\n commentBody.push('No terraform modules updated in this pull request.');\n } else {\n commentBody.push('| Module | Release Type | Latest Version | New Version |', '|--|--|--|--|');\n for (const { moduleName, latestTagVersion, nextTagVersion, releaseType } of terraformChangedModules) {\n const initialRelease = latestTagVersion == null;\n const existingVersion = initialRelease ? 
'initial' : releaseType;\n const latestTagDisplay = initialRelease ? '' : latestTagVersion;\n commentBody.push(`| \\`${moduleName}\\` | ${existingVersion} | ${latestTagDisplay} | **${nextTagVersion}** |`);\n }\n }\n\n // Wiki Check\n switch (wikiStatus.status) {\n case WikiStatus.SUCCESS:\n commentBody.push(\n '\\n> #### ✅ Wiki Check ℹ️',\n );\n break;\n case WikiStatus.FAILURE:\n commentBody.push(\n `\\n> #### ⚠️ Wiki Check: Failed to checkout wiki. ${wikiStatus.errorMessage}
Please consult the README for additional information and review logs in the latest GitHub workflow run.`,\n );\n break;\n case WikiStatus.DISABLED:\n commentBody.push('\\n> ##### 🚫 Wiki Check: Generation is disabled.');\n break;\n }\n\n // Modules to Remove\n if (terraformModuleNamesToRemove.length > 0) {\n commentBody.push(\n `\\n> **Note**: The following Terraform modules no longer exist in source; however, corresponding tags/releases exist.${\n config.deleteLegacyTags\n ? ' Automation tag/release deletion is **enabled** and corresponding tags/releases will be automatically deleted.
'\n : ' Automation tag/release deletion is **disabled** — **no** subsequent action will take place.
'\n }`,\n );\n commentBody.push(terraformModuleNamesToRemove.map((moduleName) => `\\`${moduleName}\\``).join(', '));\n }\n\n // Changelog\n if (terraformChangedModules.length > 0) {\n commentBody.push('\\n# Changelog\\n', getPullRequestChangelog(terraformChangedModules));\n }\n\n // Branding\n if (config.disableBranding === false) {\n commentBody.push(`\\n${BRANDING_COMMENT}`);\n }\n\n // Create new PR comment (Requires permission > pull-requests: write)\n const { data: newComment } = await octokit.rest.issues.createComment({\n issue_number,\n owner,\n repo,\n body: commentBody.join('\\n').trim(),\n });\n info(`Posted comment ${newComment.id} @ ${newComment.html_url}`);\n\n // Filter out the comments that contain the PR summary marker and are not the current comment\n const { data: allComments } = await octokit.rest.issues.listComments({ issue_number, owner, repo });\n const commentsToDelete = allComments.filter(\n (comment) => comment.body?.includes(PR_SUMMARY_MARKER) && comment.id !== newComment.id,\n );\n\n // Delete all our previous comments\n for (const comment of commentsToDelete) {\n info(`Deleting previous PR comment from ${comment.created_at}`);\n await octokit.rest.issues.deleteComment({ comment_id: comment.id, owner, repo });\n }\n } catch (error) {\n if (error instanceof RequestError) {\n throw new Error(\n [\n `Failed to create a comment on the pull request: ${error.message} - Ensure that the`,\n 'GitHub Actions workflow has the correct permissions to write comments. To grant the required permissions,',\n 'update your workflow YAML file with the following block under \"permissions\":\\n\\npermissions:\\n',\n ' pull-requests: write',\n ].join(' '),\n { cause: error },\n );\n }\n\n const errorMessage = error instanceof Error ? 
error.message.trim() : String(error).trim();\n throw new Error(`Failed to create a comment on the pull request: ${errorMessage}`, { cause: error });\n } finally {\n console.timeEnd('Elapsed time commenting on pull request');\n endGroup();\n }\n}\n\n/**\n * Posts a PR comment with details about the releases created for the Terraform modules.\n *\n * @param {Array<{ moduleName: string; release: GitHubRelease }>} updatedModules - An array of updated Terraform modules with release information.\n * @returns {Promise}\n */\nexport async function addPostReleaseComment(\n updatedModules: { moduleName: string; release: GitHubRelease }[],\n): Promise {\n if (updatedModules.length === 0) {\n info('No updated modules. Skipping post release PR comment.');\n return;\n }\n\n console.time('Elapsed time commenting on pull request');\n startGroup('Adding pull request post-release comment');\n\n try {\n const {\n octokit,\n repo: { owner, repo },\n repoUrl,\n issueNumber: issue_number,\n } = context;\n\n // Contruct the comment body as an array of strings\n const commentBody: string[] = [\n PR_RELEASE_MARKER,\n '\\n## :rocket: Terraform Module Releases\\n',\n 'The following Terraform modules have been released:\\n',\n ];\n\n for (const { moduleName, release } of updatedModules) {\n const extra = [`[Release Notes](${repoUrl}/releases/tag/${release.title})`];\n if (config.disableWiki === false) {\n extra.push(`[Wiki/Usage](${getWikiLink(moduleName, false)})`);\n }\n\n commentBody.push(`- **\\`${release.title}\\`** • ${extra.join(' • ')}`);\n }\n\n // Branding\n if (config.disableBranding === false) {\n commentBody.push(`\\n${BRANDING_COMMENT}`);\n }\n\n // Post the comment on the pull request\n const { data: newComment } = await octokit.rest.issues.createComment({\n owner,\n repo,\n issue_number,\n body: commentBody.join('\\n').trim(),\n });\n info(`Posted comment ${newComment.id} @ ${newComment.html_url}`);\n } catch (error) {\n if (error instanceof RequestError) {\n throw new 
Error(\n [\n `Failed to create a comment on the pull request: ${error.message} - Ensure that the`,\n 'GitHub Actions workflow has the correct permissions to write comments. To grant the required permissions,',\n 'update your workflow YAML file with the following block under \"permissions\":\\n\\npermissions:\\n',\n ' pull-requests: write',\n ].join(' '),\n { cause: error },\n );\n }\n\n const errorMessage = error instanceof Error ? error.message.trim() : String(error).trim();\n throw new Error(`Failed to create a comment on the pull request: ${errorMessage}`, { cause: error });\n } finally {\n console.timeEnd('Elapsed time commenting on pull request');\n endGroup();\n }\n}\n","import { execFileSync } from 'node:child_process';\nimport type { ExecSyncOptions } from 'node:child_process';\nimport { cpSync, mkdtempSync } from 'node:fs';\nimport { tmpdir } from 'node:os';\nimport { join } from 'node:path';\nimport { getModuleChangelog } from '@/changelog';\nimport { config } from '@/config';\nimport { context } from '@/context';\nimport type { GitHubRelease, TerraformChangedModule } from '@/types';\nimport { GITHUB_ACTIONS_BOT_EMAIL, GITHUB_ACTIONS_BOT_NAME } from '@/utils/constants';\nimport { copyModuleContents } from '@/utils/file';\nimport { debug, endGroup, info, startGroup } from '@actions/core';\nimport type { RestEndpointMethodTypes } from '@octokit/plugin-rest-endpoint-methods';\nimport { RequestError } from '@octokit/request-error';\nimport which from 'which';\n\ntype ListReleasesParams = Omit ;\n\n/**\n * Retrieves all releases from the specified GitHub repository.\n *\n * This function fetches the list of releases for the repository specified in the configuration.\n * It returns the releases as an array of objects containing the title, body, and tag name.\n *\n * @param {GetAllReleasesOptions} options - Optional configuration for the API request\n * @returns {Promise } A promise that resolves to an array of release details.\n * @throws {RequestError} Throws 
an error if the request to fetch releases fails.\n */\nexport async function getAllReleases(\n options: ListReleasesParams = { per_page: 100, page: 1 },\n): Promise {\n console.time('Elapsed time fetching releases'); // Start timing\n startGroup('Fetching repository releases');\n\n try {\n const {\n octokit,\n repo: { owner, repo },\n } = context;\n\n const releases: GitHubRelease[] = [];\n let totalRequests = 0;\n\n const iterator = octokit.paginate.iterator(octokit.rest.repos.listReleases, {\n ...options,\n owner,\n repo,\n });\n for await (const { data } of iterator) {\n totalRequests++;\n\n for (const release of data) {\n releases.push({\n id: release.id,\n title: release.name ?? '', // same as tag as defined in our pull request for now (no need for tag)\n body: release.body ?? '',\n tagName: release.tag_name,\n });\n }\n }\n\n debug(`Total page requests: ${totalRequests}`);\n info(`Found ${releases.length} release${releases.length !== 1 ? 's' : ''}.`);\n debug(JSON.stringify(releases, null, 2));\n\n // Note: No need to sort currently as they by default return in indexed order with most recent first.\n return releases;\n } catch (error) {\n let errorMessage: string;\n if (error instanceof RequestError) {\n errorMessage = `Failed to fetch releases: ${error.message.trim()} (status: ${error.status})`;\n } else if (error instanceof Error) {\n errorMessage = `Failed to fetch releases: ${error.message.trim()}`;\n } else {\n errorMessage = String(error).trim();\n }\n\n throw new Error(errorMessage, { cause: error });\n /* c8 ignore next */\n } finally {\n console.timeEnd('Elapsed time fetching releases');\n endGroup();\n }\n}\n\n/**\n * Creates a GitHub release and corresponding git tag for the provided Terraform modules.\n *\n * Note: Requires GitHub action permissions > contents: write\n *\n * @param {TerraformChangedModule[]} terraformChangedModules - An array of changed Terraform modules to process and create a release.\n * @returns {Promise<{ moduleName: string; 
release: GitHubRelease }[]>}\n */\nexport async function createTaggedRelease(\n terraformChangedModules: TerraformChangedModule[],\n): Promise<{ moduleName: string; release: GitHubRelease }[]> {\n // Check if there are any modules to process\n if (terraformChangedModules.length === 0) {\n info('No changed Terraform modules to process. Skipping tag/release creation.');\n return [];\n }\n\n const {\n octokit,\n repo: { owner, repo },\n prBody,\n prTitle,\n workspaceDir,\n } = context;\n\n console.time('Elapsed time pushing new tags & release');\n startGroup('Creating releases & tags for modules');\n\n const updatedModules: { moduleName: string; release: GitHubRelease }[] = [];\n\n try {\n for (const module of terraformChangedModules) {\n const { moduleName, directory, releaseType, nextTag, nextTagVersion } = module;\n\n info(`Release type: ${releaseType}`);\n info(`Next tag version: ${nextTag}`);\n\n // Create a temporary working directory\n // Replace '/' with '-' to create a valid directory name\n const safeName = moduleName.replace(/\\//g, '-');\n const tmpDir = mkdtempSync(join(tmpdir(), `${safeName}-`));\n info(`Created temp directory: ${tmpDir}`);\n\n // Copy the module's contents to the temporary directory, excluding specified patterns\n copyModuleContents(directory, tmpDir, config.moduleAssetExcludePatterns);\n\n // Copy the module's .git directory\n cpSync(join(workspaceDir, '.git'), join(tmpDir, '.git'), { recursive: true });\n\n // Git operations: commit the changes and tag the release\n const commitMessage = `${nextTag}\\n\\n${prTitle}\\n\\n${prBody}`.trim();\n const gitPath = await which('git');\n const gitOpts: ExecSyncOptions = { cwd: tmpDir }; // Lots of adds and deletions here so don't inherit\n\n for (const cmd of [\n ['config', '--local', 'user.name', GITHUB_ACTIONS_BOT_NAME],\n ['config', '--local', 'user.email', GITHUB_ACTIONS_BOT_EMAIL],\n ['add', '.'],\n ['commit', '-m', commitMessage.trim()],\n ['tag', nextTag],\n ['push', 'origin', 
nextTag],\n ]) {\n execFileSync(gitPath, cmd, gitOpts);\n }\n\n // Create a GitHub release using the tag\n info(`Creating GitHub release for ${moduleName}@${nextTag}`);\n const body = getModuleChangelog(module);\n\n const response = await octokit.rest.repos.createRelease({\n owner,\n repo,\n tag_name: nextTag,\n name: nextTag,\n body,\n draft: false,\n prerelease: false,\n });\n\n // Now update the module with latest tag and release information\n module.latestTag = nextTag;\n module.latestTagVersion = nextTagVersion;\n module.tags.unshift(nextTag); // Prepend the latest tag\n const release = {\n id: response.data.id,\n title: nextTag,\n body,\n tagName: nextTag,\n };\n module.releases.unshift(release);\n\n updatedModules.push({ moduleName, release });\n }\n\n return updatedModules;\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n\n // Handle GitHub permissions or any error related to pushing tags\n if (errorMessage.includes('The requested URL returned error: 403')) {\n throw new Error(\n [\n `Failed to create tags in repository: ${errorMessage} - Ensure that the`,\n 'GitHub Actions workflow has the correct permissions to create tags. 
To grant the required permissions,',\n 'update your workflow YAML file with the following block under \"permissions\":\\n\\npermissions:\\n',\n ' contents: write',\n ].join(' '),\n { cause: error },\n );\n }\n\n throw new Error(`Failed to create tags in repository: ${errorMessage}`, { cause: error });\n /* c8 ignore next */\n } finally {\n // Cleanup: remove the temp directory\n console.timeEnd('Elapsed time pushing new tags & release');\n endGroup();\n }\n}\n\n/**\n * Deletes legacy Terraform module releases.\n *\n * This function takes an array of module names and all releases,\n * and deletes the releases that match the format {moduleName}/vX.Y.Z.\n *\n * @param {string[]} terraformModuleNames - Array of Terraform module names to delete.\n * @param {GitHubRelease[]} allReleases - Array of all releases in the repository.\n * @returns {Promise }\n */\nexport async function deleteLegacyReleases(\n terraformModuleNames: string[],\n allReleases: GitHubRelease[],\n): Promise {\n if (!config.deleteLegacyTags) {\n info('Deletion of legacy tags/releases is disabled. Skipping.');\n return;\n }\n\n startGroup('Deleting legacy Terraform module releases');\n\n // Filter releases that match the format {moduleName} or {moduleName}/vX.Y.Z\n const releasesToDelete = allReleases.filter((release) => {\n return terraformModuleNames.some((name) => new RegExp(`^${name}(?:/v\\\\d+\\\\.\\\\d+\\\\.\\\\d+)?$`).test(release.title));\n });\n\n if (releasesToDelete.length === 0) {\n info('No legacy releases found to delete. Skipping.');\n endGroup();\n return;\n }\n\n info(`Found ${releasesToDelete.length} legacy release${releasesToDelete.length !== 1 ? 
's' : ''} to delete.`);\n info(\n JSON.stringify(\n releasesToDelete.map((release) => release.title),\n null,\n 2,\n ),\n );\n\n console.time('Elapsed time deleting legacy releases');\n\n const {\n octokit,\n repo: { owner, repo },\n } = context;\n\n let releaseTitle = '';\n try {\n for (const { title, id: release_id } of releasesToDelete) {\n releaseTitle = title;\n info(`Deleting release: ${title}`);\n await octokit.rest.repos.deleteRelease({ owner, repo, release_id });\n }\n } catch (error) {\n const requestError = error as RequestError;\n if (requestError.status === 403) {\n throw new Error(\n [\n `Failed to delete release: ${releaseTitle} ${requestError.message}.\\nEnsure that the`,\n 'GitHub Actions workflow has the correct permissions to delete releases by ensuring that',\n 'your workflow YAML file has the following block under \"permissions\":\\n\\npermissions:\\n',\n ' contents: write',\n ].join(' '),\n { cause: error },\n );\n }\n throw new Error(`Failed to delete release: [Status = ${requestError.status}] ${requestError.message}`, {\n cause: error,\n });\n } finally {\n console.timeEnd('Elapsed time deleting legacy releases');\n endGroup();\n }\n}\n","import { config } from '@/config';\nimport { context } from '@/context';\nimport { debug, endGroup, info, startGroup } from '@actions/core';\nimport type { RestEndpointMethodTypes } from '@octokit/plugin-rest-endpoint-methods';\nimport { RequestError } from '@octokit/request-error';\n\ntype ListTagsParams = Omit ;\n\n/**\n * Fetches all tags from the specified GitHub repository.\n *\n * This function utilizes pagination to retrieve all tags, returning them as an array of strings.\n *\n * @param {GetAllTagsOptions} options - Optional configuration for the API request\n * @param {number} options.perPage - Number of items per page (default: 100)\n * @returns {Promise } A promise that resolves to an array of tag names.\n * @throws {RequestError} Throws an error if the request to fetch tags fails.\n */\nexport 
async function getAllTags(options: ListTagsParams = { per_page: 100, page: 1 }): Promise {\n console.time('Elapsed time fetching tags');\n startGroup('Fetching repository tags');\n\n try {\n const {\n octokit,\n repo: { owner, repo },\n } = context;\n\n const tags: string[] = [];\n let totalRequests = 0;\n\n for await (const response of octokit.paginate.iterator(octokit.rest.repos.listTags, {\n ...options,\n owner,\n repo,\n })) {\n totalRequests++;\n for (const tag of response.data) {\n tags.push(tag.name);\n }\n }\n\n debug(`Total page requests: ${totalRequests}`);\n info(`Found ${tags.length} tag${tags.length !== 1 ? 's' : ''}.`);\n debug(JSON.stringify(tags, null, 2));\n\n return tags;\n } catch (error) {\n let errorMessage: string;\n if (error instanceof RequestError) {\n errorMessage = `Failed to fetch tags: ${error.message.trim()} (status: ${error.status})`;\n } else if (error instanceof Error) {\n errorMessage = `Failed to fetch tags: ${error.message.trim()}`;\n } else {\n errorMessage = String(error).trim();\n }\n\n throw new Error(errorMessage, { cause: error });\n /* c8 ignore next */\n } finally {\n console.timeEnd('Elapsed time fetching tags');\n endGroup();\n }\n}\n\n/**\n * Deletes legacy Terraform module tags.\n *\n * This function takes an array of module names and all tags,\n * and deletes the tags that match the format {moduleName}/vX.Y.Z.\n *\n * @param {string[]} terraformModuleNames - Array of Terraform module names to delete.\n * @param {string[]} allTags - Array of all tags in the repository.\n * @returns {Promise }\n */\nexport async function deleteLegacyTags(terraformModuleNames: string[], allTags: string[]): Promise {\n if (!config.deleteLegacyTags) {\n info('Deletion of legacy tags/releases is disabled. 
Skipping.');\n return;\n }\n\n startGroup('Deleting legacy Terraform module tags');\n\n // Filter tags that match the format {moduleName} or {moduleName}/vX.Y.Z\n const tagsToDelete = allTags.filter((tag) => {\n return terraformModuleNames.some((name) => new RegExp(`^${name}(?:/v\\\\d+\\\\.\\\\d+\\\\.\\\\d+)?$`).test(tag));\n });\n\n if (tagsToDelete.length === 0) {\n info('No legacy tags found to delete. Skipping.');\n endGroup();\n return;\n }\n\n info(`Found ${tagsToDelete.length} legacy tag${tagsToDelete.length !== 1 ? 's' : ''} to delete.`);\n info(JSON.stringify(tagsToDelete, null, 2));\n\n console.time('Elapsed time deleting legacy tags');\n\n const {\n octokit,\n repo: { owner, repo },\n } = context;\n\n let tag = ''; // used for better error handling below.\n try {\n for (tag of tagsToDelete) {\n info(`Deleting tag: ${tag}`);\n await octokit.rest.git.deleteRef({\n owner,\n repo,\n ref: `tags/${tag}`,\n });\n }\n } catch (error) {\n const requestError = error as RequestError;\n if (requestError.status === 403) {\n throw new Error(\n [\n `Failed to delete repository tag: ${tag} ${requestError.message}.\\nEnsure that the`,\n 'GitHub Actions workflow has the correct permissions to delete tags by ensuring that',\n 'your workflow YAML file has the following block under \"permissions\":\\n\\npermissions:\\n',\n ' contents: write',\n ].join(' '),\n { cause: error },\n );\n }\n throw new Error(`Failed to delete tag: [Status = ${requestError.status}] ${requestError.message}`, {\n cause: error,\n });\n } finally {\n console.timeEnd('Elapsed time deleting legacy tags');\n endGroup();\n }\n}\n","import { config } from '@/config';\nimport type { ReleaseType } from '@/types';\n\n/**\n * Determines the release type based on the provided commit message and previous release type.\n *\n * @param message - The commit message to analyze.\n * @param previousReleaseType - The previous release type ('major', 'minor', 'patch', or null).\n * @returns The computed release type: 
'major', 'minor', or 'patch'.\n */\nexport function determineReleaseType(message: string, previousReleaseType: ReleaseType | null = null): ReleaseType {\n const messageCleaned = message.toLowerCase().trim();\n\n // Destructure keywords from config\n const { majorKeywords, minorKeywords } = config;\n\n // Determine release type from message\n let currentReleaseType: ReleaseType = 'patch';\n if (majorKeywords.some((keyword) => messageCleaned.includes(keyword.toLowerCase()))) {\n currentReleaseType = 'major';\n } else if (minorKeywords.some((keyword) => messageCleaned.includes(keyword.toLowerCase()))) {\n currentReleaseType = 'minor';\n }\n\n // Determine the next release type considering the previous release type\n if (currentReleaseType === 'major' || previousReleaseType === 'major') {\n return 'major';\n }\n if (currentReleaseType === 'minor' || previousReleaseType === 'minor') {\n return 'minor';\n }\n\n // Note: For now, we don't have a separate default increment config and therefore we'll always\n // return true which somewhat negates searching for patch keywords; however, in the future\n // there may be a usecase where we make this configurable.\n return 'patch';\n}\n\n/**\n * Computes the next tag version based on the current tag and the specified release type.\n *\n * This function increments the version based on semantic versioning rules:\n * - If the release type is 'major', it increments the major version and resets the minor and patch versions.\n * - If the release type is 'minor', it increments the minor version and resets the patch version.\n * - If the release type is 'patch', it increments the patch version.\n *\n * Note: The returned version only includes the 'vX.Y.Z' portion.\n * The caller is responsible for adding the module prefix to form the complete tag (e.g., 'module-name/vX.Y.Z').\n *\n * @param {string | null} latestTagVersion - The current version tag, or null if there is no current tag.\n * @param {ReleaseType} releaseType - The type of 
release to be performed ('major', 'minor', or 'patch').\n * @returns {string} The computed next tag version in the format 'vX.Y.Z'.\n */\nexport function getNextTagVersion(latestTagVersion: string | null, releaseType: ReleaseType): string {\n if (latestTagVersion === null) {\n return config.defaultFirstTag;\n }\n\n // Remove 'v' prefix if present, and split by '.'\n const semver = latestTagVersion.replace(/^v/, '').split('.').map(Number);\n if (releaseType === 'major') {\n semver[0]++;\n semver[1] = 0;\n semver[2] = 0;\n } else if (releaseType === 'minor') {\n semver[1]++;\n semver[2] = 0;\n } else {\n semver[2]++;\n }\n return `v${semver.join('.')}`;\n}\n","/**\n * Removes any leading and trailing slashes (/) from the given string.\n *\n * @param {string} str - The input string from which to trim slashes.\n * @returns {string} - The string without leading or trailing slashes.\n *\n * @example\n * // Returns \"example/path\"\n * trimSlashes(\"/example/path/\");\n *\n * @example\n * // Returns \"another/example\"\n * trimSlashes(\"///another/example///\");\n */\nexport function trimSlashes(str: string): string {\n let start = 0;\n let end = str.length;\n\n // Remove leading slashes by adjusting start index\n while (start < end && str[start] === '/') {\n start++;\n }\n\n // Remove trailing slashes by adjusting end index\n while (end > start && str[end - 1] === '/') {\n end--;\n }\n\n // Return the substring without leading and trailing slashes\n return str.slice(start, end);\n}\n\n/**\n * Removes trailing dots from a string without using regex.\n *\n * This function iteratively checks each character from the end of the string\n * and removes any consecutive dots at the end. 
It uses a direct character-by-character\n * approach instead of regex to avoid potential backtracking issues and ensure\n * consistent O(n) performance.\n *\n * @param {string} input - The string to process\n * @returns {string} The input string with all trailing dots removed\n */\nexport function removeTrailingDots(input: string) {\n let endIndex = input.length;\n while (endIndex > 0 && input[endIndex - 1] === '.') {\n endIndex--;\n }\n return input.slice(0, endIndex);\n}\n","import { readdirSync, statSync } from 'node:fs';\nimport { dirname, isAbsolute, join, relative, resolve } from 'node:path';\nimport { config } from '@/config';\nimport { context } from '@/context';\nimport type { CommitDetails, GitHubRelease, TerraformChangedModule, TerraformModule } from '@/types';\nimport { isTerraformDirectory, shouldExcludeFile, shouldIgnoreModulePath } from '@/utils/file';\nimport { determineReleaseType, getNextTagVersion } from '@/utils/semver';\nimport { removeTrailingDots } from '@/utils/string';\nimport { debug, endGroup, info, startGroup } from '@actions/core';\n\n/**\n * Type guard function to determine if a given module is a `TerraformChangedModule`.\n *\n * This function checks if the `module` object has the property `isChanged` set to `true`.\n * It can be used to narrow down the type of the module within TypeScript's type system.\n *\n * @param {TerraformModule | TerraformChangedModule} module - The module to check.\n * @returns {module is TerraformChangedModule} - Returns `true` if the module is a `TerraformChangedModule`, otherwise `false`.\n */\nexport function isChangedModule(module: TerraformModule | TerraformChangedModule): module is TerraformChangedModule {\n return 'isChanged' in module && module.isChanged === true;\n}\n\n/**\n * Filters an array of Terraform modules to return only those that are marked as changed.\n *\n * @param modules - An array of TerraformModule or TerraformChangedModule objects.\n * @returns An array of TerraformChangedModule 
objects that have been marked as changed.\n */\nexport function getTerraformChangedModules(\n modules: (TerraformModule | TerraformChangedModule)[],\n): TerraformChangedModule[] {\n return modules.filter((module): module is TerraformChangedModule => {\n return (module as TerraformChangedModule).isChanged === true;\n });\n}\n\n/**\n * Generates a valid Terraform module name from the given directory path.\n *\n * The function transforms the directory path by:\n * - Trimming whitespace\n * - Replacing invalid characters with hyphens\n * - Normalizing slashes\n * - Removing leading/trailing slashes\n * - Handling consecutive dots and hyphens\n * - Removing any remaining whitespace\n * - Lowercase (for consistency)\n *\n * @param {string} terraformDirectory - The directory path from which to generate the module name.\n * @returns {string} A valid Terraform module name based on the provided directory path.\n */\nfunction getTerraformModuleNameFromRelativePath(terraformDirectory: string): string {\n const cleanedDirectory = terraformDirectory\n .trim() // Remove leading/trailing whitespace\n .replace(/[^a-zA-Z0-9/_-]+/g, '-') // Remove invalid characters, allowing a-z, A-Z, 0-9, /, _, -\n .replace(/\\/{2,}/g, '/') // Replace multiple consecutive slashes with a single slash\n .replace(/\\/\\.+/g, '/') // Remove slashes followed by dots\n .replace(/(^\\/|\\/$)/g, '') // Remove leading/trailing slashes\n .replace(/\\.\\.+/g, '.') // Replace consecutive dots with a single dot\n .replace(/--+/g, '-') // Replace consecutive hyphens with a single hyphen\n .replace(/\\s+/g, '') // Remove any remaining whitespace\n .toLowerCase(); // All of our module names will be lowercase\n\n return removeTrailingDots(cleanedDirectory);\n}\n\n/**\n * Gets the relative path of the Terraform module directory associated with a specified file.\n *\n * Traverses upward from the file's directory to locate the nearest Terraform module directory.\n * Returns the module's path relative to the current 
working directory.\n *\n * @param {string} filePath - The absolute or relative path of the file to analyze.\n * @returns {string | null} Relative path to the associated Terraform module directory, or null\n * if no directory is found.\n */\nfunction getTerraformModuleDirectoryRelativePath(filePath: string): string | null {\n const rootDir = resolve(context.workspaceDir);\n const absoluteFilePath = isAbsolute(filePath) ? filePath : resolve(context.workspaceDir, filePath); // Handle relative/absolute\n let directory = dirname(absoluteFilePath);\n\n // Traverse upward until the current working directory (rootDir) is reached\n while (directory !== rootDir && directory !== resolve(directory, '..')) {\n if (isTerraformDirectory(directory)) {\n return relative(rootDir, directory);\n }\n\n directory = resolve(directory, '..'); // Move up a directory\n }\n\n // Return null if no Terraform module directory is found\n return null;\n}\n\n/**\n * Retrieves the tags for a specified module directory, filtering tags that match the module pattern\n * and sorting by versioning in descending order.\n *\n * @param {string} moduleName - The Terraform module name to find current tags.\n * @param {string[]} allTags - An array of all available tags.\n * @returns {Object} An object with the latest tag, latest tag version, and an array of all matching tags.\n */\nfunction getTagsForModule(\n moduleName: string,\n allTags: string[],\n): {\n latestTag: string | null;\n latestTagVersion: string | null;\n tags: string[];\n} {\n // Filter tags that match the module directory pattern\n const tags = allTags\n .filter((tag) => tag.startsWith(`${moduleName}/v`))\n .sort((a, b) => {\n const aParts = a.replace(`${moduleName}/v`, '').split('.').map(Number);\n const bParts = b.replace(`${moduleName}/v`, '').split('.').map(Number);\n return bParts[0] - aParts[0] || bParts[1] - aParts[1] || bParts[2] - aParts[2]; // Sort in descending order\n });\n\n // Return the latest tag, latest tag version, and all 
matching tags\n return {\n latestTag: tags.length > 0 ? tags[0] : null, // Keep the full tag\n latestTagVersion: tags.length > 0 ? tags[0].replace(`${moduleName}/`, '') : null, // Extract version only\n tags,\n };\n}\n\n/**\n * Retrieves the relevant GitHub releases for a specified module directory.\n *\n * Filters releases for the module and sorts by version in descending order.\n *\n * @param {string} moduleName - The Terraform module name for which to find relevant release tags.\n * @param {GitHubRelease[]} allReleases - An array of GitHub releases.\n * @returns {GitHubRelease[]} An array of releases relevant to the module, sorted with the latest first.\n */\nfunction getReleasesForModule(moduleName: string, allReleases: GitHubRelease[]): GitHubRelease[] {\n // Filter releases that are relevant to the module directory\n const relevantReleases = allReleases\n .filter((release) => release.title.startsWith(`${moduleName}/`))\n .sort((a, b) => {\n // Sort releases by their title or release date (depending on what you use for sorting)\n // Assuming latest release is at the top by default or using a versioning format like vX.Y.Z\n const aVersion = a.title.replace(`${moduleName}/v`, '').split('.').map(Number);\n const bVersion = b.title.replace(`${moduleName}/v`, '').split('.').map(Number);\n return bVersion[0] - aVersion[0] || bVersion[1] - aVersion[1] || bVersion[2] - aVersion[2];\n });\n\n return relevantReleases;\n}\n\n/**\n * Retrieves all Terraform modules within the specified workspace directory and any changes based on commits.\n * Analyzes the directory structure to identify modules and checks commit history for changes.\n *\n * @param {CommitDetails[]} commits - Array of commit details to analyze for changes.\n * @param {string[]} allTags - List of all tags associated with the modules.\n * @param {GitHubRelease[]} allReleases - GitHub releases for the modules.\n * @returns {(TerraformModule | TerraformChangedModule)[]} Array of Terraform modules with their 
corresponding\n * change details.\n * @throws {Error} - If a module associated with a file is missing from the terraformModulesMap.\n */\n\nexport function getAllTerraformModules(\n commits: CommitDetails[],\n allTags: string[],\n allReleases: GitHubRelease[],\n): (TerraformModule | TerraformChangedModule)[] {\n startGroup('Finding all Terraform modules with corresponding changes');\n console.time('Elapsed time finding terraform modules'); // Start timing\n\n const terraformModulesMap: Record = {};\n const workspaceDir = context.workspaceDir;\n\n // Terraform only processes .tf and .tf.json files in the current working directory where you run the terraform commands. It does not automatically scan or include files from subdirectories.\n\n // Helper function to recursively search for Terraform modules\n const searchDirectory = (dir: string) => {\n const files = readdirSync(dir);\n\n for (const file of files) {\n const filePath = join(dir, file);\n const stat = statSync(filePath);\n\n // If it's a directory, recursively search inside it\n if (stat.isDirectory()) {\n if (isTerraformDirectory(filePath)) {\n const relativePath = relative(workspaceDir, filePath);\n\n // Check if this module path should be ignored\n if (shouldIgnoreModulePath(relativePath, config.modulePathIgnore)) {\n info(`Skipping module in ${relativePath} due to module-path-ignore match`);\n continue;\n }\n\n const moduleName = getTerraformModuleNameFromRelativePath(relativePath);\n terraformModulesMap[moduleName] = {\n moduleName,\n directory: filePath,\n ...getTagsForModule(moduleName, allTags),\n releases: getReleasesForModule(moduleName, allReleases),\n };\n }\n\n // We'll always recurse into subdirectories to find terraform modules even after we've found a match.\n // This is because we want to find all modules in the workspace and although not conventional, there are\n // cases where a module could be completely nested within another module and be 100% separate.\n searchDirectory(filePath); // 
Recurse into subdirectories\n }\n }\n };\n\n // Start the search from the workspace root directory\n info(`Searching for Terraform modules in ${workspaceDir}`);\n searchDirectory(workspaceDir);\n\n const totalModulesFound = Object.keys(terraformModulesMap).length;\n info(`Found ${totalModulesFound} Terraform module${totalModulesFound !== 1 ? 's' : ''}`);\n info('Terraform Modules:');\n info(JSON.stringify(terraformModulesMap, null, 2));\n\n // Now process commits to find changed modules\n for (const { message, sha, files } of commits) {\n info(`Parsing commit ${sha}: ${message.trim().split('\\n')[0].trim()} (Changed Files = ${files.length})`);\n\n for (const relativeFilePath of files) {\n info(`Analyzing file: ${relativeFilePath}`);\n const moduleRelativePath = getTerraformModuleDirectoryRelativePath(relativeFilePath);\n\n if (moduleRelativePath === null) {\n // File isn't associated with a Terraform module\n continue;\n }\n\n // Check if this module path should be ignored\n if (shouldIgnoreModulePath(moduleRelativePath, config.modulePathIgnore)) {\n info(` (skipping) ➜ Matches module-path-ignore pattern for path \\`${moduleRelativePath}\\``);\n continue;\n }\n\n const moduleName = getTerraformModuleNameFromRelativePath(moduleRelativePath);\n\n // Skip excluded files based on provided pattern\n if (shouldExcludeFile(moduleRelativePath, relativeFilePath, config.moduleChangeExcludePatterns)) {\n info(` (skipping) ➜ Matches module-change-exclude-pattern for path \\`${moduleRelativePath}\\``);\n continue;\n }\n\n const module = terraformModulesMap[moduleName];\n\n /* c8 ignore start */\n if (!module) {\n // Module not found in the map, this should not happen\n throw new Error(\n `Found changed file \"${relativeFilePath}\" associated with a terraform module \"${moduleName}\"; however, associated module does not exist`,\n );\n }\n /* c8 ignore stop */\n\n // Update the module with the TerraformChangedModule properties\n const releaseType = determineReleaseType(message, 
(module as TerraformChangedModule)?.releaseType);\n const nextTagVersion = getNextTagVersion(module.latestTagVersion, releaseType);\n const commitMessages = (module as TerraformChangedModule).commitMessages || [];\n\n if (!commitMessages.includes(message)) {\n commitMessages.push(message);\n }\n\n // Update the existing module properties\n Object.assign(module, {\n isChanged: true, // Mark as changed\n commitMessages,\n releaseType,\n nextTag: `${moduleName}/${nextTagVersion}`,\n nextTagVersion,\n });\n }\n }\n\n // Sort terraform modules by module name\n const sortedTerraformModules = Object.values(terraformModulesMap)\n .slice()\n .sort((a, b) => {\n return a.moduleName.localeCompare(b.moduleName);\n });\n\n info('Finished analyzing directory tree, terraform modules, and commits');\n info(`Found ${sortedTerraformModules.length} terraform module${sortedTerraformModules.length !== 1 ? 's' : ''}.`);\n\n let terraformChangedModules: TerraformChangedModule[] | null = getTerraformChangedModules(sortedTerraformModules);\n info(\n `Found ${terraformChangedModules.length} changed Terraform module${terraformChangedModules.length !== 1 ? 
's' : ''}.`,\n );\n // Free up memory by unsetting terraformChangedModules\n terraformChangedModules = null;\n\n debug('Terraform Modules:');\n debug(JSON.stringify(sortedTerraformModules, null, 2));\n\n console.timeEnd('Elapsed time finding terraform modules');\n endGroup();\n\n return sortedTerraformModules;\n}\n\n/**\n * Determines an array of Terraform module names that need to be removed.\n *\n * @param {string[]} allTags - A list of all tags associated with the modules.\n * @param {TerraformModule[]} terraformModules - An array of Terraform modules.\n * @returns {string[]} An array of Terraform module names that need to be removed.\n */\nexport function getTerraformModulesToRemove(allTags: string[], terraformModules: TerraformModule[]): string[] {\n startGroup('Finding all Terraform modules that should be removed');\n\n // Get an array of all module names from the tags\n const moduleNamesFromTags = Array.from(\n new Set(\n allTags\n // Currently, we will remove all tags. If we wanted to allow other tags that didnt\n // take the form of moduleName/vX.Y.Z, we could filter them out here. 
However, the purpose\n // of this monorepo terraform releaser is repo-encompassing and thus if someone has a\n // dangling tag, we should ideally remove it.\n //.filter((tag) => {\n // return /^.*\\/v\\d+\\.\\d+\\.\\d+$/.test(tag);\n //})\n .map((tag) => tag.replace(/\\/v\\d+\\.\\d+\\.\\d+$/, '')),\n ),\n );\n\n // Get an array of all module names from the terraformModules\n const moduleNamesFromModules = terraformModules.map((module) => module.moduleName);\n\n // Perform a diff between the two arrays to find the module names that need to be removed\n const moduleNamesToRemove = moduleNamesFromTags.filter((moduleName) => !moduleNamesFromModules.includes(moduleName));\n\n info('Terraform modules to remove');\n info(JSON.stringify(moduleNamesToRemove, null, 2));\n\n endGroup();\n\n return moduleNamesToRemove;\n}\n","import { getConfig } from '@/config';\nimport { getContext } from '@/context';\nimport { addPostReleaseComment, addReleasePlanComment, getPullRequestCommits, hasReleaseComment } from '@/pull-request';\nimport { createTaggedRelease, deleteLegacyReleases, getAllReleases } from '@/releases';\nimport { deleteLegacyTags, getAllTags } from '@/tags';\nimport { ensureTerraformDocsConfigDoesNotExist, installTerraformDocs } from '@/terraform-docs';\nimport { getAllTerraformModules, getTerraformChangedModules, getTerraformModulesToRemove } from '@/terraform-module';\nimport type {\n Config,\n Context,\n GitHubRelease,\n ReleasePlanCommentOptions,\n TerraformChangedModule,\n TerraformModule,\n} from '@/types';\nimport { WikiStatus, checkoutWiki, commitAndPushWikiChanges, generateWikiFiles } from '@/wiki';\nimport { endGroup, info, setFailed, setOutput, startGroup } from '@actions/core';\n\n/**\n * Initializes and returns the configuration and context objects.\n * Config must be initialized before context due to dependency constraints.\n *\n * @returns {{ config: Config; context: Context }} Initialized config and context objects.\n */\nfunction initialize(): { config: 
Config; context: Context } {\n const configInstance = getConfig();\n const contextInstance = getContext();\n return { config: configInstance, context: contextInstance };\n}\n\n/**\n * Handles wiki-related operations, including checkout, generating release plan comments,\n * and error handling for failures.\n *\n * @param {Config} config - The configuration object containing wiki and Terraform Docs settings.\n * @param {TerraformChangedModule[]} terraformChangedModules - List of changed Terraform modules.\n * @param {string[]} terraformModuleNamesToRemove - List of Terraform module names to remove.\n * @returns {Promise } Resolves when wiki-related operations are completed.\n */\nasync function handleReleasePlanComment(\n config: Config,\n terraformChangedModules: TerraformChangedModule[],\n terraformModuleNamesToRemove: string[],\n): Promise {\n let wikiStatus: WikiStatus = WikiStatus.DISABLED;\n let failure: string | undefined;\n let error: Error | undefined;\n\n try {\n if (!config.disableWiki) {\n checkoutWiki();\n wikiStatus = WikiStatus.SUCCESS;\n }\n } catch (err) {\n const errorMessage = err instanceof Error ? 
err.message.split('\\n')[0] : String(err).split('\\n')[0];\n wikiStatus = WikiStatus.FAILURE;\n failure = errorMessage;\n error = err as Error;\n } finally {\n const commentOptions: ReleasePlanCommentOptions = {\n status: wikiStatus,\n errorMessage: failure,\n };\n await addReleasePlanComment(terraformChangedModules, terraformModuleNamesToRemove, commentOptions);\n }\n\n if (error) {\n throw error;\n }\n}\n\n/**\n * Handles merge-event-specific operations, including tagging new releases, deleting legacy resources,\n * and optionally generating Terraform Docs-based wiki documentation.\n *\n * @param {Config} config - The configuration object.\n * @param {TerraformChangedModule[]} terraformChangedModules - List of changed Terraform modules.\n * @param {string[]} terraformModuleNamesToRemove - List of Terraform module names to remove.\n * @param {TerraformModule[]} terraformModules - List of all Terraform modules in the repository.\n * @param {GitHubRelease[]} allReleases - List of all GitHub releases in the repository.\n * @param {string[]} allTags - List of all tags in the repository.\n * @returns {Promise } Resolves when merge-event operations are complete.\n */\nasync function handleMergeEvent(\n config: Config,\n terraformChangedModules: TerraformChangedModule[],\n terraformModuleNamesToRemove: string[],\n terraformModules: TerraformModule[],\n allReleases: GitHubRelease[],\n allTags: string[],\n): Promise {\n const updatedModules = await createTaggedRelease(terraformChangedModules);\n await addPostReleaseComment(updatedModules);\n\n await deleteLegacyReleases(terraformModuleNamesToRemove, allReleases);\n await deleteLegacyTags(terraformModuleNamesToRemove, allTags);\n\n if (config.disableWiki) {\n info('Wiki generation is disabled.');\n } else {\n installTerraformDocs(config.terraformDocsVersion);\n ensureTerraformDocsConfigDoesNotExist();\n checkoutWiki();\n await generateWikiFiles(terraformModules);\n commitAndPushWikiChanges();\n }\n}\n\n/**\n * Entry point 
for the GitHub Action. Determines the flow based on whether the event\n * is a pull request or a merge, and executes the appropriate operations.\n *\n * @returns {Promise } Resolves when the action completes successfully or fails.\n */\nexport async function run(): Promise {\n try {\n const { config, context } = initialize();\n\n if (await hasReleaseComment()) {\n info('Release comment found. Exiting.');\n return;\n }\n\n const commits = await getPullRequestCommits();\n const allTags = await getAllTags();\n const allReleases = await getAllReleases();\n const terraformModules = getAllTerraformModules(commits, allTags, allReleases);\n const terraformChangedModules = getTerraformChangedModules(terraformModules);\n const terraformModuleNamesToRemove = getTerraformModulesToRemove(allTags, terraformModules);\n\n if (!context.isPrMergeEvent) {\n await handleReleasePlanComment(config, terraformChangedModules, terraformModuleNamesToRemove);\n } else {\n await handleMergeEvent(\n config,\n terraformChangedModules,\n terraformModuleNamesToRemove,\n terraformModules,\n allReleases,\n allTags,\n );\n }\n\n // Set the outputs for the GitHub Action\n const changedModuleNames = terraformChangedModules.map((module) => module.moduleName);\n const changedModulePaths = terraformChangedModules.map((module) => module.directory);\n const changedModulesMap = Object.fromEntries(\n terraformChangedModules.map((module) => [\n module.moduleName,\n {\n path: module.directory,\n currentTag: module.latestTag,\n nextTag: module.nextTag,\n releaseType: module.releaseType,\n },\n ]),\n );\n\n // Add new outputs for all modules\n const allModuleNames = terraformModules.map((module) => module.moduleName);\n const allModulePaths = terraformModules.map((module) => module.directory);\n const allModulesMap = Object.fromEntries(\n terraformModules.map((module) => [\n module.moduleName,\n {\n path: module.directory,\n latestTag: module.latestTag,\n latestTagVersion: module.latestTagVersion,\n },\n ]),\n 
);\n\n // Log the changes for debugging\n startGroup('Outputs');\n info(`Changed module names: ${JSON.stringify(changedModuleNames)}`);\n info(`Changed module paths: ${JSON.stringify(changedModulePaths)}`);\n info(`Changed modules map: ${JSON.stringify(changedModulesMap, null, 2)}`);\n info(`All module names: ${JSON.stringify(allModuleNames)}`);\n info(`All module paths: ${JSON.stringify(allModulePaths)}`);\n info(`All modules map: ${JSON.stringify(allModulesMap, null, 2)}`);\n endGroup();\n\n setOutput('changed-module-names', changedModuleNames);\n setOutput('changed-module-paths', changedModulePaths);\n setOutput('changed-modules-map', changedModulesMap);\n setOutput('all-module-names', allModuleNames);\n setOutput('all-module-paths', allModulePaths);\n setOutput('all-modules-map', allModulesMap);\n } catch (error) {\n if (error instanceof Error) {\n setFailed(error.message);\n }\n }\n}\n","/**\n * The entrypoint for the action.\n */\nimport { run } from '@/main';\n\nrun();\n"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index d521f30..bdd9920 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "terraform-module-releaser", - "version": "1.4.2", + "version": "1.5.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "terraform-module-releaser", - "version": "1.4.2", + "version": "1.5.0", "license": "MIT", "dependencies": { - "@actions/core": "^1.10.1", + "@actions/core": "^1.11.1", "@octokit/core": "^6.1.4", "@octokit/plugin-paginate-rest": "^11.4.3", "@octokit/plugin-rest-endpoint-methods": "^13.3.1", @@ -22,16 +22,16 @@ "@biomejs/biome": "^1.9.4", "@octokit/types": "^13.8.0", "@octokit/webhooks-types": "^7.6.1", - "@types/node": "^22.13.5", + "@types/node": "^22.13.10", "@types/which": "^3.0.4", "@vercel/ncc": "^0.38.3", - "@vitest/coverage-v8": "^3.0.7", + "@vitest/coverage-v8": "^3.0.8", "make-coverage-badge": "^1.2.0", "textlint": "^14.4.2", 
"textlint-filter-rule-comments": "^1.2.2", "textlint-rule-terminology": "^5.2.12", - "ts-deepmerge": "^7.0.1", - "typescript": "^5.7.3", + "ts-deepmerge": "^7.0.2", + "typescript": "^5.8.2", "vitest": "^3.0.5" }, "engines": { @@ -125,13 +125,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.26.8", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.8.tgz", - "integrity": "sha512-TZIQ25pkSoaKEYYaHbbxkfL36GNsQ6iFiBbeuzAkLnXayKR1yP1zFe+NxuZWWsUyvt8icPU9CCq0sgWGXR1GEw==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz", + "integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.26.8" + "@babel/types": "^7.26.9" }, "bin": { "parser": "bin/babel-parser.js" @@ -141,9 +141,9 @@ } }, "node_modules/@babel/types": { - "version": "7.26.8", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.8.tgz", - "integrity": "sha512-eUuWapzEGWFEpHFxgEaBG8e3n6S8L3MSu0oda755rOfabWPnh0Our1AozNFVUxGFIhbKgd1ksprsoDGMinTOTA==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz", + "integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==", "dev": true, "license": "MIT", "dependencies": { @@ -329,9 +329,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz", - "integrity": "sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", + "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", "cpu": [ "ppc64" ], @@ -346,9 +346,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": 
"0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz", - "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", + "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", "cpu": [ "arm" ], @@ -363,9 +363,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz", - "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", + "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", "cpu": [ "arm64" ], @@ -380,9 +380,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz", - "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", + "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", "cpu": [ "x64" ], @@ -397,9 +397,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz", - "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", + "integrity": 
"sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", "cpu": [ "arm64" ], @@ -414,9 +414,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz", - "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", + "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", "cpu": [ "x64" ], @@ -431,9 +431,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz", - "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", + "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", "cpu": [ "arm64" ], @@ -448,9 +448,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz", - "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", + "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", "cpu": [ "x64" ], @@ -465,9 +465,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz", - "integrity": 
"sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", + "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", "cpu": [ "arm" ], @@ -482,9 +482,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz", - "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", + "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", "cpu": [ "arm64" ], @@ -499,9 +499,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz", - "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", + "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", "cpu": [ "ia32" ], @@ -516,9 +516,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz", - "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", + "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", "cpu": [ "loong64" ], @@ -533,9 +533,9 @@ } }, 
"node_modules/@esbuild/linux-mips64el": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz", - "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", + "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", "cpu": [ "mips64el" ], @@ -550,9 +550,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz", - "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", + "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", "cpu": [ "ppc64" ], @@ -567,9 +567,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz", - "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", + "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", "cpu": [ "riscv64" ], @@ -584,9 +584,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz", - "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==", + "version": "0.25.1", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", + "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", "cpu": [ "s390x" ], @@ -601,9 +601,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz", - "integrity": "sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", + "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", "cpu": [ "x64" ], @@ -618,9 +618,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz", - "integrity": "sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", + "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", "cpu": [ "arm64" ], @@ -635,9 +635,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz", - "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", + "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", "cpu": [ "x64" ], @@ -652,9 +652,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz", - 
"integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", + "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", "cpu": [ "arm64" ], @@ -669,9 +669,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz", - "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", + "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", "cpu": [ "x64" ], @@ -686,9 +686,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz", - "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", + "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", "cpu": [ "x64" ], @@ -703,9 +703,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz", - "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", + "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", "cpu": [ "arm64" ], @@ -720,9 +720,9 @@ } }, 
"node_modules/@esbuild/win32-ia32": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz", - "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", + "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", "cpu": [ "ia32" ], @@ -737,9 +737,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz", - "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", + "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", "cpu": [ "x64" ], @@ -844,9 +844,9 @@ } }, "node_modules/@keyv/serialize": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@keyv/serialize/-/serialize-1.0.2.tgz", - "integrity": "sha512-+E/LyaAeuABniD/RvUezWVXKpeuvwLEA9//nE9952zBaOdBd2mQ3pPoM8cUe2X6IcMByfuSLzmYqnYshG60+HQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@keyv/serialize/-/serialize-1.0.3.tgz", + "integrity": "sha512-qnEovoOp5Np2JDGonIDL6Ayihw0RhnRh6vxPuHo4RDn1UOzwEo4AeIfpL6UGIrsceWrCMiVPgwRjbHu4vYFc3g==", "dev": true, "license": "MIT", "dependencies": { @@ -894,12 +894,12 @@ } }, "node_modules/@octokit/graphql": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.2.0.tgz", - "integrity": "sha512-gejfDywEml/45SqbWTWrhfwvLBrcGYhOn50sPOjIeVvH6i7D16/9xcFA8dAJNp2HMcd+g4vru41g4E2RBiZvfQ==", + "version": "8.2.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.2.1.tgz", + "integrity": 
"sha512-n57hXtOoHrhwTWdvhVkdJHdhTv0JstjDbDRhJfwIRNfFqmSo1DaK/mD2syoNUoLCyqSjBpGAKOG0BuwF392slw==", "license": "MIT", "dependencies": { - "@octokit/request": "^9.1.4", + "@octokit/request": "^9.2.2", "@octokit/types": "^13.8.0", "universal-user-agent": "^7.0.0" }, @@ -999,9 +999,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.34.8.tgz", - "integrity": "sha512-q217OSE8DTp8AFHuNHXo0Y86e1wtlfVrXiAlwkIvGRQv9zbc6mE3sjIVfwI8sYUyNxwOg0j/Vm1RKM04JcWLJw==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.35.0.tgz", + "integrity": "sha512-uYQ2WfPaqz5QtVgMxfN6NpLD+no0MYHDBywl7itPYd3K5TjjSghNKmX8ic9S8NU8w81NVhJv/XojcHptRly7qQ==", "cpu": [ "arm" ], @@ -1013,9 +1013,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.34.8.tgz", - "integrity": "sha512-Gigjz7mNWaOL9wCggvoK3jEIUUbGul656opstjaUSGC3eT0BM7PofdAJaBfPFWWkXNVAXbaQtC99OCg4sJv70Q==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.35.0.tgz", + "integrity": "sha512-FtKddj9XZudurLhdJnBl9fl6BwCJ3ky8riCXjEw3/UIbjmIY58ppWwPEvU3fNu+W7FUsAsB1CdH+7EQE6CXAPA==", "cpu": [ "arm64" ], @@ -1027,9 +1027,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.34.8.tgz", - "integrity": "sha512-02rVdZ5tgdUNRxIUrFdcMBZQoaPMrxtwSb+/hOfBdqkatYHR3lZ2A2EGyHq2sGOd0Owk80oV3snlDASC24He3Q==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.35.0.tgz", + "integrity": "sha512-Uk+GjOJR6CY844/q6r5DR/6lkPFOw0hjfOIzVx22THJXMxktXG6CbejseJFznU8vHcEBLpiXKY3/6xc+cBm65Q==", 
"cpu": [ "arm64" ], @@ -1041,9 +1041,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.34.8.tgz", - "integrity": "sha512-qIP/elwR/tq/dYRx3lgwK31jkZvMiD6qUtOycLhTzCvrjbZ3LjQnEM9rNhSGpbLXVJYQ3rq39A6Re0h9tU2ynw==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.35.0.tgz", + "integrity": "sha512-3IrHjfAS6Vkp+5bISNQnPogRAW5GAV1n+bNCrDwXmfMHbPl5EhTmWtfmwlJxFRUCBZ+tZ/OxDyU08aF6NI/N5Q==", "cpu": [ "x64" ], @@ -1055,9 +1055,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.34.8.tgz", - "integrity": "sha512-IQNVXL9iY6NniYbTaOKdrlVP3XIqazBgJOVkddzJlqnCpRi/yAeSOa8PLcECFSQochzqApIOE1GHNu3pCz+BDA==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.35.0.tgz", + "integrity": "sha512-sxjoD/6F9cDLSELuLNnY0fOrM9WA0KrM0vWm57XhrIMf5FGiN8D0l7fn+bpUeBSU7dCgPV2oX4zHAsAXyHFGcQ==", "cpu": [ "arm64" ], @@ -1069,9 +1069,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.34.8.tgz", - "integrity": "sha512-TYXcHghgnCqYFiE3FT5QwXtOZqDj5GmaFNTNt3jNC+vh22dc/ukG2cG+pi75QO4kACohZzidsq7yKTKwq/Jq7Q==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.35.0.tgz", + "integrity": "sha512-2mpHCeRuD1u/2kruUiHSsnjWtHjqVbzhBkNVQ1aVD63CcexKVcQGwJ2g5VphOd84GvxfSvnnlEyBtQCE5hxVVw==", "cpu": [ "x64" ], @@ -1083,9 +1083,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.34.8.tgz", - "integrity": 
"sha512-A4iphFGNkWRd+5m3VIGuqHnG3MVnqKe7Al57u9mwgbyZ2/xF9Jio72MaY7xxh+Y87VAHmGQr73qoKL9HPbXj1g==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.35.0.tgz", + "integrity": "sha512-mrA0v3QMy6ZSvEuLs0dMxcO2LnaCONs1Z73GUDBHWbY8tFFocM6yl7YyMu7rz4zS81NDSqhrUuolyZXGi8TEqg==", "cpu": [ "arm" ], @@ -1097,9 +1097,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.34.8.tgz", - "integrity": "sha512-S0lqKLfTm5u+QTxlFiAnb2J/2dgQqRy/XvziPtDd1rKZFXHTyYLoVL58M/XFwDI01AQCDIevGLbQrMAtdyanpA==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.35.0.tgz", + "integrity": "sha512-DnYhhzcvTAKNexIql8pFajr0PiDGrIsBYPRvCKlA5ixSS3uwo/CWNZxB09jhIapEIg945KOzcYEAGGSmTSpk7A==", "cpu": [ "arm" ], @@ -1111,9 +1111,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.34.8.tgz", - "integrity": "sha512-jpz9YOuPiSkL4G4pqKrus0pn9aYwpImGkosRKwNi+sJSkz+WU3anZe6hi73StLOQdfXYXC7hUfsQlTnjMd3s1A==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.35.0.tgz", + "integrity": "sha512-uagpnH2M2g2b5iLsCTZ35CL1FgyuzzJQ8L9VtlJ+FckBXroTwNOaD0z0/UF+k5K3aNQjbm8LIVpxykUOQt1m/A==", "cpu": [ "arm64" ], @@ -1125,9 +1125,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.34.8.tgz", - "integrity": "sha512-KdSfaROOUJXgTVxJNAZ3KwkRc5nggDk+06P6lgi1HLv1hskgvxHUKZ4xtwHkVYJ1Rep4GNo+uEfycCRRxht7+Q==", + "version": "4.35.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.35.0.tgz", + "integrity": "sha512-XQxVOCd6VJeHQA/7YcqyV0/88N6ysSVzRjJ9I9UA/xXpEsjvAgDTgH3wQYz5bmr7SPtVK2TsP2fQ2N9L4ukoUg==", "cpu": [ "arm64" ], @@ -1139,9 +1139,9 @@ ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.34.8.tgz", - "integrity": "sha512-NyF4gcxwkMFRjgXBM6g2lkT58OWztZvw5KkV2K0qqSnUEqCVcqdh2jN4gQrTn/YUpAcNKyFHfoOZEer9nwo6uQ==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.35.0.tgz", + "integrity": "sha512-5pMT5PzfgwcXEwOaSrqVsz/LvjDZt+vQ8RT/70yhPU06PTuq8WaHhfT1LW+cdD7mW6i/J5/XIkX/1tCAkh1W6g==", "cpu": [ "loong64" ], @@ -1153,9 +1153,9 @@ ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.34.8.tgz", - "integrity": "sha512-LMJc999GkhGvktHU85zNTDImZVUCJ1z/MbAJTnviiWmmjyckP5aQsHtcujMjpNdMZPT2rQEDBlJfubhs3jsMfw==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.35.0.tgz", + "integrity": "sha512-c+zkcvbhbXF98f4CtEIP1EBA/lCic5xB0lToneZYvMeKu5Kamq3O8gqrxiYYLzlZH6E3Aq+TSW86E4ay8iD8EA==", "cpu": [ "ppc64" ], @@ -1167,9 +1167,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.34.8.tgz", - "integrity": "sha512-xAQCAHPj8nJq1PI3z8CIZzXuXCstquz7cIOL73HHdXiRcKk8Ywwqtx2wrIy23EcTn4aZ2fLJNBB8d0tQENPCmw==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.35.0.tgz", + "integrity": 
"sha512-s91fuAHdOwH/Tad2tzTtPX7UZyytHIRR6V4+2IGlV0Cej5rkG0R61SX4l4y9sh0JBibMiploZx3oHKPnQBKe4g==", "cpu": [ "riscv64" ], @@ -1181,9 +1181,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.34.8.tgz", - "integrity": "sha512-DdePVk1NDEuc3fOe3dPPTb+rjMtuFw89gw6gVWxQFAuEqqSdDKnrwzZHrUYdac7A7dXl9Q2Vflxpme15gUWQFA==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.35.0.tgz", + "integrity": "sha512-hQRkPQPLYJZYGP+Hj4fR9dDBMIM7zrzJDWFEMPdTnTy95Ljnv0/4w/ixFw3pTBMEuuEuoqtBINYND4M7ujcuQw==", "cpu": [ "s390x" ], @@ -1195,9 +1195,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.34.8.tgz", - "integrity": "sha512-8y7ED8gjxITUltTUEJLQdgpbPh1sUQ0kMTmufRF/Ns5tI9TNMNlhWtmPKKHCU0SilX+3MJkZ0zERYYGIVBYHIA==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.35.0.tgz", + "integrity": "sha512-Pim1T8rXOri+0HmV4CdKSGrqcBWX0d1HoPnQ0uw0bdp1aP5SdQVNBy8LjYncvnLgu3fnnCt17xjWGd4cqh8/hA==", "cpu": [ "x64" ], @@ -1209,9 +1209,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.34.8.tgz", - "integrity": "sha512-SCXcP0ZpGFIe7Ge+McxY5zKxiEI5ra+GT3QRxL0pMMtxPfpyLAKleZODi1zdRHkz5/BhueUrYtYVgubqe9JBNQ==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.35.0.tgz", + "integrity": "sha512-QysqXzYiDvQWfUiTm8XmJNO2zm9yC9P/2Gkrwg2dH9cxotQzunBHYr6jk4SujCTqnfGxduOmQcI7c2ryuW8XVg==", "cpu": [ "x64" ], @@ -1223,9 +1223,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.34.8", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.34.8.tgz", - "integrity": "sha512-YHYsgzZgFJzTRbth4h7Or0m5O74Yda+hLin0irAIobkLQFRQd1qWmnoVfwmKm9TXIZVAD0nZ+GEb2ICicLyCnQ==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.35.0.tgz", + "integrity": "sha512-OUOlGqPkVJCdJETKOCEf1mw848ZyJ5w50/rZ/3IBQVdLfR5jk/6Sr5m3iO2tdPgwo0x7VcncYuOvMhBWZq8ayg==", "cpu": [ "arm64" ], @@ -1237,9 +1237,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.34.8.tgz", - "integrity": "sha512-r3NRQrXkHr4uWy5TOjTpTYojR9XmF0j/RYgKCef+Ag46FWUTltm5ziticv8LdNsDMehjJ543x/+TJAek/xBA2w==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.35.0.tgz", + "integrity": "sha512-2/lsgejMrtwQe44glq7AFFHLfJBPafpsTa6JvP2NGef/ifOa4KBoglVf7AKN7EV9o32evBPRqfg96fEHzWo5kw==", "cpu": [ "ia32" ], @@ -1251,9 +1251,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.34.8.tgz", - "integrity": "sha512-U0FaE5O1BCpZSeE6gBl3c5ObhePQSfk9vDRToMmTkbhCOgW4jqvtS5LGyQ76L1fH8sM0keRp4uDTsbjiUyjk0g==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.35.0.tgz", + "integrity": "sha512-PIQeY5XDkrOysbQblSW7v3l1MDZzkTEzAfTPkj5VAu3FW8fS4ynyLg2sINp0fp3SjZ8xkRYpLqoKcYqAkhU1dw==", "cpu": [ "x64" ], @@ -1573,9 +1573,9 @@ } }, "node_modules/@types/node": { - "version": "22.13.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.5.tgz", - "integrity": "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg==", + "version": "22.13.10", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-22.13.10.tgz", + "integrity": "sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw==", "dev": true, "license": "MIT", "dependencies": { @@ -1607,9 +1607,9 @@ } }, "node_modules/@vitest/coverage-v8": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.7.tgz", - "integrity": "sha512-Av8WgBJLTrfLOer0uy3CxjlVuWK4CzcLBndW1Nm2vI+3hZ2ozHututkfc7Blu1u6waeQ7J8gzPK/AsBRnWA5mQ==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.8.tgz", + "integrity": "sha512-y7SAKsQirsEJ2F8bulBck4DoluhI2EEgTimHd6EEUgJBGKy9tC25cpywh1MH4FvDGoG2Unt7+asVd1kj4qOSAw==", "dev": true, "license": "MIT", "dependencies": { @@ -1630,8 +1630,8 @@ "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "3.0.7", - "vitest": "3.0.7" + "@vitest/browser": "3.0.8", + "vitest": "3.0.8" }, "peerDependenciesMeta": { "@vitest/browser": { @@ -1640,14 +1640,14 @@ } }, "node_modules/@vitest/expect": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.7.tgz", - "integrity": "sha512-QP25f+YJhzPfHrHfYHtvRn+uvkCFCqFtW9CktfBxmB+25QqWsx7VB2As6f4GmwllHLDhXNHvqedwhvMmSnNmjw==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.8.tgz", + "integrity": "sha512-Xu6TTIavTvSSS6LZaA3EebWFr6tsoXPetOWNMOlc7LO88QVVBwq2oQWBoDiLCN6YTvNYsGSjqOO8CAdjom5DCQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.7", - "@vitest/utils": "3.0.7", + "@vitest/spy": "3.0.8", + "@vitest/utils": "3.0.8", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" }, @@ -1656,13 +1656,13 @@ } }, "node_modules/@vitest/mocker": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.7.tgz", - "integrity": "sha512-qui+3BLz9Eonx4EAuR/i+QlCX6AUZ35taDQgwGkK/Tw6/WgwodSrjN1X2xf69IA/643ZX5zNKIn2svvtZDrs4w==", + 
"version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.8.tgz", + "integrity": "sha512-n3LjS7fcW1BCoF+zWZxG7/5XvuYH+lsFg+BDwwAz0arIwHQJFUEsKBQ0BLU49fCxuM/2HSeBPHQD8WjgrxMfow==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.7", + "@vitest/spy": "3.0.8", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, @@ -1683,9 +1683,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.7.tgz", - "integrity": "sha512-CiRY0BViD/V8uwuEzz9Yapyao+M9M008/9oMOSQydwbwb+CMokEq3XVaF3XK/VWaOK0Jm9z7ENhybg70Gtxsmg==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.8.tgz", + "integrity": "sha512-BNqwbEyitFhzYMYHUVbIvepOyeQOSFA/NeJMIP9enMntkkxLgOcgABH6fjyXG85ipTgvero6noreavGIqfJcIg==", "dev": true, "license": "MIT", "dependencies": { @@ -1696,13 +1696,13 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.7.tgz", - "integrity": "sha512-WeEl38Z0S2ZcuRTeyYqaZtm4e26tq6ZFqh5y8YD9YxfWuu0OFiGFUbnxNynwLjNRHPsXyee2M9tV7YxOTPZl2g==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.8.tgz", + "integrity": "sha512-c7UUw6gEcOzI8fih+uaAXS5DwjlBaCJUo7KJ4VvJcjL95+DSR1kova2hFuRt3w41KZEFcOEiq098KkyrjXeM5w==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.7", + "@vitest/utils": "3.0.8", "pathe": "^2.0.3" }, "funding": { @@ -1710,13 +1710,13 @@ } }, "node_modules/@vitest/snapshot": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.7.tgz", - "integrity": "sha512-eqTUryJWQN0Rtf5yqCGTQWsCFOQe4eNz5Twsu21xYEcnFJtMU5XvmG0vgebhdLlrHQTSq5p8vWHJIeJQV8ovsA==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.8.tgz", + "integrity": 
"sha512-x8IlMGSEMugakInj44nUrLSILh/zy1f2/BgH0UeHpNyOocG18M9CWVIFBaXPt8TrqVZWmcPjwfG/ht5tnpba8A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.7", + "@vitest/pretty-format": "3.0.8", "magic-string": "^0.30.17", "pathe": "^2.0.3" }, @@ -1725,9 +1725,9 @@ } }, "node_modules/@vitest/spy": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.7.tgz", - "integrity": "sha512-4T4WcsibB0B6hrKdAZTM37ekuyFZt2cGbEGd2+L0P8ov15J1/HUsUaqkXEQPNAWr4BtPPe1gI+FYfMHhEKfR8w==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.8.tgz", + "integrity": "sha512-MR+PzJa+22vFKYb934CejhR4BeRpMSoxkvNoDit68GQxRLSf11aT6CTj3XaqUU9rxgWJFnqicN/wxw6yBRkI1Q==", "dev": true, "license": "MIT", "dependencies": { @@ -1738,13 +1738,13 @@ } }, "node_modules/@vitest/utils": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.7.tgz", - "integrity": "sha512-xePVpCRfooFX3rANQjwoditoXgWb1MaFbzmGuPP59MK6i13mrnDw/yEIyJudLeW6/38mCNcwCiJIGmpDPibAIg==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.8.tgz", + "integrity": "sha512-nkBC3aEhfX2PdtQI/QwAWp8qZWwzASsU4Npbcd5RdMPBSSLCpkZp52P3xku3s3uA0HIEhGvEcF8rNkBsz9dQ4Q==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.7", + "@vitest/pretty-format": "3.0.8", "loupe": "^3.1.3", "tinyrainbow": "^2.0.0" }, @@ -1924,14 +1924,14 @@ } }, "node_modules/cacheable": { - "version": "1.8.8", - "resolved": "https://registry.npmjs.org/cacheable/-/cacheable-1.8.8.tgz", - "integrity": "sha512-OE1/jlarWxROUIpd0qGBSKFLkNsotY8pt4GeiVErUYh/NUeTNrT+SBksUgllQv4m6a0W/VZsLuiHb88maavqEw==", + "version": "1.8.9", + "resolved": "https://registry.npmjs.org/cacheable/-/cacheable-1.8.9.tgz", + "integrity": "sha512-FicwAUyWnrtnd4QqYAoRlNs44/a1jTL7XDKqm5gJ90wz1DQPlC7U2Rd1Tydpv+E7WAr4sQHuw8Q8M3nZMAyecQ==", "dev": true, "license": "MIT", "dependencies": { - "hookified": "^1.7.0", - 
"keyv": "^5.2.3" + "hookified": "^1.7.1", + "keyv": "^5.3.1" } }, "node_modules/ccount": { @@ -2177,9 +2177,9 @@ "license": "MIT" }, "node_modules/esbuild": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz", - "integrity": "sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", + "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -2190,31 +2190,31 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.0", - "@esbuild/android-arm": "0.25.0", - "@esbuild/android-arm64": "0.25.0", - "@esbuild/android-x64": "0.25.0", - "@esbuild/darwin-arm64": "0.25.0", - "@esbuild/darwin-x64": "0.25.0", - "@esbuild/freebsd-arm64": "0.25.0", - "@esbuild/freebsd-x64": "0.25.0", - "@esbuild/linux-arm": "0.25.0", - "@esbuild/linux-arm64": "0.25.0", - "@esbuild/linux-ia32": "0.25.0", - "@esbuild/linux-loong64": "0.25.0", - "@esbuild/linux-mips64el": "0.25.0", - "@esbuild/linux-ppc64": "0.25.0", - "@esbuild/linux-riscv64": "0.25.0", - "@esbuild/linux-s390x": "0.25.0", - "@esbuild/linux-x64": "0.25.0", - "@esbuild/netbsd-arm64": "0.25.0", - "@esbuild/netbsd-x64": "0.25.0", - "@esbuild/openbsd-arm64": "0.25.0", - "@esbuild/openbsd-x64": "0.25.0", - "@esbuild/sunos-x64": "0.25.0", - "@esbuild/win32-arm64": "0.25.0", - "@esbuild/win32-ia32": "0.25.0", - "@esbuild/win32-x64": "0.25.0" + "@esbuild/aix-ppc64": "0.25.1", + "@esbuild/android-arm": "0.25.1", + "@esbuild/android-arm64": "0.25.1", + "@esbuild/android-x64": "0.25.1", + "@esbuild/darwin-arm64": "0.25.1", + "@esbuild/darwin-x64": "0.25.1", + "@esbuild/freebsd-arm64": "0.25.1", + "@esbuild/freebsd-x64": "0.25.1", + "@esbuild/linux-arm": "0.25.1", + "@esbuild/linux-arm64": "0.25.1", + "@esbuild/linux-ia32": "0.25.1", 
+ "@esbuild/linux-loong64": "0.25.1", + "@esbuild/linux-mips64el": "0.25.1", + "@esbuild/linux-ppc64": "0.25.1", + "@esbuild/linux-riscv64": "0.25.1", + "@esbuild/linux-s390x": "0.25.1", + "@esbuild/linux-x64": "0.25.1", + "@esbuild/netbsd-arm64": "0.25.1", + "@esbuild/netbsd-x64": "0.25.1", + "@esbuild/openbsd-arm64": "0.25.1", + "@esbuild/openbsd-x64": "0.25.1", + "@esbuild/sunos-x64": "0.25.1", + "@esbuild/win32-arm64": "0.25.1", + "@esbuild/win32-ia32": "0.25.1", + "@esbuild/win32-x64": "0.25.1" } }, "node_modules/escape-string-regexp": { @@ -2255,9 +2255,9 @@ } }, "node_modules/expect-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", - "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.0.tgz", + "integrity": "sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2340,13 +2340,13 @@ } }, "node_modules/file-entry-cache": { - "version": "10.0.6", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-10.0.6.tgz", - "integrity": "sha512-0wvv16mVo9nN0Md3k7DMjgAPKG/TY4F/gYMBVb/wMThFRJvzrpaqBFqF6km9wf8QfYTN+mNg5aeaBLfy8k35uA==", + "version": "10.0.7", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-10.0.7.tgz", + "integrity": "sha512-txsf5fu3anp2ff3+gOJJzRImtrtm/oa9tYLN0iTuINZ++EyVR/nRrg2fKYwvG/pXDofcrvvb0scEbX3NyW/COw==", "dev": true, "license": "MIT", "dependencies": { - "flat-cache": "^6.1.6" + "flat-cache": "^6.1.7" } }, "node_modules/find-up": { @@ -2363,32 +2363,32 @@ } }, "node_modules/flat-cache": { - "version": "6.1.6", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-6.1.6.tgz", - "integrity": 
"sha512-F+CKgSwp0pzLx67u+Zy1aCueVWFAHWbXepvXlZ+bWVTaASbm5SyCnSJ80Fp1ePEmS57wU+Bf6cx6525qtMZ4lQ==", + "version": "6.1.7", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-6.1.7.tgz", + "integrity": "sha512-qwZ4xf1v1m7Rc9XiORly31YaChvKt6oNVHuqqZcoED/7O+ToyNVGobKsIAopY9ODcWpEDKEBAbrSOCBHtNQvew==", "dev": true, "license": "MIT", "dependencies": { - "cacheable": "^1.8.8", - "flatted": "^3.3.2", - "hookified": "^1.7.0" + "cacheable": "^1.8.9", + "flatted": "^3.3.3", + "hookified": "^1.7.1" } }, "node_modules/flatted": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", - "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "dev": true, "license": "ISC" }, "node_modules/foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", "dev": true, "license": "ISC", "dependencies": { - "cross-spawn": "^7.0.0", + "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" }, "engines": { @@ -2510,9 +2510,9 @@ } }, "node_modules/hookified": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/hookified/-/hookified-1.7.0.tgz", - "integrity": "sha512-XQdMjqC1AyeOzfs+17cnIk7Wdfu1hh2JtcyNfBf5u9jHrT3iZUlGHxLTntFBuk5lwkqJ6l3+daeQdHK5yByHVA==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/hookified/-/hookified-1.7.1.tgz", + "integrity": 
"sha512-OXcdHsXeOiD7OJ5zvWj8Oy/6RCdLwntAX+wUrfemNcMGn6sux4xbEHi2QXwqePYhjQ/yvxxq2MvCRirdlHscBw==", "dev": true, "license": "MIT" }, @@ -2777,13 +2777,13 @@ } }, "node_modules/keyv": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.2.3.tgz", - "integrity": "sha512-AGKecUfzrowabUv0bH1RIR5Vf7w+l4S3xtQAypKaUpTdIR1EbrAcTxHCrpo9Q+IWeUlFE2palRtgIQcgm+PQJw==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.3.1.tgz", + "integrity": "sha512-13hQT2q2VIwOoaJdJa7nY3J8UVbYtMTJFHnwm9LI+SaQRfUiM6Em9KZeOVTCKbMnGcRIL3NSUFpAdjZCq24nLQ==", "dev": true, "license": "MIT", "dependencies": { - "@keyv/serialize": "^1.0.2" + "@keyv/serialize": "^1.0.3" } }, "node_modules/levn": { @@ -3322,9 +3322,9 @@ "license": "MIT" }, "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", + "version": "3.3.9", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.9.tgz", + "integrity": "sha512-SppoicMGpZvbF1l3z4x7No3OlIjP7QJvC9XR7AhZr1kL133KHnKPztkKDc+Ir4aJ/1VhTySrtKhrsycmrMQfvg==", "dev": true, "funding": [ { @@ -3891,9 +3891,9 @@ } }, "node_modules/rollup": { - "version": "4.34.8", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.34.8.tgz", - "integrity": "sha512-489gTVMzAYdiZHFVA/ig/iYFllCcWFHMvUHI1rpFmkoUtRlQxqh6/yiNqnYibjMZ2b/+FUQwldG+aLsEt6bglQ==", + "version": "4.35.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.35.0.tgz", + "integrity": "sha512-kg6oI4g+vc41vePJyO6dHt/yl0Rz3Thv0kJeVQ3D1kS3E5XSuKbPc29G4IpT/Kv1KQwgHVcN+HtyS+HYLNSvQg==", "dev": true, "license": "MIT", "dependencies": { @@ -3907,25 +3907,25 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.34.8", - "@rollup/rollup-android-arm64": "4.34.8", - "@rollup/rollup-darwin-arm64": "4.34.8", - "@rollup/rollup-darwin-x64": "4.34.8", - 
"@rollup/rollup-freebsd-arm64": "4.34.8", - "@rollup/rollup-freebsd-x64": "4.34.8", - "@rollup/rollup-linux-arm-gnueabihf": "4.34.8", - "@rollup/rollup-linux-arm-musleabihf": "4.34.8", - "@rollup/rollup-linux-arm64-gnu": "4.34.8", - "@rollup/rollup-linux-arm64-musl": "4.34.8", - "@rollup/rollup-linux-loongarch64-gnu": "4.34.8", - "@rollup/rollup-linux-powerpc64le-gnu": "4.34.8", - "@rollup/rollup-linux-riscv64-gnu": "4.34.8", - "@rollup/rollup-linux-s390x-gnu": "4.34.8", - "@rollup/rollup-linux-x64-gnu": "4.34.8", - "@rollup/rollup-linux-x64-musl": "4.34.8", - "@rollup/rollup-win32-arm64-msvc": "4.34.8", - "@rollup/rollup-win32-ia32-msvc": "4.34.8", - "@rollup/rollup-win32-x64-msvc": "4.34.8", + "@rollup/rollup-android-arm-eabi": "4.35.0", + "@rollup/rollup-android-arm64": "4.35.0", + "@rollup/rollup-darwin-arm64": "4.35.0", + "@rollup/rollup-darwin-x64": "4.35.0", + "@rollup/rollup-freebsd-arm64": "4.35.0", + "@rollup/rollup-freebsd-x64": "4.35.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.35.0", + "@rollup/rollup-linux-arm-musleabihf": "4.35.0", + "@rollup/rollup-linux-arm64-gnu": "4.35.0", + "@rollup/rollup-linux-arm64-musl": "4.35.0", + "@rollup/rollup-linux-loongarch64-gnu": "4.35.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.35.0", + "@rollup/rollup-linux-riscv64-gnu": "4.35.0", + "@rollup/rollup-linux-s390x-gnu": "4.35.0", + "@rollup/rollup-linux-x64-gnu": "4.35.0", + "@rollup/rollup-linux-x64-musl": "4.35.0", + "@rollup/rollup-win32-arm64-msvc": "4.35.0", + "@rollup/rollup-win32-ia32-msvc": "4.35.0", + "@rollup/rollup-win32-x64-msvc": "4.35.0", "fsevents": "~2.3.2" } }, @@ -4064,9 +4064,9 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", - "integrity": "sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.1.tgz", + "integrity": 
"sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==", "dev": true, "license": "MIT" }, @@ -4509,9 +4509,9 @@ } }, "node_modules/typescript": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", - "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", + "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", "dev": true, "license": "Apache-2.0", "bin": { @@ -4721,9 +4721,9 @@ } }, "node_modules/vite": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.0.tgz", - "integrity": "sha512-7dPxoo+WsT/64rDcwoOjk76XHj+TqNTIvHKcuMQ1k4/SeHDaQt5GFAeLYzrimZrMpn/O6DtdI03WUjdxuPM0oQ==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.1.tgz", + "integrity": "sha512-n2GnqDb6XPhlt9B8olZPrgMD/es/Nd1RdChF6CBD/fHW6pUyUTt2sQW2fPRX5GiD9XEa6+8A6A4f2vT6pSsE7Q==", "dev": true, "license": "MIT", "dependencies": { @@ -4793,9 +4793,9 @@ } }, "node_modules/vite-node": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.7.tgz", - "integrity": "sha512-2fX0QwX4GkkkpULXdT1Pf4q0tC1i1lFOyseKoonavXUNlQ77KpW2XqBGGNIm/J4Ows4KxgGJzDguYVPKwG/n5A==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.8.tgz", + "integrity": "sha512-6PhR4H9VGlcwXZ+KWCdMqbtG649xCPZqfI9j2PsK1FcXgEzro5bGHcVKFCTqPLaNKZES8Evqv4LwvZARsq5qlg==", "dev": true, "license": "MIT", "dependencies": { @@ -4816,19 +4816,19 @@ } }, "node_modules/vitest": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.7.tgz", - "integrity": "sha512-IP7gPK3LS3Fvn44x30X1dM9vtawm0aesAa2yBIZ9vQf+qB69NXC5776+Qmcr7ohUXIQuLhk7xQR0aSUIDPqavg==", + "version": "3.0.8", + "resolved": 
"https://registry.npmjs.org/vitest/-/vitest-3.0.8.tgz", + "integrity": "sha512-dfqAsNqRGUc8hB9OVR2P0w8PZPEckti2+5rdZip0WIz9WW0MnImJ8XiR61QhqLa92EQzKP2uPkzenKOAHyEIbA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.7", - "@vitest/mocker": "3.0.7", - "@vitest/pretty-format": "^3.0.7", - "@vitest/runner": "3.0.7", - "@vitest/snapshot": "3.0.7", - "@vitest/spy": "3.0.7", - "@vitest/utils": "3.0.7", + "@vitest/expect": "3.0.8", + "@vitest/mocker": "3.0.8", + "@vitest/pretty-format": "^3.0.8", + "@vitest/runner": "3.0.8", + "@vitest/snapshot": "3.0.8", + "@vitest/spy": "3.0.8", + "@vitest/utils": "3.0.8", "chai": "^5.2.0", "debug": "^4.4.0", "expect-type": "^1.1.0", @@ -4840,7 +4840,7 @@ "tinypool": "^1.0.2", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0", - "vite-node": "3.0.7", + "vite-node": "3.0.8", "why-is-node-running": "^2.3.0" }, "bin": { @@ -4856,8 +4856,8 @@ "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.7", - "@vitest/ui": "3.0.7", + "@vitest/browser": "3.0.8", + "@vitest/ui": "3.0.8", "happy-dom": "*", "jsdom": "*" }, @@ -5023,9 +5023,9 @@ } }, "node_modules/yocto-queue": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.1.1.tgz", - "integrity": "sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.0.tgz", + "integrity": "sha512-KHBC7z61OJeaMGnF3wqNZj+GGNXOyypZviiKpQeiHirG5Ib1ImwcLBH70rbMSkKfSmUNBsdf2PwaEJtKvgmkNw==", "license": "MIT", "engines": { "node": ">=12.20" diff --git a/package.json b/package.json index 708b80d..bece7d0 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "terraform-module-releaser", "description": "GitHub Actions TypeScript template", - "version": "1.4.2", + "version": "1.5.0", "author": "virgofx", "type": "module", 
"main": "lib/index.js", @@ -45,7 +45,7 @@ "coverage": "make-coverage-badge --output-path ./assets/coverage-badge.svg" }, "dependencies": { - "@actions/core": "^1.10.1", + "@actions/core": "^1.11.1", "@octokit/core": "^6.1.4", "@octokit/plugin-paginate-rest": "^11.4.3", "@octokit/plugin-rest-endpoint-methods": "^13.3.1", @@ -58,16 +58,16 @@ "@biomejs/biome": "^1.9.4", "@octokit/types": "^13.8.0", "@octokit/webhooks-types": "^7.6.1", - "@types/node": "^22.13.5", + "@types/node": "^22.13.10", "@types/which": "^3.0.4", "@vercel/ncc": "^0.38.3", - "@vitest/coverage-v8": "^3.0.7", + "@vitest/coverage-v8": "^3.0.8", "make-coverage-badge": "^1.2.0", "textlint": "^14.4.2", "textlint-filter-rule-comments": "^1.2.2", "textlint-rule-terminology": "^5.2.12", - "ts-deepmerge": "^7.0.1", - "typescript": "^5.7.3", + "ts-deepmerge": "^7.0.2", + "typescript": "^5.8.2", "vitest": "^3.0.5" } } diff --git a/src/config.ts b/src/config.ts index be28619..40f52db 100644 --- a/src/config.ts +++ b/src/config.ts @@ -9,10 +9,11 @@ let configInstance: Config | null = null; * are removed and each value is trimmed of whitespace. * * @param inputName - Name of the input to retrieve. + * @param required - Whether the input is required. * @returns An array of trimmed and filtered values. 
*/ -const getArrayInput = (inputName: string): string[] => { - const input = getInput(inputName, { required: true }); +const getArrayInput = (inputName: string, required: boolean): string[] => { + const input = getInput(inputName, { required }); return Array.from( new Set( @@ -59,9 +60,9 @@ function initializeConfig(): Config { // Initialize the config instance configInstance = { - majorKeywords: getArrayInput('major-keywords'), - minorKeywords: getArrayInput('minor-keywords'), - patchKeywords: getArrayInput('patch-keywords'), + majorKeywords: getArrayInput('major-keywords', true), + minorKeywords: getArrayInput('minor-keywords', true), + patchKeywords: getArrayInput('patch-keywords', true), defaultFirstTag: getInput('default-first-tag', { required: true }), terraformDocsVersion: getInput('terraform-docs-version', { required: true }), deleteLegacyTags: getBooleanInput('delete-legacy-tags', { required: true }), @@ -69,8 +70,9 @@ function initializeConfig(): Config { wikiSidebarChangelogMax: Number.parseInt(getInput('wiki-sidebar-changelog-max', { required: true }), 10), disableBranding: getBooleanInput('disable-branding', { required: true }), githubToken: getInput('github_token', { required: true }), - moduleChangeExcludePatterns: getArrayInput('module-change-exclude-patterns'), - moduleAssetExcludePatterns: getArrayInput('module-asset-exclude-patterns'), + modulePathIgnore: getArrayInput('module-path-ignore', false), + moduleChangeExcludePatterns: getArrayInput('module-change-exclude-patterns', false), + moduleAssetExcludePatterns: getArrayInput('module-asset-exclude-patterns', false), useSSHSourceFormat: getBooleanInput('use-ssh-source-format', { required: true }), }; @@ -95,6 +97,7 @@ function initializeConfig(): Config { info(`Delete Legacy Tags: ${configInstance.deleteLegacyTags}`); info(`Disable Wiki: ${configInstance.disableWiki}`); info(`Wiki Sidebar Changelog Max: ${configInstance.wikiSidebarChangelogMax}`); + info(`Module Paths to Ignore: 
${configInstance.modulePathIgnore.join(', ')}`);
   info(`Module Change Exclude Patterns: ${configInstance.moduleChangeExcludePatterns.join(', ')}`);
   info(`Module Asset Exclude Patterns: ${configInstance.moduleAssetExcludePatterns.join(', ')}`);
   info(`Use SSH Source Format: ${configInstance.useSSHSourceFormat}`);
diff --git a/src/terraform-module.ts b/src/terraform-module.ts
index 3399dca..0029305 100644
--- a/src/terraform-module.ts
+++ b/src/terraform-module.ts
@@ -3,7 +3,7 @@ import { dirname, isAbsolute, join, relative, resolve } from 'node:path';
 import { config } from '@/config';
 import { context } from '@/context';
 import type { CommitDetails, GitHubRelease, TerraformChangedModule, TerraformModule } from '@/types';
-import { isTerraformDirectory, shouldExcludeFile } from '@/utils/file';
+import { isTerraformDirectory, shouldExcludeFile, shouldIgnoreModulePath } from '@/utils/file';
 import { determineReleaseType, getNextTagVersion } from '@/utils/semver';
 import { removeTrailingDots } from '@/utils/string';
 import { debug, endGroup, info, startGroup } from '@actions/core';
@@ -68,7 +68,7 @@ function getTerraformModuleNameFromRelativePath(terraformDirectory: string): str
 /**
  * Gets the relative path of the Terraform module directory associated with a specified file.
  *
- * Traverses upward from the file’s directory to locate the nearest Terraform module directory.
+ * Traverses upward from the file's directory to locate the nearest Terraform module directory.
  * Returns the module's path relative to the current working directory.
  *
  * @param {string} filePath - The absolute or relative path of the file to analyze.
@@ -173,6 +173,8 @@ export function getAllTerraformModules(
   const terraformModulesMap: Record<string, TerraformModule> = {};
   const workspaceDir = context.workspaceDir;
 
+  // Terraform only processes .tf and .tf.json files in the current working directory where you run the terraform commands. It does not automatically scan or include files from subdirectories.
+ // Helper function to recursively search for Terraform modules const searchDirectory = (dir: string) => { const files = readdirSync(dir); @@ -184,23 +186,40 @@ export function getAllTerraformModules( // If it's a directory, recursively search inside it if (stat.isDirectory()) { if (isTerraformDirectory(filePath)) { - const moduleName = getTerraformModuleNameFromRelativePath(relative(workspaceDir, filePath)); + const relativePath = relative(workspaceDir, filePath); + + // Check if this module path should be ignored + if (shouldIgnoreModulePath(relativePath, config.modulePathIgnore)) { + info(`Skipping module in ${relativePath} due to module-path-ignore match`); + continue; + } + + const moduleName = getTerraformModuleNameFromRelativePath(relativePath); terraformModulesMap[moduleName] = { moduleName, directory: filePath, ...getTagsForModule(moduleName, allTags), releases: getReleasesForModule(moduleName, allReleases), }; - } else { - searchDirectory(filePath); // Recurse into subdirectories } + + // We'll always recurse into subdirectories to find terraform modules even after we've found a match. + // This is because we want to find all modules in the workspace and although not conventional, there are + // cases where a module could be completely nested within another module and be 100% separate. + searchDirectory(filePath); // Recurse into subdirectories } } }; // Start the search from the workspace root directory + info(`Searching for Terraform modules in ${workspaceDir}`); searchDirectory(workspaceDir); + const totalModulesFound = Object.keys(terraformModulesMap).length; + info(`Found ${totalModulesFound} Terraform module${totalModulesFound !== 1 ? 
's' : ''}`); + info('Terraform Modules:'); + info(JSON.stringify(terraformModulesMap, null, 2)); + // Now process commits to find changed modules for (const { message, sha, files } of commits) { info(`Parsing commit ${sha}: ${message.trim().split('\n')[0].trim()} (Changed Files = ${files.length})`); @@ -214,13 +233,17 @@ export function getAllTerraformModules( continue; } + // Check if this module path should be ignored + if (shouldIgnoreModulePath(moduleRelativePath, config.modulePathIgnore)) { + info(` (skipping) ➜ Matches module-path-ignore pattern for path \`${moduleRelativePath}\``); + continue; + } + const moduleName = getTerraformModuleNameFromRelativePath(moduleRelativePath); // Skip excluded files based on provided pattern if (shouldExcludeFile(moduleRelativePath, relativeFilePath, config.moduleChangeExcludePatterns)) { - // Note: This could happen if we detect a change in a subdirectory of a terraform module - // but the change is in a file that we want to exclude. - info(`Excluding module "${moduleName}" match from "${relativeFilePath}" due to exclude pattern match.`); + info(` (skipping) ➜ Matches module-change-exclude-pattern for path \`${moduleRelativePath}\``); continue; } diff --git a/src/types/index.ts b/src/types/index.ts index 9aa3c12..8aa8395 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -206,6 +206,13 @@ export interface Config { * When set to true, the SSH standard format (non scp variation) will be used. Otherwise, the HTTPS format will be used. */ useSSHSourceFormat: boolean; + + /** + * A list of module paths to completely ignore when processing. Any module whose path matches + * one of these patterns will not be processed for versioning, release, or documentation. + * Paths are relative to the workspace directory. 
+ */ + modulePathIgnore: string[]; } /** diff --git a/src/utils/file.ts b/src/utils/file.ts index 1f7a8b3..49737e4 100644 --- a/src/utils/file.ts +++ b/src/utils/file.ts @@ -13,6 +13,39 @@ export function isTerraformDirectory(dirPath: string): boolean { return existsSync(dirPath) && readdirSync(dirPath).some((file) => extname(file) === '.tf'); } +/** + * Checks if a module path should be ignored based on provided ignore patterns. + * + * This function evaluates whether a given module path matches any of the specified ignore patterns + * using the minimatch library for glob pattern matching. + * + * @remarks + * Important pattern matching behavior notes: + * - A pattern like "dir/**" will match files/directories INSIDE "dir" but NOT "dir" itself + * - To match both a directory and its contents, you must include both patterns: + * ["dir", "dir/**"] + * - The function uses matchBase: false for precise path structure matching + * + * @example + * // Will return false (doesn't match the directory itself) + * shouldIgnoreModulePath('tf-modules/kms/examples/complete', ['tf-modules/kms/examples/complete/**']); + * + * @example + * // Will return true (matches the exact path) + * shouldIgnoreModulePath('tf-modules/kms/examples/complete', ['tf-modules/kms/examples/complete']); + * + * @param {string} modulePath - The path of the module to check. + * @param {string[]} ignorePatterns - Array of path patterns to ignore. + * @returns {boolean} True if the module should be ignored, false otherwise. + */ +export function shouldIgnoreModulePath(modulePath: string, ignorePatterns: string[]): boolean { + if (!ignorePatterns || ignorePatterns.length === 0) { + return false; + } + + return ignorePatterns.some((pattern: string) => minimatch(modulePath, pattern, { matchBase: false })); +} + /** * Checks if a file should be excluded from matching based on the defined exclude patterns * and relative paths from the base directory. 
diff --git a/tf-modules/kms/examples/complete/main.tf b/tf-modules/kms/examples/complete/main.tf new file mode 100644 index 0000000..4981b62 --- /dev/null +++ b/tf-modules/kms/examples/complete/main.tf @@ -0,0 +1,10 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.0" + } + } + + required_version = ">= 0.12" +} diff --git a/tf-modules/kms/main.tf b/tf-modules/kms/main.tf new file mode 100644 index 0000000..4981b62 --- /dev/null +++ b/tf-modules/kms/main.tf @@ -0,0 +1,10 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.0" + } + } + + required_version = ">= 0.12" +} diff --git a/tf-modules/kms/variables.tf b/tf-modules/kms/variables.tf new file mode 100644 index 0000000..e69de29 diff --git a/tsconfig.json b/tsconfig.json index 292f9b3..a0b6c56 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -4,7 +4,7 @@ "target": "ES2022", "module": "ES2022", "moduleResolution": "bundler", - "types": ["node"], + "types": ["node", "vite/client"], "baseUrl": ".", "rootDir": ".", "outDir": "./dist",