From 781c97fd15b22843d1267d749f6e96261669cf8c Mon Sep 17 00:00:00 2001 From: Sergey Shandar Date: Mon, 29 Apr 2019 11:28:37 -0700 Subject: [PATCH] [Merge is planed on 2019/04/29] Move scripts from this repo. (#5719) * Breaking Change2 * Experimental lintdiff * No more model/semantic/syntax validations in travis-CI. * Scripts 0.1.8 * Use scripts from @azure/rest-api-specs-scripts * ts-utils * fix breaking changes * momentOfTruthPostProcessing * momentOfTruthPostProcessing.ts * remove 'ts-utils.ts' * Remove `utils` * postToGitHub.ts * switch to momentOfTruthUtils * minor * Syntax.ts * semanticValidation.ts * another try * minor update * URL/fix * OAD ^0.2.13 * update to new scripts. * Use scripts from `sergey/move-from-specs-repo`. * simplified model validation. * preprod * postToGitHub * momentOfTruthPostProcessing * Semantic Validation * getSwaggers() * Use `getExamples()` and `getSwaggers()`. * Update package.json * Remove travis-CI jobs except branch strategy. * remove model and semantic validations version 2. --- .travis.yml | 33 -- .vscode/settings.json | 3 +- package.json | 5 +- preproduction-azure-pipelines.yml | 4 +- scripts/modelValidation.ts | 39 +-- scripts/modelValidation2.js | 7 - scripts/momentOfTruthPostProcessing.js | 423 ------------------------- scripts/momentOfTruthPostProcessing.ts | 6 + scripts/multiapi.ts | 3 + scripts/postToGitHub.js | 29 -- scripts/semanticValidation.ts | 25 +- scripts/semanticValidation2.js | 7 - scripts/ts-utils.ts | 4 - test/linter.js | 151 --------- test/{syntax.js => syntax.ts} | 44 +-- test/util/utils.ts | 347 -------------------- 16 files changed, 45 insertions(+), 1085 deletions(-) delete mode 100644 scripts/modelValidation2.js delete mode 100644 scripts/momentOfTruthPostProcessing.js create mode 100644 scripts/momentOfTruthPostProcessing.ts delete mode 100644 scripts/postToGitHub.js delete mode 100644 scripts/semanticValidation2.js delete mode 100644 scripts/ts-utils.ts delete mode 100644 test/linter.js rename test/{syntax.js => syntax.ts} (60%) delete mode 100644 test/util/utils.ts diff --git a/.travis.yml b/.travis.yml index 7cf1bcf34253..f2d34e652a8d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,15 +8,8 @@ services: env: matrix: - MODE=branchStrategy - - MODE=semantic PR_ONLY=true CHECK_NAME="Semantic Validator" - - MODE=model PR_ONLY=true CHECK_NAME="Model Validator" - - MODE=BreakingChange PR_ONLY=true CHECK_NAME="Breaking Changes" - - MODE=lintdiff PR_ONLY=true CHECK_NAME="Linter Diff" NODE_OPTIONS=--max-old-space-size=8192 matrix: fast_finish: true - allow_failures: - - env: MODE=model PR_ONLY=true CHECK_NAME="Model Validator" - - env: MODE=BreakingChange PR_ONLY=true CHECK_NAME="Breaking Changes" install: true script: - >- @@ -24,29 +17,3 @@ script: # Check to ensure CI is not executing for a PR against the master branch in the private repository ! 
[[ $TRAVIS_PULL_REQUEST != 'false' && $TRAVIS_REPO_SLUG == 'Azure/azure-rest-api-specs-pr' && $TRAVIS_BRANCH == 'master' ]] fi - - >- - if [[ $MODE == 'semantic' ]]; then - npm install - npm run tsc - node scripts/semanticValidation.js - fi - - >- - if [[ $MODE == 'model' ]]; then - npm install - npm run tsc - node scripts/modelValidation.js - fi - - >- - if [[ $MODE == 'BreakingChange' ]]; then - scripts/install-dotnet.sh - npm install - npm run tsc - node scripts/breaking-change.js - fi - - >- - if [[ $MODE == 'lintdiff' ]]; then - scripts/install-dotnet.sh - npm install - npm run tsc - node scripts/momentOfTruth.js && node scripts/momentOfTruthPostProcessing.js - fi \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 3c87fbc2497c..86500570ba6b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -19,5 +19,6 @@ ], "url":"https://raw.githubusercontent.com/Azure/autorest/master/schema/composite-swagger.json" } - ] + ], + "typescript.tsdk": "node_modules\\typescript\\lib" } \ No newline at end of file diff --git a/package.json b/package.json index 9de64abe4b93..cf540ccbac6c 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "devDependencies": { "@azure/avocado": "^0.4.1", "@azure/oad": "^0.6.3", - "@azure/rest-api-specs-scripts": "^0.2.21", + "@azure/rest-api-specs-scripts": "^0.3.9", "@microsoft.azure/async-io": "^2.0.21", "@microsoft.azure/literate": "^1.0.25", "@microsoft.azure/polyfill": "^1.0.19", @@ -21,6 +21,7 @@ "@ts-common/fs": "0.2.0", "@types/fs-extra": "^5.0.5", "@types/js-yaml": "^3.12.1", + "@types/mocha": "^5.2.6", "@types/request": "^2.48.1", "fs-extra": "^7.0.1", "glob": "^7.1.3", @@ -31,7 +32,7 @@ "request": "^2.88.0", "request-promise-native": "^1.0.7", "ts-node": "^8.1.0", - "typescript": "^3.4.3", + "typescript": "^3.4.4", "z-schema": "^4.0.2" }, "homepage": "https://github.com/azure/azure-rest-api-specs", diff --git a/preproduction-azure-pipelines.yml b/preproduction-azure-pipelines.yml index 4158edcf2393..0f888ebbd851 100644 --- a/preproduction-azure-pipelines.yml +++ b/preproduction-azure-pipelines.yml @@ -51,7 +51,7 @@ jobs: inputs: verbose: false - script: 'npm install -D @azure/rest-api-specs-scripts@0.3.9' - - script: 'npm run tsc && node scripts/semanticValidation2.js' + - script: 'npm run tsc && node scripts/semanticValidation.js' displayName: 'Semantic Validation' - job: "ModelValidation" @@ -63,7 +63,7 @@ jobs: inputs: verbose: false - script: 'npm install -D @azure/rest-api-specs-scripts@0.3.9' - - script: 'npm run tsc && node scripts/modelValidation2.js' + - script: 'npm run tsc && node scripts/modelValidation.js' displayName: 'Model Validation' - job: "Avocado" diff --git a/scripts/modelValidation.ts b/scripts/modelValidation.ts index d96a93e78bba..5fbebe6cfb99 100644 --- a/scripts/modelValidation.ts +++ b/scripts/modelValidation.ts @@ -1,41 +1,6 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License in the project root for license information. 
-import { devOps, cli } from '@azure/avocado' -import { utils } from '@azure/rest-api-specs-scripts' -import * as cp from 'child_process' +import { modelValidation } from '@azure/rest-api-specs-scripts' -const exec = (cmd: string, options?: cp.SpawnSyncOptions) => { - const result = cp.spawnSync( - cmd, - { - ...options, - shell: true, - stdio: [process.stdin, process.stdout, process.stderr] - } - ) - return result.status -} - -async function main() { - const pr = await devOps.createPullRequestProperties(cli.defaultConfig()) - const swaggersToProcess = await utils.getFilesChangedInPR(pr); - let result = 0 - for (const swagger of swaggersToProcess) { - try { - // await oav.validateExamples(swagger, null, {consoleLogLevel: 'error', pretty: true}); - // run OAV as a separate process to avoid memory issues. - const r = exec(`node node_modules/oav/dist/cli.js validate-example ${swagger} --pretty`) - if (result === 0) { - result = r - } - } catch (e) { - console.error("error: ") - console.error(e) - result = 1 - } - } - process.exitCode = result -} - -main().catch(e => { console.log(e); process.exit(1); }) +modelValidation.main().catch(e => { console.log(e); process.exit(1); }) diff --git a/scripts/modelValidation2.js b/scripts/modelValidation2.js deleted file mode 100644 index dbecf2b83d2a..000000000000 --- a/scripts/modelValidation2.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License in the project root for license information. -Object.defineProperty(exports, "__esModule", { value: true }); -const rest_api_specs_scripts_1 = require("@azure/rest-api-specs-scripts"); -rest_api_specs_scripts_1.modelValidation.main().catch(e => { console.log(e); process.exit(1); }); -//# sourceMappingURL=modelValidation2.js.map \ No newline at end of file diff --git a/scripts/momentOfTruthPostProcessing.js b/scripts/momentOfTruthPostProcessing.js deleted file mode 100644 index 0ebada4bc33f..000000000000 --- a/scripts/momentOfTruthPostProcessing.js +++ /dev/null @@ -1,423 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. - -'use strict'; - -const fs = require('fs'), - utils = require('../test/util/utils'), - path = require('path'), - gitHubPost = require('./postToGitHub'); - -let pullRequestNumber = utils.getPullRequestNumber(); -let targetBranch = utils.getTargetBranch(); -let filename = `${pullRequestNumber}.json`; -let logFilepath = path.join(getLogDir(), filename); - -function getLogDir() { - let logDir = path.join(__dirname, '../', 'output'); - return logDir; -} - -let githubTemplate = (title, contact_message, file_summaries) => `# AutoRest linter results for ${title}\n${contact_message}\n\n${file_summaries}`; - -let tooManyResults = "# Result limit exceeded, check build output\n" + - "The linter diff produced too many results to display here. Please view the build output to see the results. " + - "For help with SDK-related validation Errors / Warnings, reach out to [ADX Swagger Reviewers](mailto:adxsr@microsoft.com). 
" + - "For help with ARM-related validation Errors / Warnings, reach out to [ARM RP API Review](mailto:armrpapireview@microsoft.com).\n\n" + - `### [View Build Output](https://travis-ci.org/${process.env.TRAVIS_REPO_SLUG}/jobs/${process.env.TRAVIS_JOB_ID})`; - -let githubFooter = `[AutoRest Linter Guidelines](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/openapi-authoring-automated-guidelines.md) | ` + - `[AutoRest Linter Issues](https://github.com/Azure/azure-openapi-validator/issues) | ` + - `Send ${emailLink("feedback", "azure-swag-tooling@microsoft.com", "Feedback | AutoRest Linter Diff Tool")}` + - `\n\nThanks for your co-operation.`; - -let fileSummaryHeader = (file_name, file_href) => `## Config file: [${file_name}](${file_href})\n`; -let fileSummaryNewTemplate = (issue_type, issue_count, issue_table) => `

<details><summary>${iconFor(issue_type)} ${issue_count} new ${pluralize(issue_type, issue_count)}</summary><br>\n\n${issue_table}\n</details>`;
-let fileSummaryExistingTemplate = (issue_type, issue_count, issue_table) => `<details><summary>${iconFor(issue_type)} ${issue_count} existing ${pluralize(issue_type, issue_count)}</summary><br>\n\n${issue_table}\n</details>
\n\n`; - -let potentialNewWarningErrorSummaryHeader = ` -| | Rule | Location | Message | -|-|------|----------|---------| -`; - -let potentialNewWarningErrorSummaryMarkdown = (count, warning_error_id, warning_error_code, warning_error_file, warning_error_line, warning_error_message) => - `|${count}|[${warning_error_id} - ${warning_error_code}](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/openapi-authoring-automated-guidelines.md#${warning_error_id})|` + - `[${shortName(warning_error_file)}:${warning_error_line}](${blobHref(warning_error_file)}#L${warning_error_line} "${warning_error_file}")|` + - `${warning_error_message}|\n`; - -let potentialNewWarningErrorSummaryPlain = (count, warning_error_id, warning_error_code, warning_error_file, warning_error_line, warning_error_message) => - `${warning_error_id} - ${warning_error_code}\n` + - `${warning_error_message}\n` + - ` at ${warning_error_file}:${warning_error_line}\n\n`; - -let sdkContactMessage = "These errors are reported by the SDK team's validation tools, reach out to [ADX Swagger Reviewers](mailto:adxsr@microsoft.com) directly for any questions or concerns."; -let armContactMessage = "These errors are reported by the ARM team's validation tools, reach out to [ARM RP API Review](mailto:armrpapireview@microsoft.com) directly for any questions or concerns."; -let sdkFileSummaries = '', armFileSummaries = ''; - -let data = undefined; -let jsonData = undefined; -try { - data = fs.readFileSync(logFilepath, 'utf8'); - jsonData = JSON.parse(data); -} catch (e) { - console.log(`Failed to read diff results from file ${logFilepath}`); - console.log("File content:"); - console.log(data); - process.exit(1) -} - -function compareJsonRef(beforeJsonRef, afterJsonRef) { - beforeJsonRef = beforeJsonRef.replace(/.*\.json:\d+:\d+/, '') - afterJsonRef = afterJsonRef.replace(/.*\.json:\d+:\d+/, '') - - return (beforeJsonRef == afterJsonRef); -} - -function getOutputMessages(newSDKErrorsCount, newARMErrorsCount, newSDKWarningsCount, newARMWarningsCount) { - const totalNewErrors = newSDKErrorsCount + newARMErrorsCount; - const totalNewWarnings = newSDKWarningsCount + newARMWarningsCount; - - const title = `${totalNewErrors} new ${pluralize('error', totalNewErrors)} / ${totalNewWarnings} new ${pluralize('warning', totalNewWarnings)}`; - let summary = `Compared to the target branch (**${targetBranch}**), this pull request introduces:\n\n`; - summary += formatSummaryLine("SDK Error", newSDKErrorsCount); - summary += formatSummaryLine("ARM Error", newARMErrorsCount); - summary += formatSummaryLine("SDK Warning", newSDKWarningsCount); - summary += formatSummaryLine("ARM Warning", newARMWarningsCount); - - return [title, summary]; -} - -function formatSummaryLine(issueType, count) { - let line = `   ${iconFor(issueType, count)}   `; - if (count > 0) { - line += '['; - } - line += `**${count}** new ${pluralize(issueType, count)}`; - if (count > 0) { - line += `](#user-content-${issueType.replace(/\s/g, "-")}s)`; - } - line += "\n\n"; - return line; -} - -function getSummaryBlock(summaryTitle, fileSummaries, contactMessage) { - return githubTemplate( - summaryTitle, - contactMessage, - fileSummaries !== "" ? 
fileSummaries : `**There were no files containing ${summaryTitle}.**` - ); -} - -function compareBeforeAfterArrays(afterArray, beforeArray, existingArray, newArray) { - if(afterArray.length > beforeArray.length){ - afterArray.forEach(afterValue => { - let errorFound = false; - beforeArray.forEach(beforeValue => { - if( - beforeValue.type == afterValue.type && - beforeValue.code == afterValue.code && - beforeValue.message == afterValue.message && - beforeValue.id == afterValue.id && - beforeValue.validationCategory == afterValue.validationCategory && - beforeValue.providerNamespace == afterValue.providerNamespace && - beforeValue.resourceType == afterValue.resourceType && - beforeValue.sources.length == afterValue.sources.length && - compareJsonRef(beforeValue.jsonref, afterValue.jsonref) - ) { - errorFound = true; - } - }); - if(errorFound) { - existingArray.push(afterValue); - } else { - newArray.push(afterValue); - } - }); - } -} - -function iconFor(type, num = undefined) { - if (num === 0) { - return ':white_check_mark:'; - } - - if (type.toLowerCase().includes('error')) { - return ':x:'; - } else { - return ':warning:'; - } -} - -function pluralize(word, num) { - return num !== 1 ? `${word}s` : word; -} - -function getLine(jsonRef) { - try { - return jsonRef.substr(jsonRef.indexOf(".json:") + 6).split(':')[0]; - } catch (error) { - return undefined; - } -} - -function getFile(jsonRef) { - try { - const start = jsonRef.indexOf("specification"); - return jsonRef.substr(start, (jsonRef.indexOf(".json") + 5) - start); - } catch (error) { - return undefined; - } -} - -function shortName(filePath) { - return `${path.basename(path.dirname(filePath))}/​${path.basename(filePath)}`; -} - -function blobHref(file) { - return `https://github.com/${process.env.TRAVIS_PULL_REQUEST_SLUG}/blob/${process.env.TRAVIS_PULL_REQUEST_SHA}/${file}`; -} - -function getFileSummaryTable(issues, header, formatter) { - let potentialNewIssues = header; - - issues.sort((a, b) => { - if (!a.filePath) { - a.filePath = getFile(a.jsonref) || ""; - a.lineNumber = getLine(a.jsonref) || "1"; - } - - if (!b.filePath) { - b.filePath = getFile(b.jsonref) || ""; - b.lineNumber = getLine(b.jsonref) || "1"; - } - - const comparison = a.filePath.localeCompare(b.filePath); - if (comparison !== 0) { - return comparison; - } else if (a.lineNumber !== b.lineNumber) { - return a.lineNumber - b.lineNumber; - } else { - return a.id.localeCompare(b.id); - } - }); - - issues.forEach(function (issue, count) { - if (!issue.filePath) { - issue.filePath = getFile(issue.jsonref) || ""; - issue.lineNumber = getLine(issue.jsonref) || "1"; - } - - potentialNewIssues += formatter( - count + 1, - issue.id, - issue.code, - issue.filePath, - issue.lineNumber, - issue.message - ); - }); - - return potentialNewIssues; -} - -function getFileSummary(issueType, fileName, existingWarnings, existingErrors, newWarnings, newErrors) { - let fileSummary = ""; - - if (newErrors.length > 0) { - fileSummary += fileSummaryNewTemplate(`${issueType} Error`, newErrors.length, getFileSummaryTable(newErrors, potentialNewWarningErrorSummaryHeader, potentialNewWarningErrorSummaryMarkdown)); - } - - if (existingErrors.length > 0) { - fileSummary += fileSummaryExistingTemplate(`${issueType} Error`, existingErrors.length, getFileSummaryTable(existingErrors, potentialNewWarningErrorSummaryHeader, potentialNewWarningErrorSummaryMarkdown)); - } - - if (fileSummary !== "") { - fileSummary += "
\n\n"; - } - - if (newWarnings.length > 0) { - fileSummary += fileSummaryNewTemplate(`${issueType} Warning`, newWarnings.length, getFileSummaryTable(newWarnings, potentialNewWarningErrorSummaryHeader, potentialNewWarningErrorSummaryMarkdown)); - } - - if (existingWarnings.length > 0) { - fileSummary += fileSummaryExistingTemplate(`${issueType} Warning`, existingWarnings.length, getFileSummaryTable(existingWarnings, potentialNewWarningErrorSummaryHeader, potentialNewWarningErrorSummaryMarkdown)); - } - - if (fileSummary !== "") { - return fileSummaryHeader(fileName, blobHref(fileName)) + fileSummary; - } else { - return ""; - } -} - -function emailLink(title, addr, subject = "", body = "") { - let link = `${title}`; - - return link; -} - -function postProcessing() { - let newSDKErrorsCount = 0, newARMErrorsCount = 0, newSDKWarningsCount = 0, newARMWarningsCount = 0; - - console.log("\n---------- Linter Diff Results ----------\n") - - if (!jsonData) { - const reportLink = emailLink( - "report this failure", - "azure-swag-tooling@microsoft.com", - "Failure | AutoRest Linter Diff Tool", - `Please examine the failure in PR https://github.com/${process.env.TRAVIS_REPO_SLUG}/pull/${pullRequestNumber}\r\nThe failing job is https://travis-ci.org/${process.env.TRAVIS_REPO_SLUG}/jobs/${process.env.TRAVIS_JOB_ID}` - ); - - const output = { - title: "Failed to produce a result", - summary: `The Linter Diff tool failed to produce a result. Work with your reviewer to examine the lint results manually before merging.\n\nPlease ${reportLink}!` - }; - - console.log("---output"); - console.log(JSON.stringify(output)); - console.log("---"); - - return; - } - - const configFiles = Object.keys(jsonData['files']); - configFiles.sort(); - - for (const fileName of configFiles) { - let beforeErrorsSDKArray = [], beforeWarningsSDKArray = [], beforeErrorsARMArray = [], beforeWarningsARMArray = []; - let afterErrorsSDKArray = [], afterWarningsSDKArray = [], afterErrorsARMArray = [], afterWarningsARMArray = []; - let newSDKErrors = [], newSDKWarnings = [], newARMErrors = [], newARMWarnings = []; - let existingSDKErrors = [], existingSDKWarnings = [], existingARMErrors = [], existingARMWarnings = []; - - let beforeErrorsAndWarningsArray = jsonData['files'][fileName]['before']; - beforeErrorsAndWarningsArray.forEach(beforeErrorOrWarning => { - if(beforeErrorOrWarning.type != undefined && beforeErrorOrWarning.type.toLowerCase() == 'warning'){ - if(beforeErrorOrWarning.validationCategory.toLowerCase() == 'sdkviolation') { - beforeWarningsSDKArray.push(beforeErrorOrWarning); - } else { - beforeWarningsARMArray.push(beforeErrorOrWarning); - } - } - - if(beforeErrorOrWarning.type != undefined && beforeErrorOrWarning.type.toLowerCase() == 'error'){ - if(beforeErrorOrWarning.validationCategory.toLowerCase() == 'sdkviolation') { - beforeErrorsSDKArray.push(beforeErrorOrWarning); - } else { - beforeErrorsARMArray.push(beforeErrorOrWarning); - } - } - }); - - let afterErrorsAndWarningsArray = jsonData['files'][fileName]['after']; - afterErrorsAndWarningsArray.forEach(afterErrorOrWarning => { - if(afterErrorOrWarning.type != undefined && afterErrorOrWarning.type.toLowerCase() == 'warning'){ - if(afterErrorOrWarning.validationCategory.toLowerCase() == 'sdkviolation') { - afterWarningsSDKArray.push(afterErrorOrWarning); - } else { - afterWarningsARMArray.push(afterErrorOrWarning); - } - } - - if(afterErrorOrWarning.type != undefined && afterErrorOrWarning.type.toLowerCase() == 'error'){ - 
if(afterErrorOrWarning.validationCategory.toLowerCase() == 'sdkviolation') { - afterErrorsSDKArray.push(afterErrorOrWarning); - } else { - afterErrorsARMArray.push(afterErrorOrWarning); - } - } - }); - - compareBeforeAfterArrays(afterErrorsARMArray, beforeErrorsARMArray, existingARMErrors, newARMErrors); - compareBeforeAfterArrays(afterErrorsSDKArray, beforeErrorsSDKArray, existingSDKErrors, newSDKErrors); - compareBeforeAfterArrays(afterWarningsARMArray, beforeWarningsARMArray, existingARMWarnings, newARMWarnings); - compareBeforeAfterArrays(afterWarningsSDKArray, beforeWarningsSDKArray, existingSDKWarnings, newSDKWarnings); - - console.log(`Config file: ${fileName}\n`) - console.log("SDK Errors/Warnings"); - console.log("==================="); - console.log("Errors: Before: ", beforeErrorsSDKArray.length, " - After: ", afterErrorsSDKArray.length); - console.log("Warnings: Before: ", beforeWarningsSDKArray.length, " - After: ", afterWarningsSDKArray.length); - console.log("New SDK Errors: ", newSDKErrors.length); - console.log("New SDK Warnings: ", newSDKWarnings.length); - console.log("Existing SDK Errors: ", existingSDKErrors.length); - console.log("Existing SDK Warnings: ", existingSDKWarnings.length); - console.log(); - console.log("ARM Errors/Warnings"); - console.log("==================="); - console.log("Errors: Before: ", beforeErrorsARMArray.length, " - After: ", afterErrorsARMArray.length); - console.log("Warnings: Before: ", beforeWarningsARMArray.length, " - After: ", afterWarningsARMArray.length); - console.log("New ARM Errors: ", newARMErrors.length); - console.log("New ARM Warnings: ", newARMWarnings.length); - console.log("Existing ARM Errors: ", existingARMErrors.length); - console.log("Existing ARM Warnings: ", existingARMWarnings.length); - console.log(); - - if (newSDKErrors.length > 0) { - console.log(`Potential new SDK errors`) - console.log("========================"); - console.log(getFileSummaryTable(newSDKErrors, "", potentialNewWarningErrorSummaryPlain)); - } - if (newSDKWarnings.length > 0) { - console.log(`Potential new SDK warnings`) - console.log("=========================="); - console.log(getFileSummaryTable(newSDKWarnings, "", potentialNewWarningErrorSummaryPlain)); - } - if (newARMErrors.length > 0) { - console.log(`Potential new ARM errors`) - console.log("========================"); - console.log(getFileSummaryTable(newARMErrors, "", potentialNewWarningErrorSummaryPlain)); - } - if (newARMWarnings.length > 0) { - console.log(`Potential new ARM warnings`) - console.log("=========================="); - console.log(getFileSummaryTable(newARMWarnings, "", potentialNewWarningErrorSummaryPlain)); - } - - console.log("-----------------------------------------\n") - - newSDKErrorsCount += newSDKErrors.length; - newARMErrorsCount += newARMErrors.length; - newSDKWarningsCount += newSDKWarnings.length; - newARMWarningsCount += newARMWarnings.length; - - sdkFileSummaries += getFileSummary("SDK", fileName, existingSDKWarnings, existingSDKErrors, newSDKWarnings, newSDKErrors); - armFileSummaries += getFileSummary("ARM", fileName, existingARMWarnings, existingARMErrors, newARMWarnings, newARMErrors); - } - - const sdkSummary = getSummaryBlock("SDK-related validation Errors / Warnings", sdkFileSummaries, sdkContactMessage); - const armSummary = getSummaryBlock("ARM-related validation Errors / Warnings", armFileSummaries, armContactMessage); - const text = `${sdkSummary}

<br><br>\n\n${armSummary}<br><br>

\n\n${githubFooter}`; - - const [title, summary] = getOutputMessages(newSDKErrorsCount, newARMErrorsCount, newSDKWarningsCount, newARMWarningsCount); - const output = { - title, - summary, - text: text.length <= 65535 ? text : `${tooManyResults}
<br><br>
\n\n${githubFooter}` - } - - console.log("---output"); - console.log(JSON.stringify(output, null, 2)); - console.log("---"); - - if(process.env.TRAVIS_REPO_SLUG != undefined && process.env.TRAVIS_REPO_SLUG.endsWith("-pr")) { - let slug = process.env.TRAVIS_REPO_SLUG; - slug = slug.split("/")[1]; - gitHubPost.postGithubComment("Azure", slug, pullRequestNumber, output.text); - } - - if (newSDKErrorsCount > 0 || newARMErrorsCount > 0) { - process.exitCode = 1; - } -} - -postProcessing(); diff --git a/scripts/momentOfTruthPostProcessing.ts b/scripts/momentOfTruthPostProcessing.ts new file mode 100644 index 000000000000..932bf87d5614 --- /dev/null +++ b/scripts/momentOfTruthPostProcessing.ts @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { momentOfTruthPostProcessing } from '@azure/rest-api-specs-scripts' + +momentOfTruthPostProcessing.postProcessing(); diff --git a/scripts/multiapi.ts b/scripts/multiapi.ts index c7508904f776..f4d3f0493cc2 100644 --- a/scripts/multiapi.ts +++ b/scripts/multiapi.ts @@ -1,3 +1,6 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + import * as fs from "@ts-common/fs" import * as process from "process" import * as path from "path" diff --git a/scripts/postToGitHub.js b/scripts/postToGitHub.js deleted file mode 100644 index f162b6ad8b8a..000000000000 --- a/scripts/postToGitHub.js +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. - -'use strict'; - -const octokit = require('@octokit/rest')(); -let token = process.env.GITHUB_TOKEN; - -if(token != undefined) { - octokit.authenticate({ - type: 'token', - token: token - }); -} - -module.exports = { - postGithubComment: function(owner, repository, prNumber, commentBody) { - octokit.issues.createComment({ - "owner": owner, - "repo": repository, - "number": prNumber, - "body": commentBody - }).then(data => { - console.log("Comment has been posted"); - }). catch(err => { - console.log(err); - }); - } -} \ No newline at end of file diff --git a/scripts/semanticValidation.ts b/scripts/semanticValidation.ts index 2cff59e18f98..e657bc580e99 100644 --- a/scripts/semanticValidation.ts +++ b/scripts/semanticValidation.ts @@ -1,27 +1,6 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License in the project root for license information. -import { devOps, cli } from '@azure/avocado' -import { utils } from '@azure/rest-api-specs-scripts' -import * as oav from 'oav' +import { semanticValidation } from '@azure/rest-api-specs-scripts' -async function main() { - const pr = await devOps.createPullRequestProperties(cli.defaultConfig()) - const swaggersToProcess = await utils.getFilesChangedInPR(pr); - // Useful when debugging a test for a particular swagger. - // Just update the regex. That will return an array of filtered items. 
- // swaggersToProcess = swaggersToProcess.filter(function(item) { - // return (item.match(/.*Microsoft.Logic.*2016-06-01.*/ig) !== null); - // }); - for (const swagger of swaggersToProcess) { - try { - await oav.validateSpec(swagger, {consoleLogLevel: 'error', pretty: true}); - } catch (e) { - console.error("error: ") - console.error(e) - process.exitCode = 1 - } - } -} - -main().catch(e => { console.log(e); process.exit(1); }) \ No newline at end of file +semanticValidation.main().catch(e => { console.log(e); process.exit(1); }) diff --git a/scripts/semanticValidation2.js b/scripts/semanticValidation2.js deleted file mode 100644 index 0cb8089fd62f..000000000000 --- a/scripts/semanticValidation2.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License in the project root for license information. -Object.defineProperty(exports, "__esModule", { value: true }); -const rest_api_specs_scripts_1 = require("@azure/rest-api-specs-scripts"); -rest_api_specs_scripts_1.semanticValidation.main().catch(e => { console.log(e); process.exit(1); }); -//# sourceMappingURL=semanticValidation2.js.map \ No newline at end of file diff --git a/scripts/ts-utils.ts b/scripts/ts-utils.ts deleted file mode 100644 index 9b9dbb94a898..000000000000 --- a/scripts/ts-utils.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License in the project root for license information. - -export const asNonUndefined = (v: T|undefined) => v as T \ No newline at end of file diff --git a/test/linter.js b/test/linter.js deleted file mode 100644 index 550d336df8da..000000000000 --- a/test/linter.js +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License in the project root for license information. - -'use strict'; -var - execSync = require('child_process').execSync, - utils = require('./util/utils'), - fs = require('fs'), - literate = require('@microsoft.azure/literate'), - async_io_1 = require("@microsoft.azure/async-io"), - path = require('path'); - -async function readConfigFile(file, tag) { - // autorest configuration type - const msgs = new literate.MessageEmitter(); - msgs.Message.Subscribe((src, each) => console.log(each.Text)); - const cfg = new literate.Configuration("\n> see https://aka.ms/autorest", new literate.DiskFileSystem("readme.md"), async_io_1.ResolveUri(async_io_1.CreateFolderUri(process.cwd()), async_io_1.CreateFileUri(path.resolve(file)) || "."), { - "input-file": [], - }); - return await cfg.CreateView(msgs, true, { tag: tag }); -} - -async function getTagsMapFromConfig(args) { - if (!fs.existsSync(path.resolve(args.file))) { - console.error('config file invalid. 
cannot read tags from file ' + config); - return null; - } - - const allTags = scanForTags(fs.readFileSync(args.file)); - const result = {}; - for (const each of allTags) { - result[each] = (await readConfigFile(args.file, each))["input-file"]; - } - return result; -} - -function scanForTags(content) { - const result = new Array(); - const rx = /\$\(tag\)(.*)/g; - let match = rx.exec(content); - while (match) { - const vrx = /['"](.*?)['"]/g; - let v = vrx.exec(match[1]); - if (v && v.length && result.indexOf(v[1]) == -1) { - result.push(v[1]); - } - match = rx.exec(content); - } - return result; -} - -async function getTagsFromConfig(config) { - // get hold of all tags and their corresponding input files using the literate config tool - const tagsMap = await getTagsMapFromConfig({ file: config }); - if (tagsMap === null) { - return null; - } - const tags = Object.keys(tagsMap); - - // filter the tags - if (utils.prOnly) { - // get path to the modified files relative to their corresponding md file, need to do this since - // config files have relative paths to the input files - let allModifiedFiles = utils.getFilesChangedInPR(); - allModifiedFiles = allModifiedFiles.map(mfile => { - return mfile.replace(path.dirname(config) + '/', ''); - }); - - // for each tag->files, find if there are any modified files and select those tags - return tags.filter(tag => { - - const tagFiles = (String(tagsMap[tag])).split(','); - // find intersection with the modified files - return tagFiles.filter(tagFile => { - return allModifiedFiles.indexOf(tagFile) > -1; - }).length > 0; - }); - } - return tags; -} - -function execLinterCommand(args) { - var cmd = `npx autorest@2.0.4152 --validation --azure-validator --message-format=json ${args}`.trim(); - console.log(`Executing: ${cmd}`); - var errorsFound = false; - try { - let result = execSync(cmd, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 64 }); - console.error(result); - } catch (err) { - errorsFound = true; - console.error('Linter validation contains error(s)'); - } - - return errorsFound; -} - -describe('AutoRest Linter validation:', function () { - if (utils.prOnly) { - // Useful when debugging a test for a particular swagger. - // Just update the regex. That will return an array of filtered items. 
- // configsToProcess = ['specification/sql/resource-manager/readme.md']; - let configsToProcess = utils.getConfigFilesChangedInPR(); - for (const config of configsToProcess) { - it(config + ' should honor linter validation rules.', async function () { - - // find all tags in the config file - const tagsToProcess = await getTagsFromConfig(config); - - let errorsFound = false; - - // if no tags found to process, run with the defaults - if (tagsToProcess === null || tagsToProcess.length === 0) { - // no tags found - // this means we need to run validator against the individual - // json files included in the PR - // but in the same directory tree as the config file - const filesChangedInPR = utils.getFilesChangedInPR(); - const configDir = path.dirname(config); - filesChangedInPR.filter(prFile => { - // set any type to string - prFile += ''; - return prFile.startsWith(configDir) && prFile.indexOf('examples') === -1 && prFile.endsWith('.json'); - }).forEach(prFileInConfigFile => { - console.warn(`WARNING: Configuration file not found for file: ${prFileInConfigFile}, running validation rules against it in individual context.`); - errorsFound = execLinterCommand(`--input-file=${prFileInConfigFile}`) && errorsFound; - }); - } - else { - // if tags found, run linter against every single tag - tagsToProcess.forEach((tagToProcess) => { - errorsFound = execLinterCommand(`${config} --tag=${tagToProcess}`) && errorsFound; - }, this); - } - - if(errorsFound == true) { - throw new Error('Linter validation contains error(s)'); - } - - - }); - } - } - else { - // we are not handling pr_only=false case today, - // to enable, we need to write logic to calculate - // all config files in the repo and run linter with - // every tag in the md file; we can get tags each - // config file from getTagsFromCinfig file - console.warn('Cannot run linter in pr_only false mode'); - } -}); diff --git a/test/syntax.js b/test/syntax.ts similarity index 60% rename from test/syntax.js rename to test/syntax.ts index fec45a99754c..bae563bf04da 100644 --- a/test/syntax.js +++ b/test/syntax.ts @@ -1,18 +1,24 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License in the project root for license information. -'use strict'; var assert = require("assert"), - fs = require('fs'), - path = require('path'), RefParser = require('json-schema-ref-parser'), util = require('util'), - utils = require('./util/utils'); + utils = require('@azure/rest-api-specs-scripts').utils; -var context; +type Context = { + readonly validator: { + readonly validate: (parsedData: unknown, schema: unknown) => unknown + readonly getLastErrors: () => unknown + } + readonly extensionSwaggerSchema: unknown + readonly exampleSchema: unknown +} + +let syntaxContext: Context -// Useful when debugging a test for a particular swagger. +// Useful when debugging a test for a particular swagger. // Just update the regex. That will return an array of filtered items. 
// utils.swaggers = utils.swaggers.filter(function(item) { // return (item.match(/.*Microsoft.Logic.*2016-06-01.*/ig) !== null); @@ -24,19 +30,19 @@ var context; describe('Azure swagger schema validation:', function () { before(function (done) { - utils.initializeValidator().then((result) => { - context = result; + utils.initializeValidator().then((result: Context) => { + syntaxContext = result; done(); }); - + }); - for (const swagger of utils.swaggers) { + for (const swagger of utils.getSwaggers()) { it(swagger + ' should be a valid Swagger document.', function (done) { - utils.parseJsonFromFile(swagger).then((parsedData)=> { - var valid = context.validator.validate(parsedData, context.extensionSwaggerSchema); + utils.parseJsonFromFile(swagger).then((parsedData: unknown)=> { + var valid = syntaxContext.validator.validate(parsedData, syntaxContext.extensionSwaggerSchema); if (!valid) { - var error = context.validator.getLastErrors(); + var error = syntaxContext.validator.getLastErrors(); throw new Error("Schema validation failed: " + util.inspect(error, { depth: null })); } assert(valid === true); @@ -46,12 +52,12 @@ describe('Azure swagger schema validation:', function () { } describe('Azure x-ms-example schema validation:', function () { - for (const example of utils.examples) { + for (const example of utils.getExamples()) { it('x-ms-examples: ' + example + ' should be a valid x-ms-example.', function (done) { - utils.parseJsonFromFile(example).then((parsedData) => { - var valid = context.validator.validate(parsedData, context.exampleSchema); + utils.parseJsonFromFile(example).then((parsedData: unknown) => { + var valid = syntaxContext.validator.validate(parsedData, syntaxContext.exampleSchema); if (!valid) { - var error = context.validator.getLastErrors(); + var error = syntaxContext.validator.getLastErrors(); throw new Error("Schema validation failed: " + util.inspect(error, { depth: null })); } assert(valid === true); @@ -63,9 +69,9 @@ describe('Azure swagger schema validation:', function () { }); describe('External file or url references ("$ref") in a swagger spec:', function () { - for (const swagger of utils.swaggers) { + for (const swagger of utils.getSwaggers()) { it(swagger + ' should be completely resolvable.', function (done) { - RefParser.bundle(swagger, function (bundleErr, bundleResult) { + RefParser.bundle(swagger, function (bundleErr: { readonly message: unknown }, _bundleResult: unknown) { if (bundleErr) { var msg = swagger + ' has references that cannot be resolved. They are as follows: \n' + util.inspect(bundleErr.message, { depth: null }); console.log(msg); diff --git a/test/util/utils.ts b/test/util/utils.ts deleted file mode 100644 index d2ffb4c61d08..000000000000 --- a/test/util/utils.ts +++ /dev/null @@ -1,347 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License in the project root for license information. - -import * as tsUtils from '../../scripts/ts-utils' -import * as stringMap from '@ts-common/string-map' -import * as os from 'os' -import * as fs from 'fs-extra' -import * as glob from 'glob' -import * as path from 'path' -const z = require('z-schema') -import * as YAML from 'js-yaml' -import request = require('request') -import * as util from 'util' -import { execSync } from 'child_process' - -const asyncJsonRequest = (url: string) => new Promise((res, rej) => request( - { url, json: true }, - (error: unknown, _: unknown, body: unknown) => error ? 
rej(error) : res(body) -)); - -export const extensionSwaggerSchemaUrl = "https://raw.githubusercontent.com/Azure/autorest/master/schema/swagger-extensions.json"; -export const swaggerSchemaUrl = "http://json.schemastore.org/swagger-2.0"; -export const swaggerSchemaAltUrl = "http://23.22.16.221/v2/schema.json"; -export const schemaUrl = "http://json-schema.org/draft-04/schema"; -export const exampleSchemaUrl = "https://raw.githubusercontent.com/Azure/autorest/master/schema/example-schema.json"; -export const compositeSchemaUrl = "https://raw.githubusercontent.com/Azure/autorest/master/schema/composite-swagger.json"; - -export const isWindows = (process.platform.lastIndexOf('win') === 0); -export const prOnly = undefined !== process.env['PR_ONLY'] ? process.env['PR_ONLY'] : 'false'; - -export const globPath = path.join(__dirname, '../', '../', '/specification/**/*.json'); -export const swaggers = glob.sync(globPath, { ignore: ['**/examples/**/*.json', '**/quickstart-templates/*.json', '**/schema/*.json'] }); -export const exampleGlobPath = path.join(__dirname, '../', '../', '/specification/**/examples/**/*.json'); -export const examples = glob.sync(exampleGlobPath); -export const readmes = glob.sync(path.join(__dirname, '../', '../', '/specification/**/readme.md')); - -// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) -// because the buffer-to-string conversion in `fs.readFile()` -// translates it to FEFF, the UTF-16 BOM. -export const stripBOM = function(content: Buffer|string) { - if (Buffer.isBuffer(content)) { - content = content.toString(); - } - if (content.charCodeAt(0) === 0xFEFF || content.charCodeAt(0) === 0xFFFE) { - content = content.slice(1); - } - return content; -}; - -/** - * Parses the json from the given filepath - * @returns {string} clr command - */ -export const parseJsonFromFile = async function(filepath: string) { - const data = await fs.readFile(filepath, { encoding: 'utf8' }); - try { - return YAML.safeLoad(stripBOM(data)); - } catch (error) { - throw new Error(`swagger "${filepath}" is an invalid JSON.\n${util.inspect(error, { depth: null })}`); - } -}; - -/** - * Gets the name of the target branch to which the PR is sent. We are using the environment - * variable provided by travis-ci. It is called TRAVIS_BRANCH. More info can be found here: - * https://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables - * If the environment variable is undefined then the method returns 'master' as the default value. - * @returns {string} branchName The target branch name. 
- */ -export const getTargetBranch = function() { - console.log(`@@@@@ process.env['TRAVIS_BRANCH'] - ${process.env['TRAVIS_BRANCH']}`); - let result = process.env['TRAVIS_BRANCH'] || 'master'; - result = result.trim(); - console.log(`>>>>> The target branch is: "${result}".`); - return result; -}; - -/** - * Check out a copy of a branch to a temporary location, execute a function, and then restore the previous state - */ -export const doOnBranch = async function(branch: unknown, func: () => Promise) { - fetchBranch(branch); - const branchSha = resolveRef(`origin/${branch}`); - const tmpDir = path.join(os.tmpdir(), branchSha); - - const currentDir = process.cwd(); - checkoutBranch(branch, tmpDir); - - console.log(`Changing directory and executing the function...`); - process.chdir(tmpDir); - const result = await func(); - - console.log(`Restoring previous directory and deleting secondary working tree...`); - process.chdir(currentDir); - execSync(`rm -rf ${tmpDir}`); - - return result; -} - -/** - * Resolve a ref to its commit hash - */ -export const resolveRef = function(ref: unknown) { - let cmd = `git rev-parse ${ref}`; - console.log(`> ${cmd}`); - return execSync(cmd, { encoding: 'utf8' }).trim(); -} - -/** - * Fetch ref for a branch from the origin - */ -export const fetchBranch = function(branch: unknown) { - let cmds = [ - `git remote -vv`, - `git branch --all`, - `git remote set-branches origin --add ${branch}`, - `git fetch origin ${branch}` - ]; - - console.log(`Fetching branch ${branch} from origin...`); - for (let cmd of cmds) { - console.log(`> ${cmd}`); - execSync(cmd, { encoding: 'utf8', stdio: 'inherit' }); - } -} - -/** - * Checkout a copy of branch to location - */ -export const checkoutBranch = function(ref: unknown, location: unknown) { - let cmd = `git worktree add -f ${location} origin/${ref}`; - console.log(`Checking out a copy of branch ${ref} to ${location}...`); - console.log(`> ${cmd}`); - execSync(cmd, { encoding: 'utf8', stdio: 'inherit' }); -} - -/** - * Gets the name of the source branch from which the PR is sent. - * @returns {string} branchName The source branch name. - */ -export const getSourceBranch = function() { - let cmd = 'git rev-parse --abbrev-ref HEAD'; - let result = process.env['TRAVIS_PULL_REQUEST_BRANCH']; - console.log(`@@@@@ process.env['TRAVIS_PULL_REQUEST_BRANCH'] - ${process.env['TRAVIS_PULL_REQUEST_BRANCH']}`); - if (!result) { - try { - result = execSync(cmd, { encoding: 'utf8' }); - } catch (err) { - console.log(`An error occurred while getting the current branch ${util.inspect(err, { depth: null })}.`); - } - } - result = tsUtils.asNonUndefined(result).trim(); - console.log(`>>>>> The source branch is: "${result}".`); - return result; -}; - -/** - * Gets the PR number. We are using the environment - * variable provided by travis-ci. It is called TRAVIS_PULL_REQUEST. More info can be found here: - * https://docs.travis-ci.com/user/environment-variables/#Convenience-Variables - * @returns {string} PR number or 'undefined'. - */ -export const getPullRequestNumber = function() { - let result = process.env['TRAVIS_PULL_REQUEST']; - console.log(`@@@@@ process.env['TRAVIS_PULL_REQUEST'] - ${process.env['TRAVIS_PULL_REQUEST']}`); - - if (!result) { - result = 'undefined'; - } - - return result; -}; - -/** - * Gets the Repo name. We are using the environment - * variable provided by travis-ci. It is called TRAVIS_REPO_SLUG. 
More info can be found here: - * https://docs.travis-ci.com/user/environment-variables/#Convenience-Variables - * @returns {string} repo name or 'undefined'. - */ -export const getRepoName = function() { - let result = process.env['TRAVIS_REPO_SLUG']; - console.log(`@@@@@ process.env['TRAVIS_REPO_SLUG'] - ${result}`); - - return result; -}; - -/** - * Gets the source repo name for PR's. We are using the environment - * variable provided by travis-ci. It is called TRAVIS_PULL_REQUEST_SLUG. More info can be found here: - * https://docs.travis-ci.com/user/environment-variables/#Convenience-Variables - * @returns {string} repo name or 'undefined'. - */ -export const getSourceRepoName = function() { - let result = process.env['TRAVIS_PULL_REQUEST_SLUG']; - console.log(`@@@@@ process.env['TRAVIS_PULL_REQUEST_SLUG'] - ${result}`); - - return result; -}; - -// Retrieves Git Repository Url -/** - * Gets the repo URL - * @returns {string} repo URL or 'undefined' - */ -export const getRepoUrl = function() { - let repoName = getRepoName(); - return `https://github.com/${repoName}`; -}; - -// Retrieves the source Git Repository Url -/** - * Gets the repo URL from where the PR originated - * @returns {string} repo URL or 'undefined' - */ -export const getSourceRepoUrl = function() { - let repoName = getSourceRepoName(); - return `https://github.com/${repoName}`; -}; - -export const getTimeStamp = function() { - // We pad each value so that sorted directory listings show the files in chronological order - function pad(number: any): any { - if (number < 10) { - return '0' + number; - } - - return number; - } - - var now = new Date(); - return now.getFullYear() - + pad(now.getMonth() + 1) - + pad(now.getDate()) - + "_" - + pad(now.getHours()) - + pad(now.getMinutes()) - + pad(now.getSeconds()); -} - -/** - * Retrieves list of swagger files to be processed for linting - * @returns {Array} list of files to be processed for linting - */ -export const getConfigFilesChangedInPR = function() { - if (prOnly === 'true') { - let targetBranch, cmd, filesChanged; - try { - targetBranch = getTargetBranch(); - execSync(`git fetch origin ${targetBranch}`); - cmd = `git diff --name-only HEAD $(git merge-base HEAD FETCH_HEAD)`; - filesChanged = execSync(cmd, { encoding: 'utf8' }).split('\n'); - console.log('>>>>> Files changed in this PR are as follows:'); - console.log(filesChanged); - - // traverse up to readme.md files - const configFiles = new Set(); - for (let fileChanged of filesChanged) { - while (fileChanged.startsWith("specification")) { - if (fileChanged.toLowerCase().endsWith("readme.md") && fs.existsSync(fileChanged)) { - configFiles.add(fileChanged); - break; - } - // select parent readme - const parts = fileChanged.split('/'); - parts.pop(); - parts.pop(); - parts.push("readme.md"); - fileChanged = parts.join('/'); - } - } - filesChanged = [...configFiles.values()]; - - console.log('>>>>> Affected configuration files:'); - console.log(filesChanged); - - return filesChanged; - } catch (err) { - throw err; - } - } else { - return swaggers; - } -}; - -/** - * Retrieves list of swagger files to be processed for linting - * @returns {Array} list of files to be processed for linting - */ -export const getFilesChangedInPR = function() { - let result = swaggers; - if (prOnly === 'true') { - let targetBranch, cmd, filesChanged, swaggerFilesInPR; - try { - targetBranch = getTargetBranch(); - execSync(`git fetch origin ${targetBranch}`); - cmd = `git diff --name-only HEAD $(git merge-base HEAD FETCH_HEAD)`; - 
filesChanged = execSync(cmd, { encoding: 'utf8' }); - console.log('>>>>> Files changed in this PR are as follows:') - console.log(filesChanged); - swaggerFilesInPR = filesChanged.split('\n').filter(function (item: string) { - if (item.match(/.*(json|yaml)$/ig) == null || item.match(/.*specification.*/ig) == null) { - return false; - } - if (item.match(/.*\/examples\/*/ig) !== null) { - return false; - } - if (item.match(/.*\/quickstart-templates\/*/ig) !== null) { - return false; - } - return true; - }); - console.log(`>>>> Number of swaggers found in this PR: ${swaggerFilesInPR.length}`); - - var deletedFiles = swaggerFilesInPR.filter(function (swaggerFile: string) { - return !fs.existsSync(swaggerFile); - }); - console.log('>>>>> Files deleted in this PR are as follows:') - console.log(deletedFiles); - // Remove files that have been deleted in the PR - swaggerFilesInPR = swaggerFilesInPR.filter(function (x: string) { return deletedFiles.indexOf(x) < 0 }); - - result = swaggerFilesInPR; - } catch (err) { - throw err; - } - } - return result; -}; - -/** - * Downloads the remote schemas and initializes the validator with remote references. - * @returns {Object} context Provides the schemas in json format and the validator. - */ -export const initializeValidator = async function() { - const context: stringMap.MutableStringMap = { - extensionSwaggerSchema: await asyncJsonRequest(extensionSwaggerSchemaUrl), - swaggerSchema: await asyncJsonRequest(swaggerSchemaAltUrl), - exampleSchema: await asyncJsonRequest(exampleSchemaUrl), - compositeSchema: await asyncJsonRequest(compositeSchemaUrl) - }; - let validator = new z({ breakOnFirstError: false }); - validator.setRemoteReference(swaggerSchemaUrl, context.swaggerSchema); - validator.setRemoteReference(exampleSchemaUrl, context.exampleSchema); - validator.setRemoteReference(compositeSchemaUrl, context.compositeSchema); - context.validator = validator; - return context; -};