Skip to content

Commit

Permalink
Merge branch 'main' into patch-2
Browse files Browse the repository at this point in the history
  • Loading branch information
cmwilson21 authored Mar 30, 2023
2 parents 5a0e3f0 + 8c729cb commit f24032c
Show file tree
Hide file tree
Showing 7,431 changed files with 5,621,549 additions and 13,656,336 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
6 changes: 3 additions & 3 deletions .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.177.0/containers/javascript-node/.devcontainer/base.Dockerfile
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/blob/main/containers/javascript-node/.devcontainer/base.Dockerfile

# [Choice] Node.js version: 16, 14, 12
ARG VARIANT="16-buster"
# [Choice] Node.js version
ARG VARIANT="18-buster"
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}

# [Optional] Uncomment this section to install additional OS packages.
Expand Down
7 changes: 3 additions & 4 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
"name": "docs.github.com",
"build": {
"dockerfile": "Dockerfile",
// Update 'VARIANT' to pick a Node version: 12, 14, 16
"args": { "VARIANT": "16" }
// Update 'VARIANT' to pick a Node version
"args": { "VARIANT": "18" }
},

// Set *default* container specific settings.json values on container create.
Expand All @@ -17,7 +17,6 @@

// Install features. Type 'feature' in the VS Code command palette for a full list.
"features": {
"git-lfs": "latest",
"sshd": "latest"
},

Expand All @@ -42,7 +41,7 @@
},

// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "git lfs pull && npm ci",
"postCreateCommand": "npm ci",

// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node"
Expand Down
5 changes: 3 additions & 2 deletions .devcontainer/test-custom-devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"name": "test",
"name": "Test postCreateCommand",
"image": "mcr.microsoft.com/devcontainers/universal:linux",

"settings": {
"terminal.integrated.shell.linux": "/bin/zsh",
Expand All @@ -21,6 +22,6 @@
"forwardPorts": [5000],

// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "echo test > aaa-TEST.txt"
"postCreateCommand": "echo Added: `date` > aaa-TEST.txt"

}
2 changes: 0 additions & 2 deletions .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ docs/
node_modules/
script/
tests/
# These files are currently being added by automation in github/github. We will be removing the files permanently when we fix the broken automation in github/github.
lib/rest/static/dereferenced
# Folder is cloned during the preview + prod workflows, the assets are merged into other locations for use before the build
docs-early-access/
# During the preview deploy untrusted user code may be cloned into this directory
Expand Down
1 change: 0 additions & 1 deletion .github/CODEOWNERS
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
/.github/ @github/docs-engineering
/script/ @github/docs-engineering
/includes/ @github/docs-engineering
/lib/search/popular-pages.json @github/docs-engineering
Dockerfile @github/docs-engineering
package-lock.json @github/docs-engineering
package.json @github/docs-engineering
Expand Down
4 changes: 2 additions & 2 deletions .github/PULL_REQUEST_TEMPLATE.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ Thank you for contributing to this project! You must fill out the information be

### Why:

Closes ISSUE
Closes:

<!-- If there's an existing issue for your change, please replace ISSUE above with a link to the issue.
<!-- If there's an existing issue for your change, please link to it above.
If there's _not_ an existing issue, please open one first to make it more likely that this update will be accepted: https://github.com/github/docs/issues/new/choose. -->

### What's being changed (if available, include any code snippets, screenshots, or gifs):
Expand Down
48 changes: 32 additions & 16 deletions .github/actions-scripts/content-changes-table-comment.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,50 +28,60 @@ const MAX_COMMENT_SIZE = 125000

const PROD_URL = 'https://docs.github.com'

run()
// When this file is invoked directly from action as opposed to being imported
if (import.meta.url.endsWith(process.argv[1])) {
const owner = context.repo.owner
const repo = context.payload.repository.name
const baseSHA = context.payload.pull_request.base.sha
const headSHA = context.payload.pull_request.head.sha

async function run() {
const isHealthy = await waitUntilUrlIsHealthy(new URL('/healthz', APP_URL).toString())
if (!isHealthy) {
return core.setFailed(`Timeout waiting for preview environment: ${APP_URL}`)
core.setFailed(`Timeout waiting for preview environment: ${APP_URL}`)
} else {
const markdownTable = await main(owner, repo, baseSHA, headSHA)
core.setOutput('changesTable', markdownTable)
}
}

async function main(owner, repo, baseSHA, headSHA) {
const octokit = github.getOctokit(GITHUB_TOKEN)
// get the list of file changes from the PR
const response = await octokit.rest.repos.compareCommitsWithBasehead({
owner: context.repo.owner,
repo: context.payload.repository.name,
basehead: `${context.payload.pull_request.base.sha}...${context.payload.pull_request.head.sha}`,
owner,
repo,
basehead: `${baseSHA}...${headSHA}`,
})

const { files } = response.data

let markdownTable =
'| **Source** | **Preview** | **Production** | **What Changed** |\n|:----------- |:----------- |:----------- |:----------- |\n'
const markdownTableHead = [
'| **Source** | **Preview** | **Production** | **What Changed** |',
'|:----------- |:----------- |:----------- |:----------- |',
]
let markdownTable = ''

const pathPrefix = 'content/'
const articleFiles = files.filter(
({ filename }) => filename.startsWith(pathPrefix) && !filename.endsWith('/index.md')
({ filename }) => filename.startsWith(pathPrefix) && filename.toLowerCase() !== 'readme.md'
)

const lines = await Promise.all(
articleFiles.map(async (file) => {
const sourceUrl = file.blob_url
const fileName = file.filename.slice(pathPrefix.length)
const fileUrl = fileName.slice(0, fileName.lastIndexOf('.'))
const fileUrl = fileName.replace('/index.md', '').replace(/\.md$/, '')

// get the file contents and decode them
// this script is called from the main branch, so we need the API call to get the contents from the branch, instead
const fileContents = await getContents(
context.repo.owner,
context.payload.repository.name,
owner,
repo,
// Can't get its content if it no longer exists.
// Meaning, you'd get a 404 on the `getContents()` utility function.
// So, to be able to get necessary meta data about what it *was*,
// if it was removed, fall back to the 'base'.
file.status === 'removed'
? context.payload.pull_request.base.sha
: context.payload.pull_request.head.sha,
file.status === 'removed' ? baseSHA : headSHA,
file.filename
)

Expand Down Expand Up @@ -164,7 +174,13 @@ async function run() {
return previous
}, markdownTable.length)

if (cappedLines.length) {
cappedLines.unshift(...markdownTableHead)
}

markdownTable += cappedLines.join('\n')

core.setOutput('changesTable', markdownTable)
return markdownTable
}

export default main
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ Poke around several pages, ensure that the stylesheets are working properly, ima

- [ ] In your `docs-internal` checkout, create a new branch `remove-<version>-static-files` branch: `git checkout -b remove-<version>-static-files` (you can branch off of `main` or from your `deprecate-<version>` branch, up to you).
- [ ] Run `script/enterprise-server-deprecations/remove-static-files.js` and commit results.
- [ ] Re-generate the static files by running `script/rest/update-files.js --decorate-only`.
- [ ] Re-generate the static files by running `src/rest/scripts/update-files.js --decorate-only`.
- [ ] Open a new PR.
- [ ] Get a review from docs-engineering and merge. This step can be merged independently from step 6. The purpose of splitting up steps 5 and 6 is to focus the review on specific files.

Expand Down Expand Up @@ -158,8 +158,8 @@ For each language, upload the new file to Azure blob storage in the `enterprise`

After uploading the new files, you will need to purge the Fastly cache for the single page. From Okta, go to Fastly and select `docs`. Click `Purge` then `Purge URL`. If you need to purge a whole path, just do a `Purge All`

![](/assets/images/fastly_purge.jpg)
![](/contributing/images/fastly_purge.jpg)

Enter the URL or path and do a soft purge.

![](/assets/images/fastly_purge_url.jpg)
![](/contributing/images/fastly_purge_url.jpg)
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ If you aren't comfortable going through the steps alone, sync up with a docs eng
```
script/update-enterprise-dates.js
```
- [ ] Create REST files based on previous version. Copy the latest GHES version of the decorate file from `lib/rest/static/decorated` to a new file in the same directory for the new GHES release. Ex, `cp lib/rest/static/decorated/ghes-3.4.json lib/rest/static/decorated/ghes-3.5.json`.
- [ ] Create REST files based on previous version. Copy the latest GHES version of the decorate file from `src/rest/data` to a new file in the same directory for the new GHES release. Ex, `cp src/rest/data/ghes-3.4.json src/rest/data/ghes-3.5.json`.
- [ ] Create GraphQL files based on previous version:
Expand Down Expand Up @@ -94,14 +94,14 @@ This file should be automatically updated, but you can also run `script/update-e
- [ ] [Freeze the repos](https://github.com/github/docs-content/blob/main/docs-content-docs/docs-content-workflows/freezing.md) at least 1-2 days before the release, and post an announcement in Slack so everybody knows. It's helpful to freeze the repos before doing the OpenAPI merges to avoid changes to the megabranch while preparing and deploying.
- [ ] Alert the Neon Squad (formerly the docs-ecosystem team) 1-2 days before the release to deploy to `github/github`. A PR should already be open in `github/github`, to change the OpenAPI schema config `published` to `true` in `app/api/description/config/releases/ghes-<NEXT RELEASE NUMBER>.yaml`. They will need to:
- [ ] Get the required approval from `@github/ecosystem-api-reviewers` then deploy the PR to dotcom. This process generally takes 30-90 minutes.
- [ ] Once the PR merges, make sure that the auto-generated PR titled "Update OpenAPI Descriptions" in doc-internal contains the decorated JSON files for the new GHES release. If everything looks good, merge the "Update OpenAPI Description" PR into the GHES release megabranch. **Note:** Don't attempt to resolve conflicts for changes to the `lib/rest/static/decorated` files. Instead delete the existing OpenAPI files for the release version from the megabranch (that is, revert the changes to the `lib/rest/static` decorated JSON files, e.g., from the megabranch do a `git checkout origin/main lib/rest/static/*`), so there are no conflicts to resolve and to ensure that the incoming artifacts are the correct ones.
- [ ] Once the PR merges, make sure that the auto-generated PR titled "Update OpenAPI Descriptions" in doc-internal contains the decorated JSON files for the new GHES release. If everything looks good, merge the "Update OpenAPI Description" PR into the GHES release megabranch. **Note:** Don't attempt to resolve conflicts for changes to the `src/rest/data` files. Instead delete the existing OpenAPI files for the release version from the megabranch (that is, revert the changes to the `src/rest/data` decorated JSON files, e.g., from the megabranch do a `git checkout origin/main src/rest/data/*`), so there are no conflicts to resolve and to ensure that the incoming artifacts are the correct ones.
- [ ] Alert the Ecosystem-API team in #ecosystem-api about the pending release freeze and incoming blocking review of OpenAPI updates in the public REST API description (the `rest-api-descriptions` repo). They'll need to block any future "Update OpenAPI Descriptions" PRs in the public REST API description until after the ship.
- [ ] Add a blocking review to the auto-generated "Update OpenAPI Descriptions" PR in the public REST API description. (You or they will remove this blocking review once the GHES release ships.)
### 🚢 🛳️ 🚢 Shipping the release branch
- [ ] The megabranch creator should push the search index LFS objects for the public `github/docs` repo. The LFS objects were already pushed for the internal repo after the `sync-english-index-for-<PLAN@RELEASE>` was added to the megabranch. To push the LFS objects to the public repo:
- [ ] Sync the search indices for the new release:
1. First navigate to the [sync search indices workflow](https://github.com/github/docs-internal/actions/workflows/sync-search-indices.yml).
2. Then, to run the workflow with parameters, click on `Run workflow` button.
3. A modal will pop up where you will set the following inputs:
Expand Down
44 changes: 44 additions & 0 deletions .github/actions-scripts/find-past-built-pr.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
#!/usr/bin/env node
import got from 'got'

import { setOutput } from '@actions/core'

import github from '../../script/helpers/github.js'
import { getActionContext } from './lib/action-context.js'

// Entry point: discover which PR corresponds to the commit SHA that
// production docs.github.com was last built from, and expose that PR
// number as the `number` output of this action step.
async function main() {
  const builtSha = await getBuiltSHA()
  console.log({ sha: builtSha })

  const { owner, repo } = getActionContext()
  const octokit = github()

  // Search issues/PRs that mention the deployed SHA, scoped to this repo.
  const query = `${builtSha} repo:"${owner}/${repo}"`
  const { data } = await octokit.rest.search.issuesAndPullRequests({ q: query })

  let number = ''
  for (const item of data.items) {
    // console.log(item)
    console.log('ID:', item.id)
    console.log('Number:', item.number)
    console.log('URL:', item.html_url)
    number = item.number
    // Stop at the first hit that has a number (search results are best-match first).
    if (number) break
  }

  setOutput('number', number)
}

// Fetch the commit SHA that production docs.github.com reports it was
// built from (the /_build endpoint returns the bare SHA as plain text).
// Returns the 40-hex-char SHA string; throws if the trimmed response body
// is not exactly one SHA.
async function getBuiltSHA() {
  const r = await got('https://docs.github.com/_build')
  const sha = r.body.trim()
  // Anchor the pattern: the original unanchored /[a-f0-9]{40}/ would accept
  // any body merely *containing* 40 hex chars (e.g. an HTML error page that
  // happens to embed a SHA, or a SHA followed by garbage).
  if (!/^[a-f0-9]{40}$/.test(sha)) {
    throw new Error(`Response body does not look like a SHA ('${r.body.slice(0, 100)}'...)`)
  }
  return sha
}

// Run the script; a rejected promise surfaces as an unhandled rejection,
// which fails the action step with a non-zero exit code.
main()
68 changes: 64 additions & 4 deletions .github/actions-scripts/rendered-content-link-checker.js
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,12 @@ async function main(core, octokit, uploadArtifact, opts = {}) {
)
}
}
} else {
// It might be that the PR got a comment about >0 flaws before,
// and now it can update that comment to say all is well again.
if (shouldComment) {
await commentOnPR(core, octokit, flaws, opts)
}
}
}

Expand Down Expand Up @@ -327,7 +333,7 @@ async function linkReports(core, octokit, newReport, opts) {

const [owner, repo] = reportRepository.split('/')

core.debug('Attempting to link reports...')
core.info('Attempting to link reports...')
// Find previous broken link report issue
let previousReports
try {
Expand All @@ -346,7 +352,7 @@ async function linkReports(core, octokit, newReport, opts) {
core.setFailed('Error listing issues for repo')
throw error
}
core.debug(`Found ${previousReports.length} previous reports`)
core.info(`Found ${previousReports.length} previous reports`)

if (previousReports.length <= 1) {
core.info('No previous reports to link to')
Expand Down Expand Up @@ -422,10 +428,48 @@ async function commentOnPR(core, octokit, flaws, opts) {
return
}

const findAgainSymbol = '<!-- rendered-content-link-checker-comment-finder -->'

const body = flawIssueDisplay(flaws, opts, false)

const { data } = await octokit.rest.issues.listComments({
owner,
repo,
issue_number: pullNumber,
})
let previousCommentId
for (const { body, id } of data) {
if (body.includes(findAgainSymbol)) {
previousCommentId = id
}
}

// Since failed external urls aren't included in PR comment, body may be empty
if (!body) {
core.info('No flaws qualify for comment')

if (previousCommentId) {
const nothingComment = 'Previous broken links comment now moot. 👌😙'
await octokit.rest.issues.updateComment({
owner,
repo,
comment_id: previousCommentId,
body: `${nothingComment}\n\n${findAgainSymbol}`,
})
core.info(`Updated comment on PR: ${pullNumber} (${previousCommentId})`)
}
return
}

if (previousCommentId) {
const noteComment = '(*The original automated comment was updated*)'
await octokit.rest.issues.updateComment({
owner,
repo,
comment_id: previousCommentId,
body: `${body}\n\n${noteComment}\n\n${findAgainSymbol}`,
})
core.info(`Updated comment on PR: ${pullNumber} (${previousCommentId})`)
return
}

Expand All @@ -434,7 +478,7 @@ async function commentOnPR(core, octokit, flaws, opts) {
owner,
repo,
issue_number: pullNumber,
body,
body: `${body}\n\n${findAgainSymbol}`,
})
core.info(`Created comment on PR: ${pullNumber}`)
} catch (error) {
Expand Down Expand Up @@ -607,7 +651,13 @@ async function processPermalink(core, permalink, page, pageMap, redirects, opts,
patient,
externalServerErrorsAsWarning,
} = opts
const html = await renderInnerHTML(page, permalink)
let html = ''
try {
html = await renderInnerHTML(page, permalink)
} catch (error) {
console.warn(`The error happened trying to render ${page.relativePath}`)
throw error
}
const $ = cheerio.load(html, { xmlMode: true })
const flaws = []
const links = []
Expand Down Expand Up @@ -819,6 +869,16 @@ async function checkHrefLink(
// But, if that link was a redirect, that would have been left
// untouched.
if (pathname.endsWith('/')) {
const whatifPathname = pathname.slice(0, -1)
if (getRedirect(whatifPathname, { redirects, pages: pageMap })) {
return {
WARNING: `Redirect to ${getRedirect(whatifPathname, { redirects, pages: pageMap })}`,
}
} else if (!pageMap[whatifPathname]) {
if (!deprecatedVersionPrefixesRegex.test(whatifPathname)) {
return { CRITICAL: 'Broken link' }
}
}
return { WARNING: 'Links with a trailing / will always redirect' }
} else {
if (pathname.split('/')[1] in STATIC_PREFIXES) {
Expand Down
Loading

0 comments on commit f24032c

Please sign in to comment.