Skip to content
Closed

test #47

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 55 additions & 26 deletions .github/workflows/notify_test_workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,34 +57,63 @@ jobs:
await new Promise(r => setTimeout(r, 3000))

const runs = await github.request(endpoint, params)
const runID = runs.data.workflow_runs[0].id
// TODO: If no workflows were found, it's likely GitHub Actions was not enabled

if (runs.data.workflow_runs[0].head_sha != context.payload.pull_request.head.sha) {
throw new Error('There was a new unsynced commit pushed. Please retrigger the workflow.');
}

const runUrl = 'https://github.com/'
+ context.payload.pull_request.head.repo.full_name
+ '/actions/runs/'
+ runID

const name = 'Build and test'
const head_sha = context.payload.pull_request.head.sha
const status = 'queued'
let status = 'queued'

if (runs.data.workflow_runs.length === 0) {
status = 'completed'
const conclusion = 'action_required'

github.checks.create({
owner: context.repo.owner,
repo: context.repo.repo,
name: name,
head_sha: head_sha,
status: status,
conclusion: conclusion,
output: {
title: 'Workflow run detection failed',
summary: `
Unable to detect the workflow run for testing the changes in your PR.

github.checks.create({
...context.repo,
name,
head_sha,
status,
output: {
title: 'Test results',
summary: runUrl,
text: JSON.stringify({
owner: context.payload.pull_request.head.repo.owner.login,
repo: context.payload.pull_request.head.repo.name,
run_id: runID
})
1. If you did not enable GitHub Actions in your forked repository, please enable it. See also [Disabling or limiting GitHub Actions for a repository](https://docs.github.com/en/github/administering-a-repository/disabling-or-limiting-github-actions-for-a-repository) for more details.
2. It is possible your branch is based on the old \`master\` branch in Apache Spark, please sync your branch to the latest master branch. For example as below:
\`\`\`bash
git fetch upstream
git rebase upstream/master
git push origin YOUR_BRANCH --force
\`\`\``
}
})
} else {
const runID = runs.data.workflow_runs[0].id

if (runs.data.workflow_runs[0].head_sha != context.payload.pull_request.head.sha) {
throw new Error('There was a new unsynced commit pushed. Please retrigger the workflow.');
}
})

const runUrl = 'https://github.com/'
+ context.payload.pull_request.head.repo.full_name
+ '/actions/runs/'
+ runID

github.checks.create({
owner: context.repo.owner,
repo: context.repo.repo,
name: name,
head_sha: head_sha,
status: status,
output: {
title: 'Test results',
summary: '[See test results](' + runUrl + ')',
text: JSON.stringify({
owner: context.payload.pull_request.head.repo.owner.login,
repo: context.payload.pull_request.head.repo.name,
run_id: runID
})
},
details_url: runUrl,
})
}
6 changes: 4 additions & 2 deletions .github/workflows/update_build_status.yml
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ jobs:

// Iterate over the GitHub Checks in the PR
for await (const cr of checkRuns.data.check_runs) {
if (cr.name == 'Build and test') {
if (cr.name == 'Build and test' && cr.conclusion != "action_required") {
// text contains parameters to make request in JSON.
const params = JSON.parse(cr.output.text)

Expand All @@ -74,7 +74,8 @@ jobs:
check_run_id: cr.id,
output: cr.output,
status: run.data.status,
conclusion: run.data.conclusion
conclusion: run.data.conclusion,
details_url: run.data.details_url
})
} else {
console.log(' Run ' + cr.id + ': set status (' + run.data.status + ')')
Expand All @@ -84,6 +85,7 @@ jobs:
check_run_id: cr.id,
output: cr.output,
status: run.data.status,
details_url: run.data.details_url
})
}

Expand Down
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
# Apache Spark

asd
Disable Actions Test
Spark is a unified analytics engine for large-scale data processing. It provides
high-level APIs in Scala, Java, Python, and R, and an optimized engine that
supports general computation graphs for data analysis. It also supports a
Expand Down