Add Dagster Deployment Action skeleton (airbytehq#24929)
* Add code skeleton

* Remove PR deployments

* Remove connectors change

* Add action git ignore

* Update YAML to deploy properly

* Add newline at eof

* Setup python-executable deploy

* Add temporary script

* Update readme

* Remove github workflow

* Add dagger pipeline for deploy

* Add github action

* Update readme

* Remove comment

* Move script into dagger pipeline

* Add run test helper and skip test logic

* Have the dagster orchestrator properly generate the setup.py file

* Remove deploy script

* Address PR comments

* Update lock

* Automated Commit - Formatting Changes

---------

Co-authored-by: bnchrch <bnchrch@users.noreply.github.com>
bnchrch and bnchrch authored Apr 18, 2023
1 parent 9a5cfc5 commit b6f6bae
Showing 10 changed files with 853 additions and 274 deletions.
23 changes: 23 additions & 0 deletions .github/workflows/metadata_service_deploy_orchestrator_dagger.yml
@@ -0,0 +1,23 @@
name: Connector metadata service deploy orchestrator

on:
  workflow_dispatch:
  push:
    branches:
      - master
    paths:
      - "airbyte-ci/connectors/metadata_service/orchestrator/**"
jobs:
  connector_metadata_service_deploy_orchestrator:
    name: Connector metadata service deploy orchestrator
    runs-on: medium-runner
    env:
      CI_GITHUB_ACCESS_TOKEN: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
    steps:
      - name: Deploy the metadata orchestrator
        id: metadata-orchestrator-deploy-orchestrator-pipeline
        uses: ./.github/actions/run-dagger-pipeline
        with:
          subcommand: "metadata deploy orchestrator"
        env:
          DAGSTER_CLOUD_API_TOKEN: ${{ secrets.DAGSTER_CLOUD_METADATA_API_TOKEN }}
3 changes: 3 additions & 0 deletions .gitignore
@@ -91,3 +91,6 @@ docker-compose.debug.yaml
.env
.env.dev
flags.yml

# Ignore generated credentials from google-github-actions/auth
gha-creds-*.json
18 changes: 17 additions & 1 deletion airbyte-ci/connectors/metadata_service/orchestrator/README.md
@@ -86,7 +86,7 @@ Refer to the [Dagster documentation](https://docs.dagster.io/concepts) for more
### Starting the Dagster Daemons
Start the orchestrator with the following command:
```bash
poetry run dagster dev -m orchestrator
poetry run dagster dev
```

Then you can access the Dagster UI at http://localhost:3000
@@ -110,3 +110,19 @@ In some cases you may want to run the orchestrator without the UI. To learn more
poetry run pytest
```

## Deploying to Dagster Cloud manually
Note: This is a temporary solution until we have a CI/CD pipeline set up.

Getting CI/CD set up is currently blocked until we hear back from Dagster about a better way to use relative imports in a Dagster Cloud deployment.

### Installing the dagster-cloud CLI
```bash
pip install dagster-cloud
dagster-cloud config
```

### Deploying the orchestrator
```bash
cd orchestrator
DAGSTER_CLOUD_API_TOKEN=<YOUR-DAGSTER-CLOUD-TOKEN> airbyte-ci metadata deploy orchestrator
```
@@ -0,0 +1,4 @@
locations:
  - location_name: metadata_service_orchestrator
    code_source:
      package_name: orchestrator
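The new code-location file above (a `dagster_cloud.yaml`-style config) tells Dagster Cloud to load the `orchestrator` Python package as the `metadata_service_orchestrator` location. For that to work, the package has to expose its Dagster definitions at module scope; below is a minimal illustrative sketch of such an entry point — not the actual orchestrator package, which defines its own assets and resources.

```python
# orchestrator/__init__.py — illustrative sketch only, not the real package.
# Dagster Cloud imports the package named in `package_name` and looks for a
# module-level Definitions (or repository) object to use as the code location.
from dagster import Definitions, asset


@asset
def example_metadata_asset() -> str:
    # Placeholder standing in for the real metadata-service assets.
    return "hello from the metadata orchestrator"


defs = Definitions(assets=[example_metadata_asset])
```

Locally, the `[tool.dagster]` section in `pyproject.toml` (shown below) points `dagster dev` at the same module, which is why the README now runs `poetry run dagster dev` without an explicit `-m orchestrator`.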
871 changes: 635 additions & 236 deletions airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock

Large diffs are not rendered by default.

@@ -11,7 +11,7 @@ python = "^3.9"
dagit = "^1.1.21"
dagster = "^1.1.21"
pandas = "^1.5.3"
dagster-gcp = "^0.17.21"
dagster-gcp = "^0.18.6"
google = "^3.0.0"
jinja2 = "^3.1.2"
pygithub = "^1.58.0"
@@ -20,6 +20,9 @@ deepdiff = "^6.3.0"
mergedeep = "^1.3.4"
pydash = "^6.0.2"
dpath = "^2.1.5"
dagster-cloud = "^1.2.6"
grpcio = "^1.47.0"
poetry2setup = "^1.1.0"


[tool.poetry.group.dev.dependencies]
@@ -29,6 +32,10 @@ ptpython = "^3.0.23"
[tool.poetry.group.test.dependencies]
pytest = "^7.2.2"


[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.dagster]
module_name = "orchestrator"
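The `poetry2setup` dependency supports the "generate the setup.py file" step called out in the commit message — presumably so the Dagster Cloud deploy can install the code location as a regular Python package. A rough sketch of that generation step, assuming (as the poetry2setup docs describe) that its CLI prints the generated `setup.py` to stdout; the actual commit performs this inside the Dagger deploy pipeline rather than in a standalone helper like this:

```python
# Hypothetical helper mirroring the "generate setup.py" step from the commit
# message; assumes the poetry2setup CLI writes the generated file to stdout.
import subprocess
from pathlib import Path


def generate_setup_py(package_dir: Path) -> Path:
    """Render a setup.py for the Poetry project located at package_dir."""
    result = subprocess.run(
        ["poetry2setup"],
        cwd=package_dir,
        check=True,
        capture_output=True,
        text=True,
    )
    setup_py = package_dir / "setup.py"
    setup_py.write_text(result.stdout)
    return setup_py


# Example: generate_setup_py(Path("airbyte-ci/connectors/metadata_service/orchestrator"))
```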
11 changes: 9 additions & 2 deletions tools/ci_connector_ops/ci_connector_ops/pipelines/bases.py
@@ -73,7 +73,8 @@ def __str__(self) -> str: # noqa D105
class Step(ABC):
"""An abstract class to declare and run pipeline step."""

title: ClassVar
title: ClassVar[str]
started_at: ClassVar[datetime]

def __init__(self, context: ConnectorTestContext) -> None: # noqa D107
self.context = context
@@ -87,6 +88,7 @@ async def run(self, *args, **kwargs) -> StepResult:
Returns:
StepResult: The step result following the step run.
"""
self.started_at = datetime.utcnow()
try:
return await self._run(*args, **kwargs)
except QueryError as e:
@@ -269,7 +271,12 @@ def print(self):
step.stylize(step_result.status.get_rich_style())
result = Text(step_result.status.value)
result.stylize(step_result.status.get_rich_style())
step_results_table.add_row(step, result, f"{round((self.created_at - step_result.created_at).total_seconds())}s")

if step_result.status is StepStatus.SKIPPED:
step_results_table.add_row(step, result, "N/A")
else:
run_time_seconds = round((step_result.created_at - step_result.step.started_at).total_seconds())
step_results_table.add_row(step, result, f"{run_time_seconds}s")

to_render = [step_results_table]
if self.failed_steps:
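The report change above switches the duration column from "time between the step result and the report's creation" to a true per-step run time: `Step.run()` now stamps `started_at` before executing, and the table subtracts it from the result's `created_at` (skipped steps show `N/A`). A stripped-down sketch of that timing pattern, using stand-in classes rather than the real `Step`/`StepResult`:

```python
# Simplified stand-ins to illustrate the timing change; the real classes live
# in the pipelines bases module shown above.
import time
from dataclasses import dataclass, field
from datetime import datetime


@dataclass
class FakeStepResult:
    created_at: datetime = field(default_factory=datetime.utcnow)


class FakeStep:
    def run(self) -> FakeStepResult:
        # Stamp the start time before doing any work, as Step.run() now does.
        self.started_at = datetime.utcnow()
        time.sleep(0.5)  # stand-in for the real step work
        return FakeStepResult()


step = FakeStep()
result = step.run()
# Same arithmetic as the updated report table: result creation minus step start.
run_time_seconds = round((result.created_at - step.started_at).total_seconds())
print(f"{run_time_seconds}s")
```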
@@ -164,7 +164,3 @@ def test(
should_send=ctx.obj.get("ci_context") == CIContext.PULL_REQUEST,
logger=logger,
)


if __name__ == "__main__":
test()
@@ -11,6 +11,7 @@
run_metadata_orchestrator_test_pipeline,
run_metadata_upload_pipeline,
run_metadata_validation_pipeline,
run_metadata_orchestrator_deploy_pipeline,
)
from ci_connector_ops.pipelines.utils import DaggerPipelineCommand, get_all_metadata_files, get_modified_metadata_files
from rich.logging import RichHandler
@@ -97,6 +98,29 @@ def upload(ctx: click.Context, gcs_bucket_name: str, gcs_credentials: str, modif
)


# DEPLOY GROUP


@metadata.group(help="Commands related to deploying components of the metadata service.")
@click.pass_context
def deploy(ctx: click.Context):
pass


@deploy.command(cls=DaggerPipelineCommand, name="orchestrator", help="Deploy the metadata service orchestrator to production")
@click.pass_context
def deploy_orchestrator(ctx: click.Context):
return anyio.run(
run_metadata_orchestrator_deploy_pipeline,
ctx.obj["is_local"],
ctx.obj["git_branch"],
ctx.obj["git_revision"],
ctx.obj.get("gha_workflow_run_url"),
ctx.obj.get("pipeline_start_timestamp"),
ctx.obj.get("ci_context"),
)


# TEST GROUP


@@ -106,9 +130,9 @@ def test(ctx: click.Context):
pass


@test.command(cls=DaggerPipelineCommand, help="Run tests for the metadata service library.")
@test.command(cls=DaggerPipelineCommand, name="lib", help="Run tests for the metadata service library.")
@click.pass_context
def lib(ctx: click.Context):
def test_lib(ctx: click.Context):
return anyio.run(
run_metadata_lib_test_pipeline,
ctx.obj["is_local"],
@@ -120,9 +144,9 @@ def lib(ctx: click.Context):
)


@test.command(cls=DaggerPipelineCommand, help="Run tests for the metadata service orchestrator.")
@test.command(cls=DaggerPipelineCommand, name="orchestrator", help="Run tests for the metadata service orchestrator.")
@click.pass_context
def orchestrator(ctx: click.Context):
def test_orchestrator(ctx: click.Context):
return anyio.run(
run_metadata_orchestrator_test_pipeline,
ctx.obj["is_local"],
@@ -132,7 +156,3 @@ def orchestrator(ctx: click.Context):
ctx.obj.get("pipeline_start_timestamp"),
ctx.obj.get("ci_context"),
)


if __name__ == "__main__":
lib()
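With the new `deploy` group in place, `metadata deploy orchestrator` is the subcommand the GitHub workflow above hands to `run-dagger-pipeline`, and `airbyte-ci metadata deploy orchestrator` is the manual invocation documented in the README. A quick, hypothetical way to sanity-check the Click wiring locally is to invoke the group in-process; the import path here is an assumption based on the `tools/ci_connector_ops` package layout and may need adjusting:

```python
# Hypothetical smoke test for the new Click commands; the module path below is
# an assumption, not something shown in this diff.
from click.testing import CliRunner

from ci_connector_ops.pipelines.commands.groups.metadata import metadata

runner = CliRunner()

# --help short-circuits before any pipeline runs, so no Dagger or GCS setup is needed.
for args in (["deploy", "--help"], ["deploy", "orchestrator", "--help"], ["test", "--help"]):
    result = runner.invoke(metadata, args)
    assert result.exit_code == 0, result.output
    print(result.output)
```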