
Commit a2b7d5b

documents deploy changes
1 parent 2372ea3 commit a2b7d5b

9 files changed (+43 −25 lines changed)

.github/workflows/test_common.yml

Lines changed: 2 additions & 2 deletions

@@ -80,11 +80,11 @@ jobs:
         run: poetry install --no-interaction -E duckdb -E cli

       - run: |
-          poetry run pytest tests/extract tests/pipeline && poetry run pytest tests/cli/common
+          poetry run pytest tests/extract tests/pipeline tests/cli/common
         if: runner.os != 'Windows'
         name: Run extra tests Linux/MAC
       - run: |
-          poetry run pytest tests/extract tests/pipeline && poetry run pytest tests/cli/common
+          poetry run pytest tests/extract tests/pipeline tests/cli/common
         if: runner.os == 'Windows'
         name: Run extra tests Windows
         shell: cmd

Makefile

Lines changed: 1 addition & 1 deletion

@@ -61,7 +61,7 @@ test-local:
 	DESTINATION__POSTGRES__CREDENTIALS=postgresql://loader:loader@localhost:5432/dlt_data DESTINATION__DUCKDB__CREDENTIALS=duckdb:///_storage/test_quack.duckdb poetry run pytest tests -k '(postgres and duckdb)'

 test-common:
-	poetry run pytest tests/common tests/normalize tests/extract tests/pipeline tests/reflection tests/sources && poetry run pytest tests/cli/common
+	poetry run pytest tests/common tests/normalize tests/extract tests/pipeline tests/reflection tests/sources tests/cli/common

 reset-test-storage:
 	-rm -r _storage

dlt/cli/_dlt.py

Lines changed: 9 additions & 2 deletions

@@ -19,7 +19,8 @@
 from dlt.cli.telemetry_command import DLT_TELEMETRY_DOCS_URL, change_telemetry_status_command, telemetry_status_command

 try:
-    from dlt.cli.deploy_command import PipelineWasNotRun, deploy_command, DLT_DEPLOY_DOCS_URL, DeploymentMethods, COMMAND_DEPLOY_REPO_LOCATION, SecretFormats
+    from dlt.cli import deploy_command
+    from dlt.cli.deploy_command import PipelineWasNotRun, DLT_DEPLOY_DOCS_URL, DeploymentMethods, COMMAND_DEPLOY_REPO_LOCATION, SecretFormats
 except ModuleNotFoundError:
     pass

@@ -57,7 +58,13 @@ def deploy_command_wrapper(pipeline_script_path: str, deployment_method: str, re

     from git import InvalidGitRepositoryError, NoSuchPathError
     try:
-        deploy_command(pipeline_script_path=pipeline_script_path, deployment_method=deployment_method, repo_location=repo_location, branch=branch, **kwargs)
+        deploy_command.deploy_command(
+            pipeline_script_path=pipeline_script_path,
+            deployment_method=deployment_method,
+            repo_location=repo_location,
+            branch=branch,
+            **kwargs
+        )
     except (CannotRestorePipelineException, PipelineWasNotRun) as ex:
         click.secho(str(ex), err=True, fg="red")
         fmt.note("You must run the pipeline locally successfully at least once in order to deploy it.")

docs/website/docs/general-usage/credentials.md

Lines changed: 1 addition & 1 deletion

@@ -38,7 +38,7 @@ Once you have credentials for the source and destination, add them to the file a

 To add credentials to your deployment,

-- either use one of the dlt deploy commands;
+- either use one of the `dlt deploy` commands;
 - or follow the below instructions to pass credentials via code or environment.

 ### Passing credentials as code

docs/website/docs/getting-started/build-a-data-platform/building-data-warehouse.md

Lines changed: 1 addition & 1 deletion

@@ -104,7 +104,7 @@ complexity.
 **This is one of the main reasons we created `dlt`:**

 - For common sources, `dlt`’s community-built and maintained pipelines can deploy to your airflow
-  with only two CLI commands (`dlt init source`, `dlt deploy source airflow`). Maintenance is partly
+  with only two CLI commands (`dlt init source`, `dlt deploy pipeline_script.py airflow-composer`). Maintenance is partly
   automated, and open sourced, while customisations are easy.
 - For simple sources, you can have an easy python function to just take any data and load it in a
   robust way.

docs/website/docs/reference/command-line-interface.md

Lines changed: 7 additions & 1 deletion

@@ -33,7 +33,13 @@ Shows all available verified sources and their short descriptions. For each sour
 and prints the relevant warning.

 ## `dlt deploy`
-This command prepares your pipeline for deployment and gives you step by step instruction how to accomplish it.
+This command prepares your pipeline for deployment and gives you step-by-step instructions on how to accomplish it. To enable this functionality, please first execute
+```sh
+pip install "dlt[cli]"
+```
+which will add the required additional packages to the current environment.
+
+> 💡 We ask you to install those dependencies separately to keep our core library small and make it work everywhere.

 ### github-action

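Since the extras are optional, a quick way to confirm they landed in the active environment is to attempt the import that `dlt/cli/_dlt.py` guards above; this check is illustrative, not part of the documented CLI:

```python
# Illustrative check that the `cli` extra is installed: the deploy module
# imports only after `pip install "dlt[cli]"`.
try:
    import dlt.cli.deploy_command  # noqa: F401
    print("dlt[cli] extras present - `dlt deploy` is available")
except ModuleNotFoundError as exc:
    print(f"missing optional dependency {exc.name!r}; run: pip install 'dlt[cli]'")
```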

docs/website/docs/walkthroughs/deploy-a-pipeline/deploy-with-airflow-composer.md

Lines changed: 5 additions & 2 deletions

@@ -26,9 +26,12 @@ python3 {pipeline_name}_pipeline.py
 This should successfully load data from the source to the destination once and allows `dlt` to gather required information for the deployment.

 ## 3. Initialize deployment
+First, you need to add the additional dependencies that the `deploy` command requires:
+```bash
+pip install "dlt[cli]"
+```

-In the same `dlt` project as your working pipeline, you can run a deployment command.
-
+then run:
 ```bash
 dlt deploy {pipeline_name}_pipeline.py airflow-composer
 ```

docs/website/docs/walkthroughs/deploy-a-pipeline/deploy-with-github-actions.md

Lines changed: 7 additions & 7 deletions

@@ -29,16 +29,16 @@ python3 chess_pipeline.py # replace chess_pipeline.py with your pipeline file
 This should successfully load data from the source to the destination once.

 ## Initialize deployment
-
-In the same `dlt` project as your working pipeline, you can create a deployment using
-[GitHub Actions](https://github.com/features/actions) that will load data with the `chess_pipeline.py` script
-every 30 minutes by running:
-
+First, you need to add the additional dependencies that the `deploy` command requires:
+```bash
+pip install "dlt[cli]"
+```
+then the command below will create a GitHub workflow that runs your pipeline script every 30 minutes:
 ```shell
-dlt deploy chess_pipeline.py github-action --schedule "*/30 * * * *" # replace chess_pipeline.py
+dlt deploy chess_pipeline.py github-action --schedule "*/30 * * * *"
 ```

-This command checks that your pipeline has run successfully before and creates a GitHub Actions
+It checks that your pipeline has run successfully before and creates a GitHub Actions
 workflow file `run_chess_workflow.yml` in `.github/workflows` with the necessary environment
 variables.
tests/cli/common/test_cli_invoke.py

Lines changed: 10 additions & 8 deletions

@@ -89,7 +89,8 @@ def test_invoke_deploy_project(script_runner: ScriptRunner) -> None:


 def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
-    with patch("dlt.cli._dlt.deploy_command") as _deploy_command:
+    # NOTE: you can mock only once per test with ScriptRunner !!
+    with patch("dlt.cli.deploy_command.deploy_command") as _deploy_command:
         script_runner.run(['dlt', 'deploy', 'debug_pipeline.py', 'github-action', '--schedule', '@daily'])
         assert _deploy_command.called
         assert _deploy_command.call_args[1] == {
@@ -102,7 +103,8 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
             "run_manually": True,
             "run_on_push": False
         }
-    with patch("dlt.cli._dlt.deploy_command") as _deploy_command:
+
+        _deploy_command.reset_mock()
         script_runner.run(['dlt', 'deploy', 'debug_pipeline.py', 'github-action', '--schedule', '@daily', '--location', 'folder', '--branch', 'branch', '--run-on-push'])
         assert _deploy_command.called
         assert _deploy_command.call_args[1] == {
@@ -115,14 +117,14 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
             "run_manually": True,
             "run_on_push": True
         }
-    # no schedule fails
-    with patch("dlt.cli._dlt.deploy_command") as _deploy_command:
+        # no schedule fails
+        _deploy_command.reset_mock()
         result = script_runner.run(['dlt', 'deploy', 'debug_pipeline.py', 'github-action'])
         assert not _deploy_command.called
         assert result.returncode != 0
         assert "the following arguments are required: --schedule" in result.stderr
-    # airflow without schedule works
-    with patch("dlt.cli._dlt.deploy_command") as _deploy_command:
+        # airflow without schedule works
+        _deploy_command.reset_mock()
         result = script_runner.run(['dlt', 'deploy', 'debug_pipeline.py', 'airflow-composer'])
         assert _deploy_command.called
         assert result.returncode == 0
@@ -134,8 +136,8 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
             "command": "deploy",
             'secrets_format': 'toml'
         }
-    # env secrets format
-    with patch("dlt.cli._dlt.deploy_command") as _deploy_command:
+        # env secrets format
+        _deploy_command.reset_mock()
         result = script_runner.run(['dlt', 'deploy', 'debug_pipeline.py', 'airflow-composer', "--secrets-format", "env"])
         assert _deploy_command.called
         assert result.returncode == 0
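The test rewrite follows the NOTE added at the top: with `ScriptRunner` the deploy command can only be patched once per test, so the later scenarios stay inside the original `with patch(...)` block and call `reset_mock()` between invocations. A self-contained sketch of that patch-once/reset pattern (stand-in objects, not the dlt test suite):

```python
# Patch once, reset between scenarios - illustrative stand-ins, not dlt code.
from types import SimpleNamespace
from unittest.mock import patch

backend = SimpleNamespace(run=lambda **kw: print("real run:", kw))

def invoke(**kwargs) -> None:
    backend.run(**kwargs)  # looked up at call time, so the patch is visible

with patch.object(backend, "run") as mocked:
    invoke(schedule="@daily")
    assert mocked.call_args[1] == {"schedule": "@daily"}

    mocked.reset_mock()  # wipe recorded calls before the next scenario
    invoke(branch="main")
    assert mocked.call_args[1] == {"branch": "main"}
```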
