Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 0 additions & 5 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -125,11 +125,6 @@ test-load-local-postgres:
test-common:
uv run pytest tests/common tests/normalize tests/extract tests/pipeline tests/reflection tests/sources tests/workspace tests/load/test_dummy_client.py tests/libs tests/destinations

reset-test-storage:
-rm -r _storage
mkdir _storage
python3 tests/tools/create_storages.py

build-library: dev
uv version
uv build
Expand Down
2 changes: 2 additions & 0 deletions dlt/_workspace/cli/_deploy_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ def __init__(
run_on_push: bool = False,
run_manually: bool = False,
branch: Optional[str] = None,
**kwargs: Any,
):
super().__init__(pipeline_script_path, location, branch)
self.schedule = schedule
Expand Down Expand Up @@ -264,6 +265,7 @@ def __init__(
location: str,
branch: Optional[str] = None,
secrets_format: Optional[str] = None,
**kwargs: Any,
):
super().__init__(pipeline_script_path, location, branch)
self.secrets_format = secrets_format
Expand Down
16 changes: 14 additions & 2 deletions dlt/_workspace/cli/_dlt.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import sys
from typing import Any, Sequence, Type, cast, List, Dict, Tuple
import argparse
import click
import rich_argparse
from rich.markdown import Markdown

Expand Down Expand Up @@ -134,6 +134,15 @@ def _create_parser() -> Tuple[argparse.ArgumentParser, Dict[str, SupportsCliComm
" clear enough."
),
)
parser.add_argument(
"--no-pwd",
default=False,
action="store_true",
help=(
"Do not add current working directory to sys.path. By default $pwd is added to "
"reproduce Python behavior when running scripts."
),
)
subparsers = parser.add_subparsers(title="Available subcommands", dest="command")

# load plugins
Expand Down Expand Up @@ -190,6 +199,9 @@ def main() -> int:
# switch to non-interactive if tty not connected
with maybe_no_stdin():
display_run_context_info()
if not args.no_pwd:
if "" not in sys.path:
sys.path.insert(0, "")
cmd.execute(args)
except Exception as ex:
docs_url = cmd.docs_url if hasattr(cmd, "docs_url") else DEFAULT_DOCS_URL
Expand All @@ -204,7 +216,7 @@ def main() -> int:

# print exception if available
if raiseable_exception:
click.secho(str(ex), err=True, fg="red")
fmt.secho(str(ex), err=True, fg="red")

fmt.note("Please refer to our docs at '%s' for further assistance." % docs_url)
if _debug.is_debug_enabled() and raiseable_exception:
Expand Down
7 changes: 2 additions & 5 deletions dlt/_workspace/cli/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,8 @@ def display_run_context_info() -> None:
if run_context.default_profile != run_context.profile:
# print warning
fmt.echo(
"Profile %s activated on %s"
% (
fmt.style(run_context.profile, fg="yellow", reset=True),
fmt.bold(run_context.name),
),
"Profile `%s` is active."
% (fmt.style(run_context.profile, fg="yellow", reset=True),),
err=True,
)

Expand Down
11 changes: 5 additions & 6 deletions docs/examples/custom_naming/custom_naming.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

With this example you will learn to:
* Create a naming convention module with a recommended layout
* Use naming convention by explicitly passing it to `duckdb` destination factory
* Use a naming convention by explicitly passing the module name to the `postgres` destination factory
* Use naming convention by configuring it config.toml
* Changing the declared case sensitivity by overriding `is_case_sensitive` property
* Providing custom normalization logic by overriding `normalize_identifier` method
Expand All @@ -28,12 +28,11 @@
import dlt

if __name__ == "__main__":
# sql_cs_latin2 module
import sql_cs_latin2 # type: ignore[import-not-found]

# create postgres destination with a custom naming convention. pass sql_cs_latin2 as module
# NOTE: ql_cs_latin2 is case sensitive and postgres accepts UNICODE letters in identifiers
dest_ = dlt.destinations.postgres(naming_convention=sql_cs_latin2)
# create postgres destination with a custom naming convention. sql_cs_latin2 is an importable
# module
# import sql_cs_latin2 # is resolving in this context
dest_ = dlt.destinations.postgres(naming_convention="sql_cs_latin2")

# run a pipeline
pipeline = dlt.pipeline(
Expand Down
40 changes: 34 additions & 6 deletions docs/website/docs/general-usage/naming-convention.md
Original file line number Diff line number Diff line change
Expand Up @@ -134,13 +134,16 @@ password="pass"
```
The snippet above demonstrates how to apply a certain naming for an example `zendesk` source.

You can use naming conventions that you created yourself or got from other users. In that case, you should pass a full Python import path to the [module that contains the naming convention](#write-your-own-naming-convention):
```toml
[schema]
naming="tests.common.cases.normalizers.sql_upper"
You can set the naming convention in your code via the destination factory. This will override the destination's preferred convention and make it
the default one for the whole pipeline:

```py
import dlt

dest_ = dlt.destinations.postgres(naming_convention="sql_cs_v1")
```
`dlt` will import `tests.common.cases.normalizers.sql_upper` and use the `NamingConvention` class found in it as the naming convention.

You can use naming conventions that you created yourself or got from other users. In that case, you should pass a full Python import path to the [module that contains the naming convention](#write-your-own-naming-convention):

### Available naming conventions
You can pick from a few built-in naming conventions.
Expand Down Expand Up @@ -191,7 +194,32 @@ Custom naming conventions are classes that derive from `NamingConvention`, which
1. Each naming convention resides in a separate Python module (file).
2. The class is always named `NamingConvention`.

In that case, you can use a fully qualified module name in [schema configuration](#configure-naming-convention) or pass the module [explicitly](#configure-naming-convention).
In that case, you can use a fully qualified module name in [schema configuration](#configure-naming-convention) or pass the module's fully qualified name [explicitly](#configure-naming-convention).

```toml
[schema]
naming="tests.common.cases.normalizers.sql_upper"
```
`dlt` will import `tests.common.cases.normalizers.sql_upper` and use the `NamingConvention` class found in it as the naming convention.

:::tip
Do not pass custom naming conventions as module objects when setting them explicitly. We recommend the pattern below:
```py
import dlt

dest_ = dlt.destinations.postgres(naming_convention="my_package.sql_cs_latin2")
```

⛔ Avoid this, or you may get pickle errors, e.g., when using parallel normalization:
```py
import dlt

import my_package.sql_cs_latin2 # type: ignore[import-not-found]

dest_ = dlt.destinations.postgres(naming_convention=my_package.sql_cs_latin2)
```
:::


We include [two examples](../examples/custom_naming) of naming conventions that you may find useful:

Expand Down
3 changes: 2 additions & 1 deletion docs/website/docs/reference/command-line-interface.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ Creates, adds, inspects and deploys dlt pipelines. Further help is available at
**Usage**
```sh
dlt [-h] [--version] [--disable-telemetry] [--enable-telemetry]
[--non-interactive] [--debug]
[--non-interactive] [--debug] [--no-pwd]
{telemetry,schema,pipeline,init,render-docs,deploy,dashboard,ai} ...
```

Expand All @@ -45,6 +45,7 @@ dlt [-h] [--version] [--disable-telemetry] [--enable-telemetry]
* `--enable-telemetry` - Enables telemetry before command is executed
* `--non-interactive` - Non interactive mode. default choices are automatically made for confirmations and prompts.
* `--debug` - Displays full stack traces on exceptions. useful for debugging if the output is not clear enough.
* `--no-pwd` - Do not add current working directory to sys.path. by default $pwd is added to reproduce python behavior when running scripts.

**Available subcommands**
* [`telemetry`](#dlt-telemetry) - Shows telemetry status
Expand Down
4 changes: 4 additions & 0 deletions tests/workspace/cli/common/test_cli_invoke.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,7 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
assert _deploy_command.call_args[1] == {
"pipeline_script_path": "debug_pipeline.py",
"deployment_method": "github-action",
"no_pwd": False,
"repo_location": "https://github.com/dlt-hub/dlt-deploy-template.git",
"branch": None,
"command": "deploy",
Expand Down Expand Up @@ -176,6 +177,7 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
assert _deploy_command.call_args[1] == {
"pipeline_script_path": "debug_pipeline.py",
"deployment_method": "github-action",
"no_pwd": False,
"repo_location": "folder",
"branch": "branch",
"command": "deploy",
Expand All @@ -197,6 +199,7 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
assert _deploy_command.call_args[1] == {
"pipeline_script_path": "debug_pipeline.py",
"deployment_method": "airflow-composer",
"no_pwd": False,
"repo_location": "https://github.com/dlt-hub/dlt-deploy-template.git",
"branch": None,
"command": "deploy",
Expand All @@ -212,6 +215,7 @@ def test_invoke_deploy_mock(script_runner: ScriptRunner) -> None:
assert _deploy_command.call_args[1] == {
"pipeline_script_path": "debug_pipeline.py",
"deployment_method": "airflow-composer",
"no_pwd": False,
"repo_location": "https://github.com/dlt-hub/dlt-deploy-template.git",
"branch": None,
"command": "deploy",
Expand Down
Loading