Skip to content

Commit 402400a

Browse files
committed
Additional test adjustments
1 parent e331d8b commit 402400a

File tree

5 files changed

+33
-23
lines changed

5 files changed

+33
-23
lines changed

docs/website/docs/dlt-ecosystem/destinations/destination.md

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -127,35 +127,31 @@ There are multiple ways to pass the custom destination function to the `dlt` pip
127127
p = dlt.pipeline("my_pipe", destination=my_destination(api_key=os.getenv("API_KEY"))) # type: ignore[call-arg]
128128
```
129129

130-
- Directly via destination reference. In this case, don't use the decorator for the destination function.
130+
- Via the function that initializes the destination. In this case, don't use the decorator for the destination function.
131131
```py
132132
# File my_destination.py
133133

134-
from dlt.common.destination import Destination
135-
136134
# Don't use the decorator
137135
def local_destination_func(items: TDataItems, table: TTableSchema) -> None:
138136
...
139137

140-
# Via destination reference
138+
# Via the function that initializes the destination
141139
p = dlt.pipeline(
142140
"my_pipe",
143-
destination=Destination.from_reference(
144-
"destination", destination_callable=local_destination_func
141+
destination=dlt.destination(
142+
"my_destination", destination_callable=local_destination_func
145143
)
146144
)
147145
```
148146
- Via a fully qualified string to function location (this can be set in `config.toml` or through environment variables). The destination function should be located in another file.
149147
```py
150148
# File my_pipeline.py
151149

152-
from dlt.common.destination import Destination
153-
154150
# Fully qualified string to function location
155151
p = dlt.pipeline(
156152
"my_pipe",
157-
destination=Destination.from_reference(
158-
"destination", destination_callable="my_destination.local_destination_func"
153+
destination=dlt.destination(
154+
"my_destination", destination_callable="my_destination.local_destination_func"
159155
)
160156
)
161157
```

docs/website/docs/general-usage/destination.md

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -116,6 +116,7 @@ To configure multiple destinations within a pipeline, you need to provide the cr
116116

117117
```toml
118118
[destination.destination_one]
119+
destination_type = "bigquery"
119120
location = "US"
120121
[destination.destination_one.credentials]
121122
project_id = "please set me up!"
@@ -126,15 +127,11 @@ client_email = "please set me up!"
126127
You can then use this destination in your pipeline as follows:
127128
```py
128129
import dlt
129-
from dlt.common.destination import Destination
130130

131131
# Configure the pipeline to use the "destination_one" BigQuery destination
132132
pipeline = dlt.pipeline(
133133
pipeline_name='pipeline',
134-
destination=Destination.from_reference(
135-
"bigquery",
136-
destination_name="destination_one"
137-
),
134+
destination=dlt.destination("destination_one"),
138135
dataset_name='dataset_name'
139136
)
140137
```

tests/load/duckdb/test_duckdb_client.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,12 @@ def test_credentials_wrong_config() -> None:
307307

308308

309309
@pytest.mark.no_load
310-
def test_duckdb_in_memory_mode_via_factory():
310+
@pytest.mark.parametrize(
311+
"use_dest_decorator",
312+
[True, False],
313+
ids=["using_dest_decorator", "without_using_dest_decorator"],
314+
)
315+
def test_duckdb_in_memory_mode_via_factory(use_dest_decorator: bool):
311316
import duckdb
312317

313318
# Check if passing external duckdb connection works fine
@@ -334,9 +339,12 @@ def test_duckdb_in_memory_mode_via_factory():
334339
assert isinstance(exc.value.exception, InvalidInMemoryDuckdbCredentials)
335340

336341
with pytest.raises(PipelineStepFailed) as exc:
342+
# NOTE: using dlt.destination as factory initializer and Destination.from_reference
343+
# should behave the same
344+
dest_ref_func = dlt.destination if use_dest_decorator else Destination.from_reference
337345
p = dlt.pipeline(
338346
pipeline_name="booboo",
339-
destination=Destination.from_reference("duckdb", credentials=":memory:"),
347+
destination=dest_ref_func("duckdb", credentials=":memory:"),
340348
)
341349
p.run([1, 2, 3], table_name="numbers")
342350

tests/load/pipeline/test_postgres.py

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -58,20 +58,30 @@ def test_postgres_encoded_binary(
5858

5959
# do not remove - it allows us to filter tests by destination
6060
@pytest.mark.no_load
61+
@pytest.mark.parametrize(
62+
"use_dest_decorator",
63+
[True, False],
64+
ids=["using_dest_decorator", "without_using_dest_decorator"],
65+
)
6166
@pytest.mark.parametrize(
6267
"destination_config",
6368
destinations_configs(default_sql_configs=True, subset=["postgres"]),
6469
ids=lambda x: x.name,
6570
)
6671
def test_pipeline_explicit_destination_credentials(
72+
use_dest_decorator: bool,
6773
destination_config: DestinationTestConfiguration,
6874
) -> None:
6975
from dlt.destinations import postgres
7076
from dlt.destinations.impl.postgres.configuration import PostgresCredentials
7177

78+
# NOTE: using dlt.destination as factory initializer and Destination.from_reference
79+
# should behave the same
80+
dest_ref_func = dlt.destination if use_dest_decorator else Destination.from_reference
81+
7282
# explicit credentials resolved
7383
p = dlt.pipeline(
74-
destination=Destination.from_reference(
84+
destination=dest_ref_func(
7585
"postgres",
7686
destination_name="mydest",
7787
credentials="postgresql://loader:loader@localhost:7777/dlt_data",
@@ -84,7 +94,7 @@ def test_pipeline_explicit_destination_credentials(
8494
# explicit credentials resolved ignoring the config providers
8595
os.environ["DESTINATION__MYDEST__CREDENTIALS__HOST"] = "HOST"
8696
p = dlt.pipeline(
87-
destination=Destination.from_reference(
97+
destination=dest_ref_func(
8898
"postgres",
8999
destination_name="mydest",
90100
credentials="postgresql://loader:loader@localhost:5432/dlt_data",
@@ -97,7 +107,7 @@ def test_pipeline_explicit_destination_credentials(
97107
os.environ["DESTINATION__MYDEST__CREDENTIALS__USERNAME"] = "UN"
98108
os.environ["DESTINATION__MYDEST__CREDENTIALS__PASSWORD"] = "PW"
99109
p = dlt.pipeline(
100-
destination=Destination.from_reference(
110+
destination=dest_ref_func(
101111
"postgres",
102112
destination_name="mydest",
103113
credentials="postgresql://localhost:5432/dlt_data",

tests/pipeline/test_pipeline_state.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
from dlt.common.storages import FileStorage
1717
from dlt.common.storages.load_package import TPipelineStateDoc
1818
from dlt.common.utils import uniq_id
19-
from dlt.common.destination import Destination
2019
from dlt.common.destination.client import StateInfo
2120
from dlt.common.validation import validate_dict
2221

@@ -103,8 +102,8 @@ def test_state_repr() -> None:
103102
def test_restore_state_props() -> None:
104103
p = dlt.pipeline(
105104
pipeline_name="restore_state_props",
106-
destination=Destination.from_reference("redshift", destination_name="redshift_name"),
107-
staging=Destination.from_reference("filesystem", destination_name="filesystem_name"),
105+
destination=dlt.destination("redshift_name", destination_type="redshift"),
106+
staging=dlt.destination("filesystem_name", destination_type="filesystem"),
108107
dataset_name="the_dataset",
109108
)
110109
print(get_dlt_pipelines_dir())

0 commit comments

Comments (0)