Merge branch 'main' into feature/update-table-columns
dataders authored Apr 11, 2024
2 parents c8c471d + 58fc3ce commit 2bd4a35
Showing 12 changed files with 149 additions and 139 deletions.
119 changes: 0 additions & 119 deletions .circleci/config.yml

This file was deleted.

60 changes: 60 additions & 0 deletions .github/workflows/integration_tests.yml
@@ -0,0 +1,60 @@
name: Integration Testing

on:
  push:
    branches: [ "main" ]
  pull_request_target:
    branches: [ "main" ]

jobs:
  build:

    runs-on: ubuntu-latest
    environment:
      name: ci_testing
    strategy:
      fail-fast: true
      max-parallel: 3
      matrix:
        python-version: ["3.11"]  # "3.10", "3.12"
        dbt-version: ["1.7.0"]  # "1.6.0", "1.8.0b1"
        data-platform: ["redshift", "snowflake", "bigquery"]

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}
        run: |
          python -m pip install --upgrade pip
          python -m pip install "dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}" "dbt-core~=${{ matrix.dbt-version }}"
      - name: run integration_tests project on ${{ matrix.data-platform }}
        run: |
          cd integration_tests
          export DBT_PROFILES_DIR=.
          dbt deps --target ${{ matrix.data-platform }}
          dbt seed --full-refresh --target ${{ matrix.data-platform }}
          dbt run-operation prep_external --target ${{ matrix.data-platform }}
          dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }}
          dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }}
          dbt test --target ${{ matrix.data-platform }}
        env:
          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
          REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }}
          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
          SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
          SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
          SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
          BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }}
          BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
          BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }}
          BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
          BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }}
1 change: 1 addition & 0 deletions .gitignore
@@ -5,3 +5,4 @@
 **/logs/
 **/env/
 **/venv/
+**/test.env
@@ -8,7 +8,7 @@
 {{ external_schema }}
 from data catalog
 database '{{ external_schema }}'
-iam_role '{{ env_var("SPECTRUM_IAM_ROLE", "") }}'
+iam_role '{{ env_var("REDSHIFT_SPECTRUM_IAM_ROLE") }}'
 create external database if not exists;

 {% endset %}
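For reference, this macro renders Redshift DDL along these lines (a sketch; the schema name and IAM role ARN below are hypothetical placeholders, not values from the repo):

create external schema if not exists spectrum_schema
from data catalog
database 'spectrum_schema'
iam_role 'arn:aws:iam::123456789012:role/my-spectrum-role'
create external database if not exists;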
10 changes: 10 additions & 0 deletions integration_tests/models/plugins/bigquery/bigquery_external.yml
@@ -68,6 +68,16 @@ sources:
         columns: *cols-of-the-people
         tests: *equal-to-the-people
 
+      - name: people_csv_with_max_staleness
+        external:
+          location: 'gs://dbt-external-tables-testing/csv/*'
+          options:
+            format: csv
+            skip_leading_rows: 1
+            max_staleness: INTERVAL 1 HOUR
+        columns: *cols-of-the-people
+        tests: *equal-to-the-people
+
       # - name: people_json_unpartitioned
       #   external: &json-people
       #     location: 'gs://dbt-external-tables-testing/json/*'
@@ -24,18 +24,28 @@ integration_tests:
       type: snowflake
       account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
       user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
-      password: "{{ env_var('SNOWFLAKE_TEST_PASSWORD') }}"
+      password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}"
       role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
-      database: "{{ env_var('SNOWFLAKE_TEST_DATABASE') }}"
-      warehouse: "{{ env_var('SNOWFLAKE_TEST_WAREHOUSE') }}"
+      database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}"
+      warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}"
       schema: dbt_external_tables_integration_tests_snowflake
       threads: 1
 
     bigquery:
       type: bigquery
-      method: service-account
-      keyfile: "{{ env_var('BIGQUERY_SERVICE_KEY_PATH') }}"
-      project: "{{ env_var('BIGQUERY_TEST_DATABASE') }}"
+      method: service-account-json
+      keyfile_json:
+        type: "service_account"
+        project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
+        private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}"
+        private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}"
+        client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}"
+        client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}"
+        auth_uri: "https://accounts.google.com/o/oauth2/auth"
+        token_uri: "https://oauth2.googleapis.com/token"
+        auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs"
+        client_x509_cert_url: "https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}"
+      project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
       schema: dbt_external_tables_integration_tests_bigquery
       threads: 1

@@ -44,17 +54,17 @@
       method: odbc
       port: 443
       driver: "{{ env_var('ODBC_DRIVER') }}"
-      host: "{{ env_var('DBT_DATABRICKS_HOST_NAME') }}"
-      endpoint: "{{ env_var('DBT_DATABRICKS_ENDPOINT') }}"
-      token: "{{ env_var('DBT_DATABRICKS_TOKEN') }}"
+      host: "{{ env_var('DATABRICKS_TEST_HOST') }}"
+      endpoint: "{{ env_var('DATABRICKS_TEST_ENDPOINT') }}"
+      token: "{{ env_var('DATABRICKS_TOKEN') }}"
       schema: dbt_external_tables_integration_tests_databricks
 
     synapse:
       type: synapse
       driver: "ODBC Driver 17 for SQL Server"
       port: 1433
-      host: "{{ env_var('DBT_SYNAPSE_SERVER') }}.sql.azuresynapse.net"
-      database: "{{ env_var('DBT_SYNAPSE_DB') }}"
+      host: "{{ env_var('SYNAPSE_TEST_SERVER') }}.sql.azuresynapse.net"
+      database: "{{ env_var('SYNAPSE_TEST_DBNAME') }}"
       authentication: CLI
       schema: dbt_external_tables_integration_tests_synapse
       threads: 1
@@ -63,8 +73,8 @@
       type: sqlserver
       driver: "ODBC Driver 17 for SQL Server"
       port: 1433
-      host: "{{ env_var('DBT_AZURESQL_SERVER') }}"
-      database: "{{ env_var('DBT_AZURESQL_DB') }}"
+      host: "{{ env_var('AZURESQL_TEST_SERVER') }}"
+      database: "{{ env_var('AZURESQL_TEST_DBNAME') }}"
       authentication: CLI
       schema: dbt_external_tables_integration_tests_azuresql
       threads: 1
41 changes: 41 additions & 0 deletions integration_tests/test.env.sample
@@ -0,0 +1,41 @@
# gh secret set -f integration_tests/test.env -e ci_testing

# redshift
REDSHIFT_TEST_HOST=
REDSHIFT_TEST_USER=
REDSHIFT_TEST_PASS=
REDSHIFT_TEST_DBNAME=
REDSHIFT_TEST_PORT=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake
SNOWFLAKE_TEST_ACCOUNT=
SNOWFLAKE_TEST_USER=
SNOWFLAKE_TEST_PASS=
SNOWFLAKE_TEST_ROLE=
SNOWFLAKE_TEST_DBNAME=
SNOWFLAKE_TEST_WHNAME=

# bigquery
BIGQUERY_PRIVATE_KEY=
BIGQUERY_PRIVATE_KEY_ID=
BIGQUERY_CLIENT_EMAIL=
BIGQUERY_CLIENT_ID=
BIGQUERY_TEST_PROJECT=

# databricks
DATABRICKS_TEST_HOST=
DATABRICKS_TEST_ENDPOINT=
DATABRICKS_TOKEN=

# msft
SYNAPSE_TEST_SERVER=
SYNAPSE_TEST_DBNAME=
SYNAPSE_TEST_USER=
SYNAPSE_TEST_PASS=

AZURESQL_TEST_SERVER=
AZURESQL_TEST_DBNAME=
AZURESQL_TEST_USER=
AZURESQL_TEST_PASS=
3 changes: 2 additions & 1 deletion macros/plugins/bigquery/create_external_table.sql
@@ -3,6 +3,7 @@
 {%- set external = source_node.external -%}
 {%- set partitions = external.partitions -%}
 {%- set options = external.options -%}
+{%- set non_string_options = ['max_staleness'] %}
 
 {% if options is mapping and options.get('connection_name', none) %}
     {% set connection_name = options.pop('connection_name') %}
@@ -37,7 +38,7 @@
 uris = [{%- for uri in uris -%} '{{uri}}' {{- "," if not loop.last}} {%- endfor -%}]
 {%- if options is mapping -%}
   {%- for key, value in options.items() if key != 'uris' %}
-    {%- if value is string -%}
+    {%- if value is string and key not in non_string_options -%}
       , {{key}} = '{{value}}'
     {%- else -%}
       , {{key}} = {{value}}
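With this change, any option listed in non_string_options is emitted verbatim instead of quoted. That is what makes the new people_csv_with_max_staleness source work: max_staleness takes an interval expression, and rendering it as a quoted string would make BigQuery reject the DDL. A sketch of the rendered statement (the project and dataset names are hypothetical placeholders):

create or replace external table `my-project`.`my_dataset`.`people_csv_with_max_staleness`
options (
    uris = ['gs://dbt-external-tables-testing/csv/*'],
    format = 'csv',
    skip_leading_rows = 1,
    max_staleness = INTERVAL 1 HOUR
)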
3 changes: 3 additions & 0 deletions macros/plugins/snowflake/create_external_table.sql
@@ -51,6 +51,9 @@
 {% if external.auto_refresh in (true, false) -%}
   auto_refresh = {{external.auto_refresh}}
 {%- endif %}
+{% if external.table_format | lower == "delta" %}
+  refresh_on_create = false
+{% endif %}
 {% if external.pattern -%} pattern = '{{external.pattern}}' {%- endif %}
 {% if external.integration -%} integration = '{{external.integration}}' {%- endif %}
 file_format = {{external.file_format}}
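Snowflake does not support automatic refresh for Delta-format external tables, so the macro now pins refresh_on_create off for them and leaves refreshes to the run-operation. A sketch of the DDL this produces, assuming hypothetical database, stage, and file-format names:

create or replace external table my_db.my_schema.my_delta_table
    location = @my_db.my_schema.my_stage/delta/
    auto_refresh = false
    refresh_on_create = false
    file_format = (type = parquet)
    table_format = delta;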
3 changes: 2 additions & 1 deletion macros/plugins/snowflake/refresh_external_table.sql
@@ -5,8 +5,9 @@
 
 {% set auto_refresh = external.get('auto_refresh', false) %}
 {% set partitions = external.get('partitions', none) %}
+{% set delta_format = (external.table_format | lower == "delta") %}
 
-{% set manual_refresh = (partitions and not auto_refresh) %}
+{% set manual_refresh = not auto_refresh %}
 
 {% if manual_refresh %}
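The practical effect: manual refreshes used to run only for partitioned sources, but now any external table with auto_refresh disabled is refreshed on every stage_external_sources call, via a statement of roughly this shape (the relation name is a hypothetical placeholder):

alter external table my_db.my_schema.my_external_table refresh;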
1 change: 1 addition & 0 deletions macros/plugins/snowflake/snowpipe/create_empty_table.sql
@@ -12,6 +12,7 @@
     {% endif %}
     metadata_filename varchar,
     metadata_file_row_number bigint,
+    metadata_file_last_modified timestamp,
     _dbt_copied_at timestamp
 );
 
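The empty landing table created for snowpipe-backed sources therefore gains a column; a freshly created table looks roughly like this (the leading columns come from the source definition and are hypothetical here):

create or replace table my_db.my_schema.my_snowpipe_table (
    id varchar,
    name varchar,
    metadata_filename varchar,
    metadata_file_row_number bigint,
    metadata_file_last_modified timestamp,
    _dbt_copied_at timestamp
);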