
Commit

Merge branch 'main' into main
dataders authored Apr 10, 2024
2 parents 7dc9428 + a95bc05 commit 35f29f4
Showing 10 changed files with 141 additions and 135 deletions.
119 changes: 0 additions & 119 deletions .circleci/config.yml

This file was deleted.

60 changes: 60 additions & 0 deletions .github/workflows/integration_tests.yml
@@ -0,0 +1,60 @@
name: Integration Testing

on:
  push:
    branches: [ "main" ]
  pull_request_target:
    branches: [ "main" ]

jobs:
  build:

    runs-on: ubuntu-latest
    environment:
      name: ci_testing
    strategy:
      fail-fast: true
      max-parallel: 3
      matrix:
        python-version: ["3.11"]  # "3.10", "3.12"
        dbt-version: ["1.7.0"]  # "1.6.0", "1.8.0b1"
        data-platform: ["redshift", "snowflake", "bigquery"]

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}
        run: |
          python -m pip install --upgrade pip
          python -m pip install "dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}" "dbt-core~=${{ matrix.dbt-version }}"
      - name: run integration_tests project on ${{ matrix.data-platform }}
        run: |
          cd integration_tests
          export DBT_PROFILES_DIR=.
          dbt deps --target ${{ matrix.data-platform }}
          dbt seed --full-refresh --target ${{ matrix.data-platform }}
          dbt run-operation prep_external --target ${{ matrix.data-platform }}
          dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }}
          dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }}
          dbt test --target ${{ matrix.data-platform }}
        env:
          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
          REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }}
          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
          SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
          SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
          SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
          BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }}
          BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
          BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }}
          BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
          BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }}
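
The job body maps one-to-one onto commands you can run yourself. A minimal local sketch, assuming the relevant *_TEST_* variables (see test.env.sample below) are already exported and Snowflake is the target under test:

    cd integration_tests
    export DBT_PROFILES_DIR=.   # use the profiles.yml in this directory
    dbt deps --target snowflake
    dbt seed --full-refresh --target snowflake
    dbt run-operation prep_external --target snowflake
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target snowflake
    dbt run-operation dbt_external_tables.stage_external_sources --target snowflake
    dbt test --target snowflake
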
1 change: 1 addition & 0 deletions .gitignore
@@ -5,3 +5,4 @@
**/logs/
**/env/
**/venv/
**/test.env
@@ -8,7 +8,7 @@
{{ external_schema }}
from data catalog
database '{{ external_schema }}'
iam_role '{{ env_var("SPECTRUM_IAM_ROLE", "") }}'
iam_role '{{ env_var("REDSHIFT_SPECTRUM_IAM_ROLE") }}'
create external database if not exists;

{% endset %}
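
Dropping the empty-string default changes behavior in this Redshift prep_external macro: with a single argument, env_var() raises a compilation error when the variable is unset, so the Spectrum role is now an explicit prerequisite. A sketch (the role ARN is hypothetical):

    export REDSHIFT_SPECTRUM_IAM_ROLE='arn:aws:iam::123456789012:role/my-spectrum-role'   # hypothetical ARN
    dbt run-operation prep_external --target redshift
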
10 changes: 10 additions & 0 deletions integration_tests/models/plugins/bigquery/bigquery_external.yml
@@ -67,6 +67,16 @@ sources:
        columns: *cols-of-the-people
        tests: *equal-to-the-people

      - name: people_csv_with_max_staleness
        external:
          location: 'gs://dbt-external-tables-testing/csv/*'
          options:
            format: csv
            skip_leading_rows: 1
            max_staleness: INTERVAL 1 HOUR
        columns: *cols-of-the-people
        tests: *equal-to-the-people

      # - name: people_json_unpartitioned
      #   external: &json-people
      #     location: 'gs://dbt-external-tables-testing/json/*'
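
To exercise only the new source rather than every external table in the project, the package's stage_external_sources operation supports selecting a subset via --args; a hedged sketch:

    dbt run-operation dbt_external_tables.stage_external_sources --args 'select: people_csv_with_max_staleness' --target bigquery
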
@@ -24,18 +24,28 @@ integration_tests:
      type: snowflake
      account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
      user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
      password: "{{ env_var('SNOWFLAKE_TEST_PASSWORD') }}"
      password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}"
      role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
      database: "{{ env_var('SNOWFLAKE_TEST_DATABASE') }}"
      warehouse: "{{ env_var('SNOWFLAKE_TEST_WAREHOUSE') }}"
      database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}"
      warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}"
      schema: dbt_external_tables_integration_tests_snowflake
      threads: 1

    bigquery:
      type: bigquery
      method: service-account
      keyfile: "{{ env_var('BIGQUERY_SERVICE_KEY_PATH') }}"
      project: "{{ env_var('BIGQUERY_TEST_DATABASE') }}"
      method: service-account-json
      keyfile_json:
        type: "service_account"
        project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
        private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}"
        private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}"
        client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}"
        client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}"
        auth_uri: "https://accounts.google.com/o/oauth2/auth"
        token_uri: "https://oauth2.googleapis.com/token"
        auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs"
        client_x509_cert_url: "https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}"
      project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
      schema: dbt_external_tables_integration_tests_bigquery
      threads: 1

@@ -44,17 +54,17 @@
      method: odbc
      port: 443
      driver: "{{ env_var('ODBC_DRIVER') }}"
      host: "{{ env_var('DBT_DATABRICKS_HOST_NAME') }}"
      endpoint: "{{ env_var('DBT_DATABRICKS_ENDPOINT') }}"
      token: "{{ env_var('DBT_DATABRICKS_TOKEN') }}"
      host: "{{ env_var('DATABRICKS_TEST_HOST') }}"
      endpoint: "{{ env_var('DATABRICKS_TEST_ENDPOINT') }}"
      token: "{{ env_var('DATABRICKS_TOKEN') }}"
      schema: dbt_external_tables_integration_tests_databricks

    synapse:
      type: synapse
      driver: "ODBC Driver 17 for SQL Server"
      port: 1433
      host: "{{ env_var('DBT_SYNAPSE_SERVER') }}.sql.azuresynapse.net"
      database: "{{ env_var('DBT_SYNAPSE_DB') }}"
      host: "{{ env_var('SYNAPSE_TEST_SERVER') }}.sql.azuresynapse.net"
      database: "{{ env_var('SYNAPSE_TEST_DBNAME') }}"
      authentication: CLI
      schema: dbt_external_tables_integration_tests_synapse
      threads: 1
@@ -63,8 +73,8 @@
      type: sqlserver
      driver: "ODBC Driver 17 for SQL Server"
      port: 1433
      host: "{{ env_var('DBT_AZURESQL_SERVER') }}"
      database: "{{ env_var('DBT_AZURESQL_DB') }}"
      host: "{{ env_var('AZURESQL_TEST_SERVER') }}"
      database: "{{ env_var('AZURESQL_TEST_DBNAME') }}"
      authentication: CLI
      schema: dbt_external_tables_integration_tests_azuresql
      threads: 1
41 changes: 41 additions & 0 deletions integration_tests/test.env.sample
@@ -0,0 +1,41 @@
# gh secret set -f integration_tests/test.env -e ci_testing

# redshift
REDSHIFT_TEST_HOST=
REDSHIFT_TEST_USER=
REDSHIFT_TEST_PASS=
REDSHIFT_TEST_DBNAME=
REDSHIFT_TEST_PORT=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake

SNOWFLAKE_TEST_ACCOUNT=
SNOWFLAKE_TEST_USER=
SNOWFLAKE_TEST_PASS=
SNOWFLAKE_TEST_ROLE=
SNOWFLAKE_TEST_DBNAME=
SNOWFLAKE_TEST_WHNAME=

# bigquery
BIGQUERY_PRIVATE_KEY=
BIGQUERY_PRIVATE_KEY_ID=
BIGQUERY_CLIENT_EMAIL=
BIGQUERY_CLIENT_ID=
BIGQUERY_TEST_PROJECT=

# databricks
DATABRICKS_TEST_HOST=
DATABRICKS_TEST_ENDPOINT=
DATABRICKS_TOKEN=

# msft
SYNAPSE_TEST_SERVER=
SYNAPSE_TEST_DBNAME=
SYNAPSE_TEST_USER=
SYNAPSE_TEST_PASS=

AZURESQL_TEST_SERVER=
AZURESQL_TEST_DBNAME=
AZURESQL_TEST_USER=
AZURESQL_TEST_PASS=
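
For local runs the same file can be sourced straight into the shell; for CI, the comment at the top of the sample shows how to load it into the ci_testing GitHub environment. A sketch:

    # local: export every assignment in test.env
    set -a
    source integration_tests/test.env
    set +a
    dbt debug --target bigquery   # or any other configured target

    # CI: push the same values as environment secrets
    gh secret set -f integration_tests/test.env -e ci_testing
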
2 changes: 1 addition & 1 deletion macros/plugins/bigquery/create_external_schema.sql
@@ -8,7 +8,7 @@
{%- endset -%}

{% set schema_exists_query %}
select * from {{ source_node.database }}.INFORMATION_SCHEMA.SCHEMATA where schema_name = '{{ source_node.schema }}' limit 1
select * from `{{ source_node.database }}`.INFORMATION_SCHEMA.SCHEMATA where schema_name = '{{ source_node.schema }}' limit 1
{% endset %}
{% if execute %}
{% set schema_exists = run_query(schema_exists_query)|length > 0 %}
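
The added backticks matter because GCP project ids routinely contain hyphens, which BigQuery otherwise parses as an operator. A sketch with a hypothetical project id:

    -- unquoted: fails to parse when the project id is hyphenated
    select * from my-gcp-project.INFORMATION_SCHEMA.SCHEMATA where schema_name = 'my_schema' limit 1;

    -- backtick-quoted: valid for any project id
    select * from `my-gcp-project`.INFORMATION_SCHEMA.SCHEMATA where schema_name = 'my_schema' limit 1;
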
3 changes: 2 additions & 1 deletion macros/plugins/bigquery/create_external_table.sql
@@ -4,6 +4,7 @@
{%- set external = source_node.external -%}
{%- set partitions = external.partitions -%}
{%- set options = external.options -%}
{%- set non_string_options = ['max_staleness'] %}

{% if options is mapping and options.get('connection_name', none) %}
{% set connection_name = options.pop('connection_name') %}
@@ -38,7 +39,7 @@
uris = [{%- for uri in uris -%} '{{uri}}' {{- "," if not loop.last}} {%- endfor -%}]
{%- if options is mapping -%}
{%- for key, value in options.items() if key != 'uris' %}
{%- if value is string -%}
{%- if value is string and key not in non_string_options -%}
, {{key}} = '{{value}}'
{%- else -%}
, {{key}} = {{value}}
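
The non_string_options allowlist exists because max_staleness arrives from the source YAML as the string 'INTERVAL 1 HOUR' but must be emitted as a bare literal; quoting it would produce invalid DDL. A hedged sketch of the DDL the macro should now render for the people_csv_with_max_staleness source above (dataset and column list illustrative):

    create or replace external table my_dataset.people_csv_with_max_staleness
    -- column list omitted
    options (
        uris = ['gs://dbt-external-tables-testing/csv/*'],
        format = 'csv',                    -- string option: quoted
        skip_leading_rows = 1,             -- non-string value: bare
        max_staleness = INTERVAL 1 HOUR    -- string in YAML, now also rendered bare
    )
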
2 changes: 2 additions & 0 deletions run_test.sh
@@ -18,6 +18,8 @@ if [[ ! -f $VENV ]]; then
else
echo "Installing dbt-$1"
pip install dbt-$1 --upgrade --pre
# remove the protobuf pin once all the dbt adapter packages are updated to dbt-core 1.7.9
pip install protobuf==4.25.3
fi
fi

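Judging from the "pip install dbt-$1" line above, the script takes the adapter name as its first positional argument; a usage sketch, run from the repository root:

    ./run_test.sh bigquery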
