3 changes: 2 additions & 1 deletion .gitignore
@@ -2,4 +2,5 @@ target/
 dbt_modules/
 logs/
 .DS_Store
-dbt_packages/
+dbt_packages/
+integration_tests/package-lock.yml
10 changes: 5 additions & 5 deletions integration_tests/dbt_project.yml
@@ -88,15 +88,15 @@ seeds:
     contact_phone:
       +column_types:
         index: "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"
-        value: "{{ 'string' if target.name in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
+        value: "{{ 'string' if target.type in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
     contact_link:
       +column_types:
-        contact_id: "{{ 'string' if target.name in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
-        link: "{{ 'string' if target.name in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
+        contact_id: "{{ 'string' if target.type in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
+        link: "{{ 'string' if target.type in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
     resume:
       +column_types:
-        id: "{{ 'string' if target.name in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
-        file_ext: "{{ 'string' if target.name in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
+        id: "{{ 'string' if target.type in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
+        file_ext: "{{ 'string' if target.type in ('bigquery', 'spark', 'databricks') else 'varchar' }}"
         created_at: timestamp
         file_uploaded_at: timestamp

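Why target.type rather than target.name: target.name is whatever the active target happens to be called in profiles.yml (often something like "dev" or "bigquery_ci"), while target.type is the adapter type ("bigquery", "spark", "databricks", ...). Unless a profile names its target exactly "bigquery", the old condition never matched, so these seed columns were always created as varchar. A minimal sketch of the difference, assuming a hypothetical profile whose target is named bigquery_ci and whose adapter type is bigquery:

    {# Sketch only; the profile below is hypothetical.
       lever_source_integration_tests:
         target: bigquery_ci          <- this string is target.name
         outputs:
           bigquery_ci:
             type: bigquery           <- this string is target.type
    #}
    {{ 'string' if target.name in ('bigquery', 'spark', 'databricks') else 'varchar' }}  {# renders varchar: target.name is 'bigquery_ci' #}
    {{ 'string' if target.type in ('bigquery', 'spark', 'databricks') else 'varchar' }}  {# renders string: target.type is 'bigquery' #}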
6 changes: 3 additions & 3 deletions integration_tests/seeds/resume.csv
@@ -1,3 +1,3 @@
-id,_fivetran_synced,candidate_id,created_at,file_download_url,file_ext,file_name,file_uploaded_at,opportunity_id
-2405ee3f-8412-458a-b242-9831b6c8ea60,2020-10-18 00:21:27.606,,2020-08-15 18:52:46.221,https://api.lever.co/v1/candidates/d1fc9816-cb00-4c30-8d84-556b265e6df8/resumes/2405ee3f-8412-458a-b242-9831b6c8ea60/download,.,Resume,2020-08-15 18:52:46.408,d1fc9816-cb00-4c30-8d84-556b265e6df8
-df82e2be-8c32-4834-90b3-712618513a91,2020-07-21 06:20:27.143,,2019-10-16 04:01:37.504,https://api.lever.co/v1/candidates/445178b2-4d0e-41f2-91eb-fa94ad5ac347/resumes/df82e2be-8c32-4834-90b3-712618513a91/download,.,M_resume,2019-10-16 04:01:36.687,445178b2-4d0e-41f2-91eb-fa94ad5ac347
+id,_fivetran_synced,candidate_id,created_at,created_at_epoch,file_download_url,file_ext,file_name,file_uploaded_at,opportunity_id
+2405ee3f-8412-458a-b242-9831b6c8ea60,2020-10-18 00:21:27.606,,2020-08-15 18:52:46.221,1597517566221,https://api.lever.co/v1/candidates/d1fc9816-cb00-4c30-8d84-556b265e6df8/resumes/2405ee3f-8412-458a-b242-9831b6c8ea60/download,.,Resume,2020-08-15 18:52:46.408,d1fc9816-cb00-4c30-8d84-556b265e6df8
+df82e2be-8c32-4834-90b3-712618513a91,2020-07-21 06:20:27.143,,2019-10-16 04:01:37.504,1571198497504,https://api.lever.co/v1/candidates/445178b2-4d0e-41f2-91eb-fa94ad5ac347/resumes/df82e2be-8c32-4834-90b3-712618513a91/download,.,M_resume,2019-10-16 04:01:36.687,445178b2-4d0e-41f2-91eb-fa94ad5ac347
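The added created_at_epoch values encode the same instants as created_at, as Unix epoch milliseconds: 1597517566221 ms is 2020-08-15 18:52:46.221 UTC and 1571198497504 ms is 2019-10-16 04:01:37.504 UTC. A quick sanity check once the seed is loaded (a sketch, assuming dbt_date is installed and the seed is referenced as resume):

    -- sketch: compare the raw datetime with the converted epoch value
    select
        created_at,
        {{ dbt_date.from_unixtimestamp('created_at_epoch', "milliseconds") }} as created_at_from_epoch
    from {{ ref('resume') }}
    -- the two columns should agree row for row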
1 change: 1 addition & 0 deletions macros/get_resume_columns.sql
@@ -3,6 +3,7 @@
 {% set columns = [
     {"name": "_fivetran_synced", "datatype": dbt.type_timestamp()},
     {"name": "created_at", "datatype": dbt.type_timestamp()},
+    {"name": "created_at_epoch", "datatype": dbt.type_bigint()},
     {"name": "file_download_url", "datatype": dbt.type_string()},
     {"name": "file_ext", "datatype": dbt.type_string()},
     {"name": "file_name", "datatype": dbt.type_string()},
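Registering created_at_epoch in get_resume_columns keeps the staging model buildable whether or not a given connector's resume table actually has the column: in the usual Fivetran source-package pattern the list is passed to fivetran_utils.fill_staging_columns, which emits a typed null for any column missing from the source relation. The select inside the fields CTE is not shown in this diff, but it presumably looks roughly like this sketch:

    {# sketch of the assumed fields CTE, not the literal file contents #}
    select
        {{
            fivetran_utils.fill_staging_columns(
                source_columns=adapter.get_columns_in_relation(ref('stg_lever__resume_tmp')),
                staging_columns=get_resume_columns()
            )
        }}
    from base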
18 changes: 16 additions & 2 deletions models/stg_lever__resume.sql
@@ -19,17 +19,31 @@ fields as (
     from base
 ),
+
+{# Figure out if the fields are Epoch or datetime timestamps #}
+{%- set columns = adapter.get_columns_in_relation(ref('stg_lever__resume_tmp')) -%}
+{%- set timestamp_fields = [] -%}
+{%- for column in columns -%}
+    {%- if column.name|lower in ['created_at', 'created_at_epoch'] -%}
+        {%- do timestamp_fields.append({"name": column.name|lower, "is_epoch": column.is_integer()}) -%}
+    {%- endif -%}
+{%- endfor -%}
+
 final as (
 
     select
         id,
         cast(_fivetran_synced as {{ dbt.type_timestamp() }}) as _fivetran_synced,
-        cast(created_at as {{ dbt.type_timestamp() }}) as created_at,
+
+        {%- for timestamp in timestamp_fields %}
+        cast( {{ dbt_date.from_unixtimestamp(timestamp.name, "milliseconds") if timestamp.is_epoch else timestamp.name }} as {{ dbt.type_timestamp() }}) as {{ timestamp.name }},
+        {%- endfor %}
+
         file_download_url,
         file_ext as file_extension,
         file_name,
-        cast(file_uploaded_at as {{ dbt.type_timestamp() }}) as file_uploaded_at,
+        cast(file_uploaded_at as {{ dbt.type_timestamp() }}) as file_uploaded_at, -- would need to do the same for file_uploaded_at
         opportunity_id
 
     from fields
 )
 
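The block added above decides at compile time whether each of created_at / created_at_epoch arrived as an integer (epoch milliseconds) or as a datetime: adapter.get_columns_in_relation inspects the _tmp model, and column.is_integer() flags the epoch variant. Epoch columns are routed through dbt_date.from_unixtimestamp before the timestamp cast; the rest are cast directly. A sketch of what the loop expands to when the relation has a datetime created_at and an integer created_at_epoch (the nested macros left unresolved, since their SQL is warehouse-specific):

    -- sketch of the loop's output for one datetime column and one epoch column
    cast( created_at as {{ dbt.type_timestamp() }}) as created_at,
    cast( {{ dbt_date.from_unixtimestamp('created_at_epoch', "milliseconds") }} as {{ dbt.type_timestamp() }}) as created_at_epoch,

As the inline comment notes, file_uploaded_at is still cast directly; the same detection would be needed if that column ever starts arriving as epoch milliseconds.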
2 changes: 2 additions & 0 deletions packages.yml
@@ -1,3 +1,5 @@
 packages:
   - package: fivetran/fivetran_utils
     version: [">=0.4.0", "<0.5.0"]
+  - package: calogica/dbt_date
+    version: [">=0.9.0", "<1.0.0"]