Replicate recent dockerfile changes into dagger (airbytehq#25653)
* replicate dockerfile changes

* only add normalization stuff if needed

* also set entrypoint >.>

* fix order of constants

* clean up normalization mappings + fail loudly

* manually install urllib3 1.26.15

* fix declaration
edgao authored May 4, 2023
1 parent 7310494 commit ac03566
Showing 1 changed file with 76 additions and 34 deletions.
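
The diff below collapses the two per-destination mappings (normalization Dockerfile and dbt adapter package) into a single BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION table that also records the dbt integration name and whether the destination supports in-connector normalization. The "fail loudly" bullet refers to the lookups switching from .get() with a default to direct indexing, so an unmapped destination now raises KeyError instead of quietly building against the BigQuery defaults. Here is a minimal sketch of that lookup behavior, using placeholder names (BASE_CONFIG, CONFIG) rather than the pipeline's own constants:

# Illustrative sketch only; BASE_CONFIG / CONFIG stand in for the real
# BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION / DESTINATION_NORMALIZATION_BUILD_CONFIGURATION.
BASE_CONFIG = {
    "destination-postgres": {"dockerfile": "Dockerfile", "dbt_adapter": "dbt-postgres==1.0.0"},
}
# strict-encrypt variants reuse their base connector's configuration
CONFIG = {**BASE_CONFIG, **{f"{k}-strict-encrypt": v for k, v in BASE_CONFIG.items()}}

# Old behavior: unknown connectors silently fell back to a default, e.g.
#   dockerfile = OLD_MAPPING.get(technical_name, "Dockerfile")
# New behavior: direct indexing fails loudly for anything not explicitly configured.
print(CONFIG["destination-postgres-strict-encrypt"]["dockerfile"])  # -> "Dockerfile"
try:
    CONFIG["destination-unknown"]["dockerfile"]
except KeyError:
    print("unmapped destination: KeyError instead of a silent default")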
@@ -510,36 +510,72 @@ def with_integration_base_java(context: PipelineContext, build_platform: Platfor
    )


BASE_DESTINATION_SPECIFIC_NORMALIZATION_DOCKERFILE_MAPPING = {
    "destination-clickhouse": "clickhouse.Dockerfile",
    "destination-duckdb": "duckdb.Dockerfile",
    "destination-mssql": "mssql.Dockerfile",
    "destination-mysql": "mysql.Dockerfile",
    "destination-oracle": "oracle.Dockerfile",
    "destination-tidb": "tidb.Dockerfile",
    "destination-bigquery": "Dockerfile",
    "destination-redshift": "redshift.Dockerfile",
    "destination-snowflake": "snowflake.Dockerfile",
BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = {
    "destination-bigquery": {
        "dockerfile": "Dockerfile",
        "dbt_adapter": "dbt-bigquery==1.0.0",
        "integration_name": "bigquery",
        "supports_in_connector_normalization": True
    },
    "destination-clickhouse": {
        "dockerfile": "clickhouse.Dockerfile",
        "dbt_adapter": "dbt-clickhouse>=1.4.0",
        "integration_name": "clickhouse",
        "supports_in_connector_normalization": False
    },
    "destination-duckdb": {
        "dockerfile": "duckdb.Dockerfile",
        "dbt_adapter": "dbt-duckdb==1.0.1",
        "integration_name": "duckdb",
        "supports_in_connector_normalization": False
    },
    "destination-mssql": {
        "dockerfile": "mssql.Dockerfile",
        "dbt_adapter": "dbt-sqlserver==1.0.0",
        "integration_name": "mssql",
        "supports_in_connector_normalization": False
    },
    "destination-mysql": {
        "dockerfile": "mysql.Dockerfile",
        "dbt_adapter": "dbt-mysql==1.0.0",
        "integration_name": "mysql",
        "supports_in_connector_normalization": False
    },
    "destination-oracle": {
        "dockerfile": "oracle.Dockerfile",
        "dbt_adapter": "dbt-oracle==0.4.3",
        "integration_name": "oracle",
        "supports_in_connector_normalization": False
    },
    "destination-postgres": {
        "dockerfile": "Dockerfile",
        "dbt_adapter": "dbt-postgres==1.0.0",
        "integration_name": "postgres",
        "supports_in_connector_normalization": False
    },
    "destination-redshift": {
        "dockerfile": "redshift.Dockerfile",
        "dbt_adapter": "dbt-redshift==1.0.0",
        "integration_name": "redshift",
        "supports_in_connector_normalization": False
    },
    "destination-snowflake": {
        "dockerfile": "snowflake.Dockerfile",
        "dbt_adapter": "dbt-snowflake==1.0.0",
        "integration_name": "snowflake",
        "supports_in_connector_normalization": False
    },
    "destination-tidb": {
        "dockerfile": "tidb.Dockerfile",
        "dbt_adapter": "dbt-tidb==1.0.1",
        "integration_name": "tidb",
        "supports_in_connector_normalization": False
    },
}

BASE_DESTINATION_SPECIFIC_NORMALIZATION_ADAPTER_MAPPING = {
    "destination-clickhouse": "dbt-clickhouse>=1.4.0",
    "destination-duckdb": "duckdb.Dockerfile",
    "destination-mssql": "dbt-sqlserver==1.0.0",
    "destination-mysql": "dbt-mysql==1.0.0",
    "destination-oracle": "dbt-oracle==0.4.3",
    "destination-tidb": "dbt-tidb==1.0.1",
    "destination-bigquery": "dbt-bigquery==1.0.0",
}

DESTINATION_SPECIFIC_NORMALIZATION_DOCKERFILE_MAPPING = {
    **BASE_DESTINATION_SPECIFIC_NORMALIZATION_DOCKERFILE_MAPPING,
    **{f"{k}-strict-encrypt": v for k, v in BASE_DESTINATION_SPECIFIC_NORMALIZATION_DOCKERFILE_MAPPING.items()},
}

DESTINATION_SPECIFIC_NORMALIZATION_ADAPTER_MAPPING = {
    **BASE_DESTINATION_SPECIFIC_NORMALIZATION_ADAPTER_MAPPING,
    **{f"{k}-strict-encrypt": v for k, v in BASE_DESTINATION_SPECIFIC_NORMALIZATION_ADAPTER_MAPPING.items()},
DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = {
    **BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION,
    **{f"{k}-strict-encrypt": v for k, v in BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION.items()},
}


@@ -550,9 +586,7 @@ def with_normalization(context: ConnectorContext) -> Container:
    ).file("sshtunneling.sh")
    normalization_directory_with_build = normalization_directory.with_new_directory("build")
    normalization_directory_with_sshtunneling = normalization_directory_with_build.with_file("build/sshtunneling.sh", sshtunneling_file)
    normalization_dockerfile_name = DESTINATION_SPECIFIC_NORMALIZATION_DOCKERFILE_MAPPING.get(
        context.connector.technical_name, "Dockerfile"
    )
    normalization_dockerfile_name = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["dockerfile"]
    return normalization_directory_with_sshtunneling.docker_build(normalization_dockerfile_name)


@@ -565,7 +599,8 @@ def with_integration_base_java_and_normalization(context: PipelineContext, build
"git",
]

dbt_adapter_package = DESTINATION_SPECIFIC_NORMALIZATION_ADAPTER_MAPPING.get(context.connector.technical_name, "dbt-bigquery==1.0.0")
dbt_adapter_package = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["dbt_adapter"]
normalization_integration_name = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["integration_name"]

pip_cache: CacheVolume = context.dagger_client.cache_volume("pip_cache")

@@ -586,8 +621,13 @@ def with_integration_base_java_and_normalization(context: PipelineContext, build
        .with_workdir("/airbyte/normalization_code")
        .with_exec(["pip3", "install", "."])
        .with_workdir("/airbyte/normalization_code/dbt-template/")
        # amazon linux 2 isn't compatible with urllib3 2.x, so force 1.x
        .with_exec(["pip3", "install", "urllib3<2"])
        .with_exec(["dbt", "deps"])
        .with_workdir("/airbyte")
        .with_file("run_with_normalization.sh", context.get_repo_dir("airbyte-integrations/bases/base-java", include=["run_with_normalization.sh"]).file("run_with_normalization.sh"))
        .with_env_variable("AIRBYTE_NORMALIZATION_INTEGRATION", normalization_integration_name)
        .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/run_with_normalization.sh")
    )


@@ -603,10 +643,12 @@ async def with_airbyte_java_connector(context: ConnectorContext, connector_java_
        .with_exec(["rm", "-rf", f"{application}.tar"])
    )

    if context.connector.supports_normalization:
    if context.connector.supports_normalization and DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["supports_in_connector_normalization"]:
        base = with_integration_base_java_and_normalization(context, build_platform)
        entrypoint = ["/airbyte/run_with_normalization.sh"]
    else:
        base = with_integration_base_java(context, build_platform)
        entrypoint = ["/airbyte/base.sh"]

    return (
        base.with_workdir("/airbyte")
@@ -616,7 +658,7 @@ async def with_airbyte_java_connector(context: ConnectorContext, connector_java_
        .with_exec(["rm", "-rf", "builts_artifacts"])
        .with_label("io.airbyte.version", context.metadata["dockerImageTag"])
        .with_label("io.airbyte.name", context.metadata["dockerRepository"])
        .with_entrypoint(["/airbyte/base.sh"])
        .with_entrypoint(entrypoint)
    )


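For context, here is one way the consolidated table could be consumed end to end, based only on the fields visible in the diff above. resolve_build_settings and its arguments are hypothetical stand-ins for the real ConnectorContext plumbing, not the pipeline's actual API; the sketch assumes the DESTINATION_NORMALIZATION_BUILD_CONFIGURATION dict defined above is in scope.

def resolve_build_settings(technical_name: str, connector_supports_normalization: bool) -> dict:
    # Raises KeyError for unmapped destinations, mirroring the "fail loudly" behavior above.
    config = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[technical_name]
    in_connector = connector_supports_normalization and config["supports_in_connector_normalization"]
    return {
        "dockerfile": config["dockerfile"],
        "dbt_adapter": config["dbt_adapter"],
        "integration_name": config["integration_name"],
        # Mirrors the entrypoint selection in with_airbyte_java_connector above.
        "entrypoint": ["/airbyte/run_with_normalization.sh"] if in_connector else ["/airbyte/base.sh"],
    }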