🐛 Add method field on spec.json connectors (snowflake and postgres) #3960

Merged
merged 4 commits on Jun 8, 2021
Changes from 1 commit
add prop for oneOf snowflake
marcosmarxm committed Jun 8, 2021
commit 1419edcce1d17029cfa193123807ac1d3590cb11
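
For context on what this commit introduces: each `oneOf` option in the spec gains a `method` field pinned to a single enum value, acting as a discriminator so Airbyte (and any JSON Schema validator) can tell which loading method a stored config targets. The sketch below illustrates the pattern, assuming a hypothetical `loading_method` wrapper and a trimmed-down schema rather than the full connector spec:

```python
# Minimal sketch of the oneOf "method" discriminator pattern added in
# this commit. The "loading_method" wrapper name and the trimmed schema
# below are assumptions for illustration, not the full connector spec.
from jsonschema import validate

spec = {
    "type": "object",
    "properties": {
        "loading_method": {
            "oneOf": [
                {
                    "title": "Standard Inserts",
                    "type": "object",
                    "additionalProperties": False,
                    "required": ["method"],
                    "properties": {
                        "method": {"type": "string", "enum": ["Standard"]}
                    },
                },
                {
                    "title": "AWS S3 Staging",
                    "type": "object",
                    "additionalProperties": False,
                    "required": ["method", "s3_bucket_name",
                                 "access_key_id", "secret_access_key"],
                    "properties": {
                        "method": {"type": "string", "enum": ["S3 Staging"]},
                        "s3_bucket_name": {"type": "string"},
                        "access_key_id": {"type": "string"},
                        "secret_access_key": {"type": "string"},
                    },
                },
            ]
        }
    },
}

config = {
    "loading_method": {
        "method": "S3 Staging",
        "s3_bucket_name": "airbyte.staging",
        "access_key_id": "AKIA...",
        "secret_access_key": "...",
    }
}

# Passes: the "method" enum pins the config to exactly one oneOf branch.
validate(instance=config, schema=spec)
```

Without the `method` discriminator, branches can only be told apart by which other keys happen to be present, which becomes ambiguous once options share optional fields.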
@@ -79,25 +79,38 @@
"title": "Standard Inserts",
"additionalProperties": false,
"description": "Uses <pre>INSERT</pre> statements to send batches of records to Snowflake. Easiest (no setup) but not recommended for large production workloads due to slow speed.",
"required": [],
"properties": {}
"required": ["method"],
"properties": {
"method": {
"type": "string",
"enum": ["Standard"],
"default": "Standard"
}
}
},
{
"title": "AWS S3 Staging",
"additionalProperties": false,
"description": "Writes large batches of records to a file, uploads the file to S3, then uses <pre>COPY INTO table</pre> to upload the file. Recommended for large production workloads for better speed and scalability.",
"required": [
"method",
"s3_bucket_name",
"access_key_id",
"secret_access_key"
],
"properties": {
"method": {
"type": "string",
"enum": ["S3 Staging"],
"default": "S3 Staging",
"order": 0
},
"s3_bucket_name": {
"title": "S3 Bucket Name",
"type": "string",
"description": "The name of the staging S3 bucket. Airbyte will write files to this bucket and read them via <pre>COPY</pre> statements on Snowflake.",
"examples": ["airbyte.staging"],
"order": 0
"order": 1
},
"s3_bucket_region": {
"title": "S3 Bucket Region",
@@ -121,6 +134,7 @@
"ca-central-1",
"cn-north-1",
"cn-northwest-1",
"eu-central-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
@@ -129,43 +143,49 @@
"sa-east-1",
"me-south-1"
],
"order": 1
"order": 2
},
"access_key_id": {
"type": "string",
"description": "The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.",
"title": "S3 Key Id",
"airbyte_secret": true,
"order": 2
"order": 3
},
"secret_access_key": {
"type": "string",
"description": "The corresponding secret to the above access key id.",
"title": "S3 Access Key",
"airbyte_secret": true,
"order": 3
"order": 4
}
}
},
{
"title": "GCS Staging",
"additionalProperties": false,
"description": "Writes large batches of records to a file, uploads the file to GCS, then uses <pre>COPY INTO table</pre> to upload the file. Recommended for large production workloads for better speed and scalability.",
"required": ["project_id", "bucket_name", "credentials_json"],
"required": ["method","project_id", "bucket_name", "credentials_json"],
"properties": {
"method": {
"type": "string",
"enum": ["GCS Staging"],
"default": "GCS Staging",
"order": 0
},
"project_id": {
"title": "GCP Project ID",
"type": "string",
"description": "The name of the GCP project ID for your credentials.",
"examples": ["my-project"],
"order": 0
"order": 1
},
"bucket_name": {
"title": "GCS Bucket Name",
"type": "string",
"description": "The name of the staging GCS bucket. Airbyte will write files to this bucket and read them via <pre>COPY</pre> statements on Snowflake.",
"examples": ["airbyte-staging"],
"order": 1
"order": 2
},
"credentials_json": {
"title": "Google Application Credentials",