From d3857723553b625f1b895572d578cd851a0c03ca Mon Sep 17 00:00:00 2001 From: aws-sdk-python-automation Date: Tue, 6 Oct 2020 18:13:13 +0000 Subject: [PATCH 1/2] Update to latest models --- .../next-release/api-change-dms-93546.json | 5 + .../next-release/api-change-ec2-82257.json | 5 + .../api-change-kinesisanalyticsv2-4210.json | 5 + .../api-change-marketplacecatalog-18838.json | 5 + botocore/data/dms/2016-01-01/service-2.json | 240 +++++++++++++- botocore/data/ec2/2016-11-15/service-2.json | 10 + .../2018-05-23/service-2.json | 308 +++++++++--------- .../2018-09-17/service-2.json | 17 +- 8 files changed, 432 insertions(+), 163 deletions(-) create mode 100644 .changes/next-release/api-change-dms-93546.json create mode 100644 .changes/next-release/api-change-ec2-82257.json create mode 100644 .changes/next-release/api-change-kinesisanalyticsv2-4210.json create mode 100644 .changes/next-release/api-change-marketplacecatalog-18838.json diff --git a/.changes/next-release/api-change-dms-93546.json b/.changes/next-release/api-change-dms-93546.json new file mode 100644 index 0000000000..e3fcc8fc21 --- /dev/null +++ b/.changes/next-release/api-change-dms-93546.json @@ -0,0 +1,5 @@ +{ + "category": "``dms``", + "type": "api-change", + "description": "Update dms client to latest version" +} diff --git a/.changes/next-release/api-change-ec2-82257.json b/.changes/next-release/api-change-ec2-82257.json new file mode 100644 index 0000000000..c936aeaa44 --- /dev/null +++ b/.changes/next-release/api-change-ec2-82257.json @@ -0,0 +1,5 @@ +{ + "category": "``ec2``", + "type": "api-change", + "description": "Update ec2 client to latest version" +} diff --git a/.changes/next-release/api-change-kinesisanalyticsv2-4210.json b/.changes/next-release/api-change-kinesisanalyticsv2-4210.json new file mode 100644 index 0000000000..16d785d834 --- /dev/null +++ b/.changes/next-release/api-change-kinesisanalyticsv2-4210.json @@ -0,0 +1,5 @@ +{ + "category": "``kinesisanalyticsv2``", + "type": "api-change", + "description": "Update kinesisanalyticsv2 client to latest version" +} diff --git a/.changes/next-release/api-change-marketplacecatalog-18838.json b/.changes/next-release/api-change-marketplacecatalog-18838.json new file mode 100644 index 0000000000..87ebfd0667 --- /dev/null +++ b/.changes/next-release/api-change-marketplacecatalog-18838.json @@ -0,0 +1,5 @@ +{ + "category": "``marketplace-catalog``", + "type": "api-change", + "description": "Update marketplace-catalog client to latest version" +} diff --git a/botocore/data/dms/2016-01-01/service-2.json b/botocore/data/dms/2016-01-01/service-2.json index 9fa7d12c0e..27bfbdaea6 100644 --- a/botocore/data/dms/2016-01-01/service-2.json +++ b/botocore/data/dms/2016-01-01/service-2.json @@ -983,6 +983,14 @@ "member":{"shape":"Certificate"} }, "CertificateWallet":{"type":"blob"}, + "CharLengthSemantics":{ + "type":"string", + "enum":[ + "default", + "char", + "byte" + ] + }, "CompressionTypeValue":{ "type":"string", "enum":[ @@ -1402,6 +1410,25 @@ "parquet" ] }, + "DatePartitionDelimiterValue":{ + "type":"string", + "enum":[ + "SLASH", + "UNDERSCORE", + "DASH", + "NONE" + ] + }, + "DatePartitionSequenceValue":{ + "type":"string", + "enum":[ + "YYYYMMDD", + "YYYYMMDDHH", + "YYYYMM", + "MMYYYYDD", + "DDMMYYYY" + ] + }, "DeleteCertificateMessage":{ "type":"structure", "required":["CertificateArn"], @@ -2322,11 +2349,11 @@ }, "EndpointUri":{ "shape":"String", - "documentation":"

The endpoint for the Elasticsearch cluster.

" + "documentation":"

The endpoint for the Elasticsearch cluster. AWS DMS uses HTTPS if a transport protocol (http/https) is not specified.

" }, "FullLoadErrorPercentage":{ "shape":"IntegerOptional", - "documentation":"

The maximum percentage of records that can fail to be written before a full load operation stops.

" + "documentation":"

The maximum percentage of records that can fail to be written before a full load operation stops.

To avoid early failure, this counter is only effective after 1000 records are transferred. Elasticsearch also has the concept of error monitoring during the last 10 minutes of an Observation Window. If the transfer of all records fails in the last 10 minutes, the full load operation stops.

" }, "ErrorRetryDuration":{ "shape":"IntegerOptional", @@ -2607,7 +2634,7 @@ "documentation":"

The filter value, which can specify one or more values used to narrow the returned results.

" } }, - "documentation":"

Identifies the name and value of a filter object. This filter is used to limit the number and type of AWS DMS objects that are returned for a particular Describe* or similar operation.

" + "documentation":"

Identifies the name and value of a filter object. This filter is used to limit the number and type of AWS DMS objects that are returned for a particular Describe* call or similar operation. Filters are used as an optional parameter to the following APIs.

" }, "FilterList":{ "type":"list", @@ -2636,6 +2663,18 @@ "shape":"String", "documentation":"

Fully qualified domain name of the endpoint.

" }, + "SetDataCaptureChanges":{ + "shape":"BooleanOptional", + "documentation":"

Enables ongoing replication (CDC) as a BOOLEAN value. The default is true.

" + }, + "CurrentLsn":{ + "shape":"String", + "documentation":"

For ongoing replication (CDC), use CurrentLSN to specify a log sequence number (LSN) where you want the replication to start.

" + }, + "MaxKBytesPerRead":{ + "shape":"IntegerOptional", + "documentation":"

Maximum number of bytes per read, as a NUMBER value. The default is 64 KB.

" + }, "Username":{ "shape":"String", "documentation":"

Endpoint connection user name.

" @@ -2912,14 +2951,30 @@ "shape":"IntegerOptional", "documentation":"

Endpoint TCP port.

" }, + "BcpPacketSize":{ + "shape":"IntegerOptional", + "documentation":"

The maximum size of the packets (in bytes) used to transfer data using BCP.

" + }, "DatabaseName":{ "shape":"String", "documentation":"

Database name for the endpoint.

" }, + "ControlTablesFileGroup":{ + "shape":"String", + "documentation":"

Specify a filegroup for the AWS DMS internal tables. When the replication task starts, all the internal AWS DMS control tables (awsdms_apply_exception, awsdms_apply, awsdms_changes) are created on the specified filegroup.

" + }, "Password":{ "shape":"SecretString", "documentation":"

Endpoint connection password.

" }, + "ReadBackupOnly":{ + "shape":"BooleanOptional", + "documentation":"

When this attribute is set to Y, AWS DMS only reads changes from transaction log backups and doesn't read from the active transaction log file during ongoing replication. Setting this parameter to Y enables you to control active transaction log file growth during full load and ongoing replication tasks. However, it can add some source latency to ongoing replication.

" + }, + "SafeguardPolicy":{ + "shape":"SafeguardPolicy", + "documentation":"

Use this attribute to minimize the need to access the backup log and enable AWS DMS to prevent truncation using one of the following two methods.

Start transactions in the database: This is the default method. When this method is used, AWS DMS prevents TLOG truncation by mimicking a transaction in the database. As long as such a transaction is open, changes that appear after the transaction started aren't truncated. If you need Microsoft Replication to be enabled in your database, then you must choose this method.

Exclusively use sp_repldone within a single task: When this method is used, AWS DMS reads the changes and then uses sp_repldone to mark the TLOG transactions as ready for truncation. Although this method doesn't involve any transactional activities, it can only be used when Microsoft Replication isn't running. Also, when using this method, only one AWS DMS task can access the database at any given time. Therefore, if you need to run parallel AWS DMS tasks against the same database, use the default method.

" + }, "ServerName":{ "shape":"String", "documentation":"

Fully qualified domain name of the endpoint.

" @@ -2927,6 +2982,10 @@ "Username":{ "shape":"String", "documentation":"

Endpoint connection user name.

" + }, + "UseBcpFullLoad":{ + "shape":"BooleanOptional", + "documentation":"

Use this attribute to transfer data for full-load operations using BCP. When the target table contains an identity column that does not exist in the source table, you must disable the use BCP for loading table option.

" } }, "documentation":"

Provides information that defines a Microsoft SQL Server endpoint.

" @@ -3309,10 +3368,30 @@ "MySQLSettings":{ "type":"structure", "members":{ + "AfterConnectScript":{ + "shape":"String", + "documentation":"

Specifies a script to run immediately after AWS DMS connects to the endpoint. The migration task continues running regardless of whether the SQL statement succeeds or fails.

" + }, "DatabaseName":{ "shape":"String", "documentation":"

Database name for the endpoint.

" }, + "EventsPollInterval":{ + "shape":"IntegerOptional", + "documentation":"

Specifies how often to check the binary log for new changes/events when the database is idle.

Example: eventsPollInterval=5;

In the example, AWS DMS checks for changes in the binary logs every five seconds.

" + }, + "TargetDbType":{ + "shape":"TargetDbType", + "documentation":"

Specifies where to migrate source tables on the target, either to a single database or multiple databases.

Example: targetDbType=MULTIPLE_DATABASES

" + }, + "MaxFileSize":{ + "shape":"IntegerOptional", + "documentation":"

Specifies the maximum size (in KB) of any .csv file used to transfer data to a MySQL-compatible database.

Example: maxFileSize=512

" + }, + "ParallelLoadThreads":{ + "shape":"IntegerOptional", + "documentation":"

Improves performance when loading data into the MySQL-compatible target database. Specifies how many threads to use to load the data into the MySQL-compatible target database. Setting a large number of threads can have an adverse effect on database performance, because a separate connection is required for each thread.

Example: parallelLoadThreads=1

" + }, "Password":{ "shape":"SecretString", "documentation":"

Endpoint connection password.

" @@ -3325,6 +3404,10 @@ "shape":"String", "documentation":"

Fully qualified domain name of the endpoint.

" }, + "ServerTimezone":{ + "shape":"String", + "documentation":"

Specifies the time zone for the source MySQL database.

Example: serverTimezone=US/Pacific;

Note: Do not enclose time zones in single quotes.

" + }, "Username":{ "shape":"String", "documentation":"

Endpoint connection user name.

" @@ -3380,6 +3463,62 @@ "OracleSettings":{ "type":"structure", "members":{ + "AddSupplementalLogging":{ + "shape":"BooleanOptional", + "documentation":"

Set this attribute to set up table-level supplemental logging for the Oracle database. This attribute enables PRIMARY KEY supplemental logging on all tables selected for a migration task.

If you use this option, you still need to enable database-level supplemental logging.

" + }, + "ArchivedLogDestId":{ + "shape":"IntegerOptional", + "documentation":"

Specifies the destination of the archived redo logs. The value should be the same as the DEST_ID number in the v$archived_log table. When working with multiple log destinations (DEST_ID), we recommend that you specify an archived redo logs location identifier. Doing this improves performance by ensuring that the correct logs are accessed from the outset.

" + }, + "AdditionalArchivedLogDestId":{ + "shape":"IntegerOptional", + "documentation":"

Set this attribute with archivedLogDestId in a primary/standby setup. This attribute is useful in the case of a switchover. In this case, AWS DMS needs to know which destination to get archived redo logs from in order to read changes. This need arises because the previous primary instance is now a standby instance after switchover.

" + }, + "AllowSelectNestedTables":{ + "shape":"BooleanOptional", + "documentation":"

Set this attribute to true to enable replication of Oracle tables containing columns that are nested tables or defined types.

" + }, + "ParallelAsmReadThreads":{ + "shape":"IntegerOptional", + "documentation":"

Set this attribute to change the number of threads that DMS configures to perform a Change Data Capture (CDC) load using Oracle Automatic Storage Management (ASM). You can specify an integer value between 2 (the default) and 8 (the maximum). Use this attribute together with the readAheadBlocks attribute.

" + }, + "ReadAheadBlocks":{ + "shape":"IntegerOptional", + "documentation":"

Set this attribute to change the number of read-ahead blocks that DMS configures to perform a Change Data Capture (CDC) load using Oracle Automatic Storage Management (ASM). You can specify an integer value between 1000 (the default) and 200,000 (the maximum).

" + }, + "AccessAlternateDirectly":{ + "shape":"BooleanOptional", + "documentation":"

Set this attribute to false in order to use the Binary Reader to capture change data for an Amazon RDS for Oracle source. This tells the DMS instance not to access redo logs through any specified path prefix replacement using direct file access.

" + }, + "UseAlternateFolderForOnline":{ + "shape":"BooleanOptional", + "documentation":"

Set this attribute to true in order to use the Binary Reader to capture change data for an Amazon RDS for Oracle source. This tells the DMS instance to use any specified prefix replacement to access all online redo logs.

" + }, + "OraclePathPrefix":{ + "shape":"String", + "documentation":"

Set this string attribute to the required value in order to use the Binary Reader to capture change data for an Amazon RDS for Oracle source. This value specifies the default Oracle root used to access the redo logs.

" + }, + "UsePathPrefix":{ + "shape":"String", + "documentation":"

Set this string attribute to the required value in order to use the Binary Reader to capture change data for an Amazon RDS for Oracle source. This value specifies the path prefix used to replace the default Oracle root to access the redo logs.

" + }, + "ReplacePathPrefix":{ + "shape":"BooleanOptional", + "documentation":"

Set this attribute to true in order to use the Binary Reader to capture change data for an Amazon RDS for Oracle source. This setting tells the DMS instance to replace the default Oracle root with the specified usePathPrefix setting to access the redo logs.

" + }, + "EnableHomogenousTablespace":{ + "shape":"BooleanOptional", + "documentation":"

Set this attribute to enable homogenous tablespace replication and create existing tables or indexes under the same tablespace on the target.

" + }, + "DirectPathNoLog":{ + "shape":"BooleanOptional", + "documentation":"

When set to true, this attribute helps to increase the commit rate on the Oracle target database by writing directly to tables and not writing a trail to database logs.

" + }, + "ArchivedLogsOnly":{ + "shape":"BooleanOptional", + "documentation":"

When this field is set to Y, AWS DMS only accesses the archived redo logs. If the archived redo logs are stored on Oracle ASM only, the AWS DMS user account needs to be granted ASM privileges.

" + }, "AsmPassword":{ "shape":"SecretString", "documentation":"

For an Oracle source endpoint, your Oracle Automatic Storage Management (ASM) password. You can set this value from the asm_user_password value. You set this value as part of the comma-separated value that you set to the Password request parameter when you create the endpoint to access transaction logs using Binary Reader. For more information, see Configuration for change data capture (CDC) on an Oracle source database.

" @@ -3392,10 +3531,26 @@ "shape":"String", "documentation":"

For an Oracle source endpoint, your ASM user name. You can set this value from the asm_user value. You set asm_user as part of the extra connection attribute string to access an Oracle server with Binary Reader that uses ASM. For more information, see Configuration for change data capture (CDC) on an Oracle source database.

" }, + "CharLengthSemantics":{ + "shape":"CharLengthSemantics", + "documentation":"

Specifies whether the length of a character column is in bytes or in characters. To indicate that the character column length is in characters, set this attribute to CHAR. Otherwise, the character column length is in bytes.

Example: charLengthSemantics=CHAR;

" + }, "DatabaseName":{ "shape":"String", "documentation":"

Database name for the endpoint.

" }, + "DirectPathParallelLoad":{ + "shape":"BooleanOptional", + "documentation":"

When set to true, this attribute specifies a parallel load when useDirectPathFullLoad is set to Y. This attribute also only applies when you use the AWS DMS parallel load feature. Note that the target table cannot have any constraints or indexes.

" + }, + "FailTasksOnLobTruncation":{ + "shape":"BooleanOptional", + "documentation":"

When set to true, this attribute causes a task to fail if the actual size of an LOB column is greater than the specified LobMaxSize.

If a task is set to limited LOB mode and this option is set to true, the task fails instead of truncating the LOB data.

" + }, + "NumberDatatypeScale":{ + "shape":"IntegerOptional", + "documentation":"

Specifies the number scale. You can select a scale up to 38, or you can select FLOAT. By default, the NUMBER data type is converted to precision 38, scale 10.

Example: numberDataTypeScale=12

" + }, "Password":{ "shape":"SecretString", "documentation":"

Endpoint connection password.

" @@ -3404,6 +3559,14 @@ "shape":"IntegerOptional", "documentation":"

Endpoint TCP port.

" }, + "ReadTableSpaceName":{ + "shape":"BooleanOptional", + "documentation":"

When set to true, this attribute supports tablespace replication.

" + }, + "RetryInterval":{ + "shape":"IntegerOptional", + "documentation":"

Specifies the number of seconds that the system waits before resending a query.

Example: retryInterval=6;

" + }, "SecurityDbEncryption":{ "shape":"SecretString", "documentation":"

For an Oracle source endpoint, the transparent data encryption (TDE) password required by AWS DMS to access Oracle redo logs encrypted by TDE using Binary Reader. It is also the TDE_Password part of the comma-separated value you set to the Password request parameter when you create the endpoint. The SecurityDbEncryption setting is related to this SecurityDbEncryptionName setting. For more information, see Supported encryption methods for using Oracle as a source for AWS DMS in the AWS Database Migration Service User Guide.

" @@ -3517,10 +3680,34 @@ "PostgreSQLSettings":{ "type":"structure", "members":{ + "AfterConnectScript":{ + "shape":"String", + "documentation":"

For use with change data capture (CDC) only, this attribute has AWS DMS bypass foreign keys and user triggers to reduce the time it takes to bulk load data.

Example: afterConnectScript=SET session_replication_role='replica'

" + }, + "CaptureDdls":{ + "shape":"BooleanOptional", + "documentation":"

To capture DDL events, AWS DMS creates various artifacts in the PostgreSQL database when the task starts. You can later remove these artifacts.

If this value is set to N, you don't have to create tables or triggers on the source database.

" + }, + "MaxFileSize":{ + "shape":"IntegerOptional", + "documentation":"

Specifies the maximum size (in KB) of any .csv file used to transfer data to PostgreSQL.

Example: maxFileSize=512

" + }, "DatabaseName":{ "shape":"String", "documentation":"

Database name for the endpoint.

" }, + "DdlArtifactsSchema":{ + "shape":"String", + "documentation":"

The schema in which the operational DDL database artifacts are created.

Example: ddlArtifactsSchema=xyzddlschema;

" + }, + "ExecuteTimeout":{ + "shape":"IntegerOptional", + "documentation":"

Sets the client statement timeout for the PostgreSQL instance, in seconds. The default value is 60 seconds.

Example: executeTimeout=100;

" + }, + "FailTasksOnLobTruncation":{ + "shape":"BooleanOptional", + "documentation":"

When set to true, this value causes a task to fail if the actual size of a LOB column is greater than the specified LobMaxSize.

If a task is set to limited LOB mode and this option is set to true, the task fails instead of truncating the LOB data.

" + }, "Password":{ "shape":"SecretString", "documentation":"

Endpoint connection password.

" @@ -3536,6 +3723,10 @@ "Username":{ "shape":"String", "documentation":"

Endpoint connection user name.

" + }, + "SlotName":{ + "shape":"String", + "documentation":"

Sets the name of a previously created logical replication slot for a CDC load of the PostgreSQL source instance.

When used with the AWS DMS API CdcStartPosition request parameter, this attribute also enables using native CDC start points.

" } }, "documentation":"

Provides information that defines a PostgreSQL endpoint.

" @@ -3576,11 +3767,11 @@ }, "BucketFolder":{ "shape":"String", - "documentation":"

The location where the comma-separated value (.csv) files are stored before being uploaded to the S3 bucket.

" + "documentation":"

An S3 folder where the comma-separated-value (.csv) files are stored before being uploaded to the target Redshift cluster.

For full load mode, AWS DMS converts source records into .csv files and loads them to the BucketFolder/TableID path. AWS DMS uses the Redshift COPY command to upload the .csv files to the target table. The files are deleted once the COPY operation has finished. For more information, see the Amazon Redshift Database Developer Guide.

For change-data-capture (CDC) mode, AWS DMS creates a NetChanges table, and loads the .csv files to this BucketFolder/NetChangesTableID path.

" }, "BucketName":{ "shape":"String", - "documentation":"

The name of the S3 bucket you want to use

" + "documentation":"

The name of the intermediate S3 bucket used to store .csv files before uploading data to Redshift.

" }, "ConnectionTimeout":{ "shape":"IntegerOptional", @@ -3604,15 +3795,15 @@ }, "FileTransferUploadStreams":{ "shape":"IntegerOptional", - "documentation":"

The number of threads used to upload a single file. This parameter accepts a value from 1 through 64. It defaults to 10.

" + "documentation":"

The number of threads used to upload a single file. This parameter accepts a value from 1 through 64. It defaults to 10.

The number of parallel streams used to upload a single .csv file to an S3 bucket using S3 Multipart Upload. For more information, see Multipart upload overview.

FileTransferUploadStreams accepts a value from 1 through 64. It defaults to 10.

" }, "LoadTimeout":{ "shape":"IntegerOptional", - "documentation":"

The amount of time to wait (in milliseconds) before timing out, beginning from when you begin loading.

" + "documentation":"

The amount of time to wait (in milliseconds) before timing out operations performed by AWS DMS on a Redshift cluster, such as Redshift COPY, INSERT, DELETE, and UPDATE.

" }, "MaxFileSize":{ "shape":"IntegerOptional", - "documentation":"

The maximum size (in KB) of any .csv file used to transfer data to Amazon Redshift. This accepts a value from 1 through 1,048,576. It defaults to 32,768 KB (32 MB).

" + "documentation":"

The maximum size (in KB) of any .csv file used to load data to an S3 bucket and transfer data to Amazon Redshift. It defaults to 1,048,576 KB (1 GB).

" }, "Password":{ "shape":"SecretString", @@ -3664,7 +3855,7 @@ }, "WriteBufferSize":{ "shape":"IntegerOptional", - "documentation":"

The size of the write buffer to use in rows. Valid values range from 1 through 2,048. The default is 1,024. Use this setting to tune performance.

" + "documentation":"

The size (in KB) of the in-memory file write buffer used when generating .csv files on the local disk at the DMS replication instance. The default value is 1000 (a buffer size of 1,000 KB).

" } }, "documentation":"

Provides information that defines an Amazon Redshift endpoint.

" @@ -3983,7 +4174,7 @@ "documentation":"

The subnets that are in the subnet group.

" } }, - "documentation":"

Describes a subnet group in response to a request by the DescribeReplicationSubnetGroup operation.

" + "documentation":"

Describes a subnet group in response to a request by the DescribeReplicationSubnetGroups operation.

" }, "ReplicationSubnetGroupDoesNotCoverEnoughAZs":{ "type":"structure", @@ -4423,6 +4614,18 @@ "CdcInsertsAndUpdates":{ "shape":"BooleanOptional", "documentation":"

A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or .parquet (columnar storage) output files. The default setting is false, but when CdcInsertsAndUpdates is set to true or y, only INSERTs and UPDATEs from the source database are migrated to the .csv or .parquet file.

For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the IncludeOpForFullLoad parameter. If IncludeOpForFullLoad is set to true, the first field of every CDC record is set to either I or U to indicate INSERT and UPDATE operations at the source. But if IncludeOpForFullLoad is set to false, CDC records are written without an indication of INSERT or UPDATE operations at the source. For more information about how these settings work together, see Indicating Source DB Operations in Migrated S3 Data in the AWS Database Migration Service User Guide.

AWS DMS supports the use of the CdcInsertsAndUpdates parameter in versions 3.3.1 and later.

CdcInsertsOnly and CdcInsertsAndUpdates can't both be set to true for the same endpoint. Set either CdcInsertsOnly or CdcInsertsAndUpdates to true for the same endpoint, but not both.

" + }, + "DatePartitionEnabled":{ + "shape":"BooleanOptional", + "documentation":"

When set to true, this parameter partitions S3 bucket folders based on transaction commit dates. The default value is false. For more information about date-based folder partitioning, see Using date-based folder partitioning.

" + }, + "DatePartitionSequence":{ + "shape":"DatePartitionSequenceValue", + "documentation":"

Identifies the sequence of the date format to use during folder partitioning. The default value is YYYYMMDD. Use this parameter when DatePartitionEnabled is set to true.

" + }, + "DatePartitionDelimiter":{ + "shape":"DatePartitionDelimiterValue", + "documentation":"

Specifies a date-separating delimiter to use during folder partitioning. The default value is SLASH (/). Use this parameter when DatePartitionEnabled is set to true.

" } }, "documentation":"

Settings for exporting data to Amazon S3.

" @@ -4449,6 +4652,14 @@ "documentation":"

You are not authorized for the SNS subscription.

", "exception":true }, + "SafeguardPolicy":{ + "type":"string", + "enum":[ + "rely-on-sql-server-replication-agent", + "exclusive-automatic-truncation", + "shared-automatic-truncation" + ] + }, "SchemaList":{ "type":"list", "member":{"shape":"String"} @@ -4641,7 +4852,7 @@ "documentation":"

The status of the subnet.

" } }, - "documentation":"

In response to a request by the DescribeReplicationSubnetGroup operation, this object identifies a subnet by its given Availability Zone, subnet identifier, and status.

" + "documentation":"

In response to a request by the DescribeReplicationSubnetGroups operation, this object identifies a subnet by its given Availability Zone, subnet identifier, and status.

" }, "SubnetAlreadyInUse":{ "type":"structure", @@ -4845,6 +5056,13 @@ "type":"list", "member":{"shape":"Tag"} }, + "TargetDbType":{ + "type":"string", + "enum":[ + "specific-database", + "multiple-databases" + ] + }, "TestConnectionMessage":{ "type":"structure", "required":[ diff --git a/botocore/data/ec2/2016-11-15/service-2.json b/botocore/data/ec2/2016-11-15/service-2.json index 793bf28540..d70d32e2c6 100644 --- a/botocore/data/ec2/2016-11-15/service-2.json +++ b/botocore/data/ec2/2016-11-15/service-2.json @@ -26189,6 +26189,11 @@ "documentation":"

The peer BGP ASN.

", "locationName":"peerBgpAsn" }, + "OwnerId":{ + "shape":"String", + "documentation":"

The AWS account ID that owns the local gateway virtual interface.

", + "locationName":"ownerId" + }, "Tags":{ "shape":"TagList", "documentation":"

The tags assigned to the virtual interface.

", @@ -26215,6 +26220,11 @@ "documentation":"

The ID of the local gateway.

", "locationName":"localGatewayId" }, + "OwnerId":{ + "shape":"String", + "documentation":"

The AWS account ID that owns the local gateway virtual interface group.

", + "locationName":"ownerId" + }, "Tags":{ "shape":"TagList", "documentation":"

The tags assigned to the virtual interface group.

", diff --git a/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json b/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json index 107efcb8a7..1b6478ffaa 100644 --- a/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json +++ b/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json @@ -48,7 +48,7 @@ {"shape":"CodeValidationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Adds a streaming source to your SQL-based Amazon Kinesis Data Analytics application.

You can add a streaming source when you create an application, or you can use this operation to add a streaming source after you create an application. For more information, see CreateApplication.

Any configuration update, including adding a streaming source using this operation, results in a new version of the application. You can use the DescribeApplication operation to find the current application version.

" + "documentation":"

Adds a streaming source to your SQL-based Kinesis Data Analytics application.

You can add a streaming source when you create an application, or you can use this operation to add a streaming source after you create an application. For more information, see CreateApplication.

Any configuration update, including adding a streaming source using this operation, results in a new version of the application. You can use the DescribeApplication operation to find the current application version.

" }, "AddApplicationInputProcessingConfiguration":{ "name":"AddApplicationInputProcessingConfiguration", @@ -65,7 +65,7 @@ {"shape":"ConcurrentModificationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Adds an InputProcessingConfiguration to an SQL-based Kinesis Data Analytics application. An input processor pre-processes records on the input stream before the application's SQL code executes. Currently, the only input processor available is AWS Lambda.

" + "documentation":"

Adds an InputProcessingConfiguration to a SQL-based Kinesis Data Analytics application. An input processor pre-processes records on the input stream before the application's SQL code executes. Currently, the only input processor available is AWS Lambda.

" }, "AddApplicationOutput":{ "name":"AddApplicationOutput", @@ -82,7 +82,7 @@ {"shape":"ConcurrentModificationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Adds an external destination to your SQL-based Amazon Kinesis Data Analytics application.

If you want Kinesis Data Analytics to deliver data from an in-application stream within your application to an external destination (such as an Kinesis data stream, a Kinesis Data Firehose delivery stream, or an AWS Lambda function), you add the relevant configuration to your application using this operation. You can configure one or more outputs for your application. Each output configuration maps an in-application stream and an external destination.

You can use one of the output configurations to deliver data from your in-application error stream to an external destination so that you can analyze the errors.

Any configuration update, including adding a streaming source using this operation, results in a new version of the application. You can use the DescribeApplication operation to find the current application version.

" + "documentation":"

Adds an external destination to your SQL-based Kinesis Data Analytics application.

If you want Kinesis Data Analytics to deliver data from an in-application stream within your application to an external destination (such as a Kinesis data stream, a Kinesis Data Firehose delivery stream, or an AWS Lambda function), you add the relevant configuration to your application using this operation. You can configure one or more outputs for your application. Each output configuration maps an in-application stream and an external destination.

You can use one of the output configurations to deliver data from your in-application error stream to an external destination so that you can analyze the errors.

Any configuration update, including adding a streaming source using this operation, results in a new version of the application. You can use the DescribeApplication operation to find the current application version.

" }, "AddApplicationReferenceDataSource":{ "name":"AddApplicationReferenceDataSource", @@ -99,7 +99,7 @@ {"shape":"ConcurrentModificationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Adds a reference data source to an existing SQL-based Amazon Kinesis Data Analytics application.

Kinesis Data Analytics reads reference data (that is, an Amazon S3 object) and creates an in-application table within your application. In the request, you provide the source (S3 bucket name and object key name), name of the in-application table to create, and the necessary mapping information that describes how data in an Amazon S3 object maps to columns in the resulting in-application table.

" + "documentation":"

Adds a reference data source to an existing SQL-based Kinesis Data Analytics application.

Kinesis Data Analytics reads reference data (that is, an Amazon S3 object) and creates an in-application table within your application. In the request, you provide the source (S3 bucket name and object key name), name of the in-application table to create, and the necessary mapping information that describes how data in an Amazon S3 object maps to columns in the resulting in-application table.

" }, "AddApplicationVpcConfiguration":{ "name":"AddApplicationVpcConfiguration", @@ -135,7 +135,7 @@ {"shape":"TooManyTagsException"}, {"shape":"ConcurrentModificationException"} ], - "documentation":"

Creates an Amazon Kinesis Data Analytics application. For information about creating a Kinesis Data Analytics application, see Creating an Application.

" + "documentation":"

Creates a Kinesis Data Analytics application. For information about creating a Kinesis Data Analytics application, see Creating an Application.

" }, "CreateApplicationSnapshot":{ "name":"CreateApplicationSnapshot", @@ -190,7 +190,7 @@ {"shape":"InvalidRequestException"}, {"shape":"InvalidApplicationConfigurationException"} ], - "documentation":"

Deletes an Amazon CloudWatch log stream from an Amazon Kinesis Data Analytics application.

" + "documentation":"

Deletes an Amazon CloudWatch log stream from a Kinesis Data Analytics application.

" }, "DeleteApplicationInputProcessingConfiguration":{ "name":"DeleteApplicationInputProcessingConfiguration", @@ -224,7 +224,7 @@ {"shape":"ConcurrentModificationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Deletes the output destination configuration from your SQL-based Amazon Kinesis Data Analytics application's configuration. Kinesis Data Analytics will no longer write data from the corresponding in-application stream to the external output destination.

" + "documentation":"

Deletes the output destination configuration from your SQL-based Kinesis Data Analytics application's configuration. Kinesis Data Analytics will no longer write data from the corresponding in-application stream to the external output destination.

" }, "DeleteApplicationReferenceDataSource":{ "name":"DeleteApplicationReferenceDataSource", @@ -241,7 +241,7 @@ {"shape":"ConcurrentModificationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Deletes a reference data source configuration from the specified SQL-based Amazon Kinesis Data Analytics application's configuration.

If the application is running, Kinesis Data Analytics immediately removes the in-application table that you created using the AddApplicationReferenceDataSource operation.

" + "documentation":"

Deletes a reference data source configuration from the specified SQL-based Kinesis Data Analytics application's configuration.

If the application is running, Kinesis Data Analytics immediately removes the in-application table that you created using the AddApplicationReferenceDataSource operation.

" }, "DeleteApplicationSnapshot":{ "name":"DeleteApplicationSnapshot", @@ -290,7 +290,7 @@ {"shape":"InvalidArgumentException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Returns information about a specific Amazon Kinesis Data Analytics application.

If you want to retrieve a list of all applications in your account, use the ListApplications operation.

" + "documentation":"

Returns information about a specific Kinesis Data Analytics application.

If you want to retrieve a list of all applications in your account, use the ListApplications operation.

" }, "DescribeApplicationSnapshot":{ "name":"DescribeApplicationSnapshot", @@ -322,7 +322,7 @@ {"shape":"ServiceUnavailableException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Infers a schema for an SQL-based Amazon Kinesis Data Analytics application by evaluating sample records on the specified streaming source (Kinesis data stream or Kinesis Data Firehose delivery stream) or Amazon S3 object. In the response, the operation returns the inferred schema and also the sample records that the operation used to infer the schema.

You can use the inferred schema when configuring a streaming source for your application. When you create an application using the Kinesis Data Analytics console, the console uses this operation to infer a schema and show it in the console user interface.

" + "documentation":"

Infers a schema for a SQL-based Kinesis Data Analytics application by evaluating sample records on the specified streaming source (Kinesis data stream or Kinesis Data Firehose delivery stream) or Amazon S3 object. In the response, the operation returns the inferred schema and also the sample records that the operation used to infer the schema.

You can use the inferred schema when configuring a streaming source for your application. When you create an application using the Kinesis Data Analytics console, the console uses this operation to infer a schema and show it in the console user interface.

" }, "ListApplicationSnapshots":{ "name":"ListApplicationSnapshots", @@ -349,7 +349,7 @@ "errors":[ {"shape":"InvalidRequestException"} ], - "documentation":"

Returns a list of Amazon Kinesis Data Analytics applications in your account. For each application, the response includes the application name, Amazon Resource Name (ARN), and status.

If you want detailed information about a specific application, use DescribeApplication.

" + "documentation":"

Returns a list of Kinesis Data Analytics applications in your account. For each application, the response includes the application name, Amazon Resource Name (ARN), and status.

If you want detailed information about a specific application, use DescribeApplication.

" }, "ListTagsForResource":{ "name":"ListTagsForResource", @@ -381,7 +381,7 @@ {"shape":"InvalidApplicationConfigurationException"}, {"shape":"InvalidRequestException"} ], - "documentation":"

Starts the specified Amazon Kinesis Data Analytics application. After creating an application, you must exclusively call this operation to start your application.

" + "documentation":"

Starts the specified Kinesis Data Analytics application. After creating an application, you must exclusively call this operation to start your application.

" }, "StopApplication":{ "name":"StopApplication", @@ -396,7 +396,8 @@ {"shape":"ResourceInUseException"}, {"shape":"InvalidArgumentException"}, {"shape":"InvalidRequestException"}, - {"shape":"InvalidApplicationConfigurationException"} + {"shape":"InvalidApplicationConfigurationException"}, + {"shape":"ConcurrentModificationException"} ], "documentation":"

Stops the application from processing data. You can stop an application only if it is in the running state. You can use the DescribeApplication operation to find the application state.

" }, @@ -415,7 +416,7 @@ {"shape":"InvalidArgumentException"}, {"shape":"ConcurrentModificationException"} ], - "documentation":"

Adds one or more key-value tags to a Kinesis Analytics application. Note that the maximum number of application tags includes system tags. The maximum number of user-defined application tags is 50. For more information, see Using Tagging.

" + "documentation":"

Adds one or more key-value tags to a Kinesis Data Analytics application. Note that the maximum number of application tags includes system tags. The maximum number of user-defined application tags is 50. For more information, see Using Tagging.

" }, "UntagResource":{ "name":"UntagResource", @@ -432,7 +433,7 @@ {"shape":"InvalidArgumentException"}, {"shape":"ConcurrentModificationException"} ], - "documentation":"

Removes one or more tags from a Kinesis Analytics application. For more information, see Using Tagging.

" + "documentation":"

Removes one or more tags from a Kinesis Data Analytics application. For more information, see Using Tagging.

" }, "UpdateApplication":{ "name":"UpdateApplication", @@ -451,7 +452,7 @@ {"shape":"InvalidRequestException"}, {"shape":"InvalidApplicationConfigurationException"} ], - "documentation":"

Updates an existing Amazon Kinesis Data Analytics application. Using this operation, you can update application code, input configuration, and output configuration.

Kinesis Data Analytics updates the ApplicationVersionId each time you update your application.

" + "documentation":"

Updates an existing Kinesis Data Analytics application. Using this operation, you can update application code, input configuration, and output configuration.

Kinesis Data Analytics updates the ApplicationVersionId each time you update your application.

You cannot update the RuntimeEnvironment of an existing application. If you need to update an application's RuntimeEnvironment, you must delete the application and create it again.

" } }, "shapes":{ @@ -534,7 +535,7 @@ }, "InputId":{ "shape":"Id", - "documentation":"

The input ID that is associated with the application input. This is the ID that Amazon Kinesis Data Analytics assigns to each input configuration that you add to your application.

" + "documentation":"

The input ID that is associated with the application input. This is the ID that Kinesis Data Analytics assigns to each input configuration that you add to your application.

" }, "InputProcessingConfigurationDescription":{ "shape":"InputProcessingConfigurationDescription", @@ -651,7 +652,7 @@ }, "ApplicationVersionId":{ "shape":"ApplicationVersionId", - "documentation":"

The updated application version ID. Amazon Kinesis Data Analytics increments this ID when the application is updated.

" + "documentation":"

The updated application version ID. Kinesis Data Analytics increments this ID when the application is updated.

" }, "ReferenceDataSourceDescriptions":{ "shape":"ReferenceDataSourceDescriptions", @@ -673,7 +674,7 @@ }, "CurrentApplicationVersionId":{ "shape":"ApplicationVersionId", - "documentation":"

The version of the application to which you want to add the input processing configuration. You can use the DescribeApplication operation to get the current application version. If the version specified is not the current version, the ConcurrentModificationException is returned.

" + "documentation":"

The version of the application to which you want to add the VPC configuration. You can use the DescribeApplication operation to get the current application version. If the version specified is not the current version, the ConcurrentModificationException is returned.

" }, "VpcConfiguration":{ "shape":"VpcConfiguration", @@ -711,7 +712,7 @@ "documentation":"

Specifies whether the code content is in text or zip format.

" } }, - "documentation":"

Describes code configuration for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes code configuration for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationCodeConfigurationDescription":{ "type":"structure", @@ -726,7 +727,7 @@ "documentation":"

Describes details about the location and format of the application code.

" } }, - "documentation":"

Describes code configuration for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes code configuration for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationCodeConfigurationUpdate":{ "type":"structure", @@ -740,7 +741,7 @@ "documentation":"

Describes updates to the code content of an application.

" } }, - "documentation":"

Describes updates to a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes code configuration updates to a Flink-based Kinesis Data Analytics application.

" }, "ApplicationConfiguration":{ "type":"structure", @@ -748,41 +749,41 @@ "members":{ "SqlApplicationConfiguration":{ "shape":"SqlApplicationConfiguration", - "documentation":"

The creation and update parameters for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

The creation and update parameters for a SQL-based Kinesis Data Analytics application.

" }, "FlinkApplicationConfiguration":{ "shape":"FlinkApplicationConfiguration", - "documentation":"

The creation and update parameters for a Java-based Kinesis Data Analytics application.

" + "documentation":"

The creation and update parameters for a Flink-based Kinesis Data Analytics application.

" }, "EnvironmentProperties":{ "shape":"EnvironmentProperties", - "documentation":"

Describes execution properties for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes execution properties for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationCodeConfiguration":{ "shape":"ApplicationCodeConfiguration", - "documentation":"

The code location and type parameters for a Java-based Kinesis Data Analytics application.

" + "documentation":"

The code location and type parameters for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationSnapshotConfiguration":{ "shape":"ApplicationSnapshotConfiguration", - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" }, "VpcConfigurations":{ "shape":"VpcConfigurations", "documentation":"

The array of descriptions of VPC configurations available to the application.

" } }, - "documentation":"

Specifies the creation parameters for an Amazon Kinesis Data Analytics application.

" + "documentation":"

Specifies the creation parameters for a Kinesis Data Analytics application.

" }, "ApplicationConfigurationDescription":{ "type":"structure", "members":{ "SqlApplicationConfigurationDescription":{ "shape":"SqlApplicationConfigurationDescription", - "documentation":"

The details about inputs, outputs, and reference data sources for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

The details about inputs, outputs, and reference data sources for a SQL-based Kinesis Data Analytics application.

" }, "ApplicationCodeConfigurationDescription":{ "shape":"ApplicationCodeConfigurationDescription", - "documentation":"

The details about the application code for a Java-based Kinesis Data Analytics application.

" + "documentation":"

The details about the application code for a Flink-based Kinesis Data Analytics application.

" }, "RunConfigurationDescription":{ "shape":"RunConfigurationDescription", @@ -790,45 +791,45 @@ }, "FlinkApplicationConfigurationDescription":{ "shape":"FlinkApplicationConfigurationDescription", - "documentation":"

The details about a Java-based Kinesis Data Analytics application.

" + "documentation":"

The details about a Flink-based Kinesis Data Analytics application.

" }, "EnvironmentPropertyDescriptions":{ "shape":"EnvironmentPropertyDescriptions", - "documentation":"

Describes execution properties for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes execution properties for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationSnapshotConfigurationDescription":{ "shape":"ApplicationSnapshotConfigurationDescription", - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" }, "VpcConfigurationDescriptions":{ "shape":"VpcConfigurationDescriptions", "documentation":"

The array of descriptions of VPC configurations available to the application.

" } }, - "documentation":"

Describes details about the application code and starting parameters for an Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes details about the application code and starting parameters for a Kinesis Data Analytics application.

" }, "ApplicationConfigurationUpdate":{ "type":"structure", "members":{ "SqlApplicationConfigurationUpdate":{ "shape":"SqlApplicationConfigurationUpdate", - "documentation":"

Describes updates to an SQL-based Kinesis Data Analytics application's configuration.

" + "documentation":"

Describes updates to a SQL-based Kinesis Data Analytics application's configuration.

" }, "ApplicationCodeConfigurationUpdate":{ "shape":"ApplicationCodeConfigurationUpdate", - "documentation":"

Describes updates to a Java-based Kinesis Data Analytics application's code configuration.

" + "documentation":"

Describes updates to a Flink-based Kinesis Data Analytics application's code configuration.

" }, "FlinkApplicationConfigurationUpdate":{ "shape":"FlinkApplicationConfigurationUpdate", - "documentation":"

Describes updates to a Java-based Kinesis Data Analytics application's configuration.

" + "documentation":"

Describes updates to a Flink-based Kinesis Data Analytics application's configuration.

" }, "EnvironmentPropertyUpdates":{ "shape":"EnvironmentPropertyUpdates", - "documentation":"

Describes updates to the environment properties for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes updates to the environment properties for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationSnapshotConfigurationUpdate":{ "shape":"ApplicationSnapshotConfigurationUpdate", - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" }, "VpcConfigurationUpdates":{ "shape":"VpcConfigurationUpdates", @@ -866,7 +867,7 @@ }, "RuntimeEnvironment":{ "shape":"RuntimeEnvironment", - "documentation":"

The runtime environment for the application (SQL-1.0 or FLINK-1_6).

" + "documentation":"

The runtime environment for the application (SQL-1.0, FLINK-1_6, or FLINK-1_8).

" }, "ServiceExecutionRole":{ "shape":"RoleARN", @@ -890,7 +891,7 @@ }, "ApplicationConfigurationDescription":{ "shape":"ApplicationConfigurationDescription", - "documentation":"

Provides details about the application's SQL or Java code and starting parameters.

" + "documentation":"

Provides details about the application's Java, SQL, or Scala code and starting parameters.

" }, "CloudWatchLoggingOptionDescriptions":{ "shape":"CloudWatchLoggingOptionDescriptions", @@ -934,10 +935,10 @@ "members":{ "SnapshotsEnabled":{ "shape":"BooleanObject", - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" } }, - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationSnapshotConfigurationDescription":{ "type":"structure", @@ -945,10 +946,10 @@ "members":{ "SnapshotsEnabled":{ "shape":"BooleanObject", - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" } }, - "documentation":"

Describes whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationSnapshotConfigurationUpdate":{ "type":"structure", @@ -956,10 +957,10 @@ "members":{ "SnapshotsEnabledUpdate":{ "shape":"BooleanObject", - "documentation":"

Describes updates to whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes updates to whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" } }, - "documentation":"

Describes updates to whether snapshots are enabled for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes updates to whether snapshots are enabled for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationStatus":{ "type":"string", @@ -970,7 +971,8 @@ "READY", "RUNNING", "UPDATING", - "AUTOSCALING" + "AUTOSCALING", + "FORCE_STOPPING" ] }, "ApplicationSummaries":{ @@ -1005,7 +1007,7 @@ }, "RuntimeEnvironment":{ "shape":"RuntimeEnvironment", - "documentation":"

The runtime environment for the application (SQL-1.0 or FLINK-1_6).

" + "documentation":"

The runtime environment for the application (SQL-1.0, FLINK-1_6, or FLINK-1_8).

" } }, "documentation":"

Provides application summary information, including the application Amazon Resource Name (ARN), name, and status.

" @@ -1038,7 +1040,7 @@ "documentation":"

The column delimiter. For example, in a CSV format, a comma (\",\") is the typical column delimiter.

" } }, - "documentation":"

For an SQL-based application, provides additional mapping information when the record format uses delimiters, such as CSV. For example, the following sample records use CSV format, where the records use the '\\n' as the row delimiter and a comma (\",\") as the column delimiter:

\"name1\", \"address1\"

\"name2\", \"address2\"

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, provides additional mapping information when the record format uses delimiters, such as CSV. For example, the following sample records use CSV format, where the records use the '\\n' as the row delimiter and a comma (\",\") as the column delimiter:

\"name1\", \"address1\"

\"name2\", \"address2\"

" }, "CheckpointConfiguration":{ "type":"structure", @@ -1046,11 +1048,11 @@ "members":{ "ConfigurationType":{ "shape":"ConfigurationType", - "documentation":"

Describes whether the application uses Amazon Kinesis Data Analytics' default checkpointing behavior. You must set this property to CUSTOM in order to set the CheckpointingEnabled, CheckpointInterval, or MinPauseBetweenCheckpoints parameters.

If this value is set to DEFAULT, the application will use the following values, even if they are set to other values using APIs or application code:

" + "documentation":"

Describes whether the application uses Kinesis Data Analytics' default checkpointing behavior. You must set this property to CUSTOM in order to set the CheckpointingEnabled, CheckpointInterval, or MinPauseBetweenCheckpoints parameters.

If this value is set to DEFAULT, the application will use the following values, even if they are set to other values using APIs or application code:

" }, "CheckpointingEnabled":{ "shape":"BooleanObject", - "documentation":"

Describes whether checkpointing is enabled for a Java-based Kinesis Data Analytics application.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a CheckpointingEnabled value of true, even if this value is set to another value using this API or in application code.

" + "documentation":"

Describes whether checkpointing is enabled for a Flink-based Kinesis Data Analytics application.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a CheckpointingEnabled value of true, even if this value is set to another value using this API or in application code.

" }, "CheckpointInterval":{ "shape":"CheckpointInterval", @@ -1058,10 +1060,10 @@ }, "MinPauseBetweenCheckpoints":{ "shape":"MinPauseBetweenCheckpoints", - "documentation":"

Describes the minimum time in milliseconds after a checkpoint operation completes that a new checkpoint operation can start. If a checkpoint operation takes longer than the CheckpointInterval, the application otherwise performs continual checkpoint operations. For more information, see Tuning Checkpointing in the Apache Flink Documentation.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a MinPauseBetweenCheckpoints value of 5000, even if this value is set using this API or in application code.

" + "documentation":"

Describes the minimum time in milliseconds after a checkpoint operation completes that a new checkpoint operation can start. If a checkpoint operation takes longer than the CheckpointInterval, the application otherwise performs continual checkpoint operations. For more information, see Tuning Checkpointing in the Apache Flink Documentation.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a MinPauseBetweenCheckpoints value of 5000, even if this value is set using this API or in application code.

" } }, - "documentation":"

Describes an application's checkpointing configuration. Checkpointing is the process of persisting application state for fault tolerance. For more information, see Checkpoints for Fault Tolerance in the Apache Flink Documentation.

" + "documentation":"

Describes an application's checkpointing configuration. Checkpointing is the process of persisting application state for fault tolerance. For more information, see Checkpoints for Fault Tolerance in the Apache Flink Documentation.
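Illustrative only, not part of the service model: expressed through boto3, a custom checkpointing configuration of this shape might look roughly like the snippet below (all values are placeholders).

    # CUSTOM is required for the explicit values below to take effect; with DEFAULT,
    # the service applies its own checkpointing defaults instead.
    checkpoint_configuration = {
        "ConfigurationType": "CUSTOM",
        "CheckpointingEnabled": True,
        "CheckpointInterval": 60000,          # milliseconds between checkpoints
        "MinPauseBetweenCheckpoints": 5000,   # milliseconds
    }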

" }, "CheckpointConfigurationDescription":{ "type":"structure", @@ -1072,7 +1074,7 @@ }, "CheckpointingEnabled":{ "shape":"BooleanObject", - "documentation":"

Describes whether checkpointing is enabled for a Java-based Kinesis Data Analytics application.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a CheckpointingEnabled value of true, even if this value is set to another value using this API or in application code.

" + "documentation":"

Describes whether checkpointing is enabled for a Flink-based Kinesis Data Analytics application.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a CheckpointingEnabled value of true, even if this value is set to another value using this API or in application code.

" }, "CheckpointInterval":{ "shape":"CheckpointInterval", @@ -1083,7 +1085,7 @@ "documentation":"

Describes the minimum time in milliseconds after a checkpoint operation completes that a new checkpoint operation can start.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a MinPauseBetweenCheckpoints value of 5000, even if this value is set using this API or in application code.

" } }, - "documentation":"

Describes checkpointing parameters for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes checkpointing parameters for a Flink-based Kinesis Data Analytics application.

" }, "CheckpointConfigurationUpdate":{ "type":"structure", @@ -1105,7 +1107,7 @@ "documentation":"

Describes updates to the minimum time in milliseconds after a checkpoint operation completes that a new checkpoint operation can start.

If CheckpointConfiguration.ConfigurationType is DEFAULT, the application will use a MinPauseBetweenCheckpoints value of 5000, even if this value is set using this API or in application code.

" } }, - "documentation":"

Describes updates to the checkpointing parameters for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes updates to the checkpointing parameters for a Flink-based Kinesis Data Analytics application.

" }, "CheckpointInterval":{ "type":"long", @@ -1173,18 +1175,18 @@ "members":{ "TextContent":{ "shape":"TextContent", - "documentation":"

The text-format code for a Java-based Kinesis Data Analytics application.

" + "documentation":"

The text-format code for a Flink-based Kinesis Data Analytics application.

" }, "ZipFileContent":{ "shape":"ZipFileContent", - "documentation":"

The zip-format code for a Java-based Kinesis Data Analytics application.

" + "documentation":"

The zip-format code for a Flink-based Kinesis Data Analytics application.

" }, "S3ContentLocation":{ "shape":"S3ContentLocation", "documentation":"

Information about the Amazon S3 bucket containing the application code.

" } }, - "documentation":"

Specifies either the application code, or the location of the application code, for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Specifies either the application code, or the location of the application code, for a Flink-based Kinesis Data Analytics application.

" }, "CodeContentDescription":{ "type":"structure", @@ -1206,7 +1208,7 @@ "documentation":"

The S3 bucket Amazon Resource Name (ARN), file key, and object version of the application code stored in Amazon S3.

" } }, - "documentation":"

Describes details about the application code for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes details about the application code for a Flink-based Kinesis Data Analytics application.

" }, "CodeContentType":{ "type":"string", @@ -1231,7 +1233,7 @@ "documentation":"

Describes an update to the location of code for an application.

" } }, - "documentation":"

Describes an update to the code of a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes an update to the code of a Flink-based Kinesis Data Analytics application.

" }, "CodeMD5":{ "type":"string", @@ -1284,7 +1286,7 @@ }, "RuntimeEnvironment":{ "shape":"RuntimeEnvironment", - "documentation":"

The runtime environment for the application (SQL-1.0 or FLINK-1_6).

" + "documentation":"

The runtime environment for the application (SQL-1.0, FLINK-1_6, or FLINK-1_8).
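For illustration only (application name, role ARN, and S3 location are placeholders): a CreateApplication call targeting the FLINK-1_8 runtime might look roughly like this with boto3.

    import boto3

    client = boto3.client("kinesisanalyticsv2")
    client.create_application(
        ApplicationName="example-flink-app",
        RuntimeEnvironment="FLINK-1_8",
        ServiceExecutionRole="arn:aws:iam::123456789012:role/example-kda-role",
        ApplicationConfiguration={
            "ApplicationCodeConfiguration": {
                "CodeContentType": "ZIPFILE",
                "CodeContent": {
                    "S3ContentLocation": {
                        "BucketARN": "arn:aws:s3:::example-code-bucket",
                        "FileKey": "example-flink-app.jar",
                    }
                },
            }
        },
    )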

" }, "ServiceExecutionRole":{ "shape":"RoleARN", @@ -1624,7 +1626,7 @@ "documentation":"

Specifies the format of the records on the output stream.

" } }, - "documentation":"

Describes the data format when records are written to the destination in an SQL-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes the data format when records are written to the destination in a SQL-based Kinesis Data Analytics application.

" }, "DiscoverInputSchemaRequest":{ "type":"structure", @@ -1682,7 +1684,7 @@ "documentation":"

Describes the execution property groups.

" } }, - "documentation":"

Describes execution properties for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes execution properties for a Flink-based Kinesis Data Analytics application.

" }, "EnvironmentPropertyDescriptions":{ "type":"structure", @@ -1692,7 +1694,7 @@ "documentation":"

Describes the execution property groups.

" } }, - "documentation":"

Describes the execution properties for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes the execution properties for a Flink-based Kinesis Data Analytics application.

" }, "EnvironmentPropertyUpdates":{ "type":"structure", @@ -1703,7 +1705,7 @@ "documentation":"

Describes updates to the execution property groups.

" } }, - "documentation":"

Describes updates to the execution property groups for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes updates to the execution property groups for a Flink-based Kinesis Data Analytics application.

" }, "ErrorMessage":{"type":"string"}, "FileKey":{ @@ -1716,7 +1718,7 @@ "members":{ "CheckpointConfiguration":{ "shape":"CheckpointConfiguration", - "documentation":"

Describes an application's checkpointing configuration. Checkpointing is the process of persisting application state for fault tolerance. For more information, see Checkpoints for Fault Tolerance in the Apache Flink Documentation.

" + "documentation":"

Describes an application's checkpointing configuration. Checkpointing is the process of persisting application state for fault tolerance. For more information, see Checkpoints for Fault Tolerance in the Apache Flink Documentation.

" }, "MonitoringConfiguration":{ "shape":"MonitoringConfiguration", @@ -1727,7 +1729,7 @@ "documentation":"

Describes parameters for how an application executes multiple tasks simultaneously.

" } }, - "documentation":"

Describes configuration parameters for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes configuration parameters for a Flink-based Kinesis Data Analytics application.

" }, "FlinkApplicationConfigurationDescription":{ "type":"structure", @@ -1746,10 +1748,10 @@ }, "JobPlanDescription":{ "shape":"JobPlanDescription", - "documentation":"

The job plan for an application. For more information about the job plan, see Jobs and Scheduling in the Apache Flink Documentation. To retrieve the job plan for the application, use the DescribeApplicationRequest$IncludeAdditionalDetails parameter of the DescribeApplication operation.

" + "documentation":"

The job plan for an application. For more information about the job plan, see Jobs and Scheduling in the Apache Flink Documentation. To retrieve the job plan for the application, use the DescribeApplicationRequest$IncludeAdditionalDetails parameter of the DescribeApplication operation.

" } }, - "documentation":"

Describes configuration parameters for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes configuration parameters for a Flink-based Kinesis Data Analytics application.

" }, "FlinkApplicationConfigurationUpdate":{ "type":"structure", @@ -1767,17 +1769,17 @@ "documentation":"

Describes updates to the parameters for how an application executes multiple tasks simultaneously.

" } }, - "documentation":"

Describes updates to the configuration parameters for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes updates to the configuration parameters for a Flink-based Kinesis Data Analytics application.

" }, "FlinkRunConfiguration":{ "type":"structure", "members":{ "AllowNonRestoredState":{ "shape":"BooleanObject", - "documentation":"

When restoring from a savepoint, specifies whether the runtime is allowed to skip a state that cannot be mapped to the new program. This will happen if the program is updated between savepoints to remove stateful parameters, and state data in the savepoint no longer corresponds to valid application data. For more information, see Allowing Non-Restored State in the Apache Flink documentation.

" + "documentation":"

When restoring from a snapshot, specifies whether the runtime is allowed to skip a state that cannot be mapped to the new program. This will happen if the program is updated between snapshots to remove stateful parameters, and state data in the snapshot no longer corresponds to valid application data. For more information, see Allowing Non-Restored State in the Apache Flink documentation.

This value defaults to false. If you update your application without specifying this parameter, AllowNonRestoredState will be set to false, even if it was previously set to true.
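A rough boto3 sketch of starting an application with this flag enabled (placeholder application name; the restore type shown is one of several valid options):

    import boto3

    client = boto3.client("kinesisanalyticsv2")
    client.start_application(
        ApplicationName="example-flink-app",
        RunConfiguration={
            "ApplicationRestoreConfiguration": {
                "ApplicationRestoreType": "RESTORE_FROM_LATEST_SNAPSHOT"
            },
            # Explicitly opt in to dropping state that no longer maps to the program;
            # leaving this out keeps AllowNonRestoredState at its default of false.
            "FlinkRunConfiguration": {"AllowNonRestoredState": True},
        },
    )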

" } }, - "documentation":"

Describes the starting parameters for an Apache Flink-based Kinesis Data Analytics application.

" + "documentation":"

Describes the starting parameters for a Flink-based Kinesis Data Analytics application.

" }, "Id":{ "type":"string", @@ -1832,7 +1834,7 @@ "documentation":"

Describes the format of the data in the streaming source, and how each data element maps to corresponding columns in the in-application stream that is being created.

Also used to describe the format of the reference data source.

" } }, - "documentation":"

When you configure the application input for an SQL-based Amazon Kinesis Data Analytics application, you specify the streaming source, the in-application stream name that is created, and the mapping between the two.

" + "documentation":"

When you configure the application input for a SQL-based Kinesis Data Analytics application, you specify the streaming source, the in-application stream name that is created, and the mapping between the two.
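Illustrative only: an Input of this shape could be attached with AddApplicationInput roughly as follows (stream ARN, column names, and version ID are placeholders).

    import boto3

    client = boto3.client("kinesisanalyticsv2")
    client.add_application_input(
        ApplicationName="example-sql-app",
        CurrentApplicationVersionId=1,
        Input={
            "NamePrefix": "SOURCE_SQL_STREAM",
            "KinesisStreamsInput": {
                "ResourceARN": "arn:aws:kinesis:us-east-1:123456789012:stream/example-input"
            },
            "InputSchema": {
                "RecordFormat": {
                    "RecordFormatType": "JSON",
                    "MappingParameters": {"JSONMappingParameters": {"RecordRowPath": "$"}},
                },
                "RecordColumns": [
                    {"Name": "ticker", "SqlType": "VARCHAR(4)", "Mapping": "$.ticker"}
                ],
            },
        },
    )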

" }, "InputDescription":{ "type":"structure", @@ -1874,7 +1876,7 @@ "documentation":"

The point at which the application is configured to read from the input stream.

" } }, - "documentation":"

Describes the application input configuration for an SQL-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes the application input configuration for a SQL-based Kinesis Data Analytics application.

" }, "InputDescriptions":{ "type":"list", @@ -1886,10 +1888,10 @@ "members":{ "ResourceARN":{ "shape":"ResourceARN", - "documentation":"

The ARN of the AWS Lambda function that operates on records in the stream.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" + "documentation":"

The ARN of the AWS Lambda function that operates on records in the stream.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" } }, - "documentation":"

An object that contains the Amazon Resource Name (ARN) of the AWS Lambda function that is used to preprocess records in the stream in an SQL-based Amazon Kinesis Data Analytics application.

" + "documentation":"

An object that contains the Amazon Resource Name (ARN) of the AWS Lambda function that is used to preprocess records in the stream in a SQL-based Kinesis Data Analytics application.

" }, "InputLambdaProcessorDescription":{ "type":"structure", @@ -1897,14 +1899,14 @@ "members":{ "ResourceARN":{ "shape":"ResourceARN", - "documentation":"

The ARN of the AWS Lambda function that is used to preprocess the records in the stream.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" + "documentation":"

The ARN of the AWS Lambda function that is used to preprocess the records in the stream.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" }, "RoleARN":{ "shape":"RoleARN", "documentation":"

The ARN of the IAM role that is used to access the AWS Lambda function.

Provided for backward compatibility. Applications that are created with the current API version have an application-level service execution role rather than a resource-level role.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, an object that contains the Amazon Resource Name (ARN) of the AWS Lambda function that is used to preprocess records in the stream.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, an object that contains the Amazon Resource Name (ARN) of the AWS Lambda function that is used to preprocess records in the stream.

" }, "InputLambdaProcessorUpdate":{ "type":"structure", @@ -1912,10 +1914,10 @@ "members":{ "ResourceARNUpdate":{ "shape":"ResourceARN", - "documentation":"

The Amazon Resource Name (ARN) of the new AWS Lambda function that is used to preprocess the records in the stream.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" + "documentation":"

The Amazon Resource Name (ARN) of the new AWS Lambda function that is used to preprocess the records in the stream.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, represents an update to the InputLambdaProcessor that is used to preprocess the records in the stream.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, represents an update to the InputLambdaProcessor that is used to preprocess the records in the stream.

" }, "InputParallelism":{ "type":"structure", @@ -1925,7 +1927,7 @@ "documentation":"

The number of in-application streams to create.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the number of in-application streams to create for a given streaming source.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the number of in-application streams to create for a given streaming source.

" }, "InputParallelismCount":{ "type":"integer", @@ -1941,7 +1943,7 @@ "documentation":"

The number of in-application streams to create for the specified streaming source.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, provides updates to the parallelism count.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, provides updates to the parallelism count.

" }, "InputProcessingConfiguration":{ "type":"structure", @@ -1952,7 +1954,7 @@ "documentation":"

The InputLambdaProcessor that is used to preprocess the records in the stream before being processed by your application code.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes a processor that is used to preprocess the records in the stream before being processed by your application code. Currently, the only input processor available is AWS Lambda.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes a processor that is used to preprocess the records in the stream before being processed by your application code. Currently, the only input processor available is AWS Lambda.

" }, "InputProcessingConfigurationDescription":{ "type":"structure", @@ -1962,7 +1964,7 @@ "documentation":"

Provides configuration information about the associated InputLambdaProcessorDescription

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, provides the configuration information about an input processor. Currently, the only input processor available is AWS Lambda.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, provides the configuration information about an input processor. Currently, the only input processor available is AWS Lambda.

" }, "InputProcessingConfigurationUpdate":{ "type":"structure", @@ -1973,7 +1975,7 @@ "documentation":"

Provides update information for an InputLambdaProcessor.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes updates to an InputProcessingConfiguration.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes updates to an InputProcessingConfiguration.

" }, "InputSchemaUpdate":{ "type":"structure", @@ -1991,7 +1993,7 @@ "documentation":"

A list of RecordColumn objects. Each object describes the mapping of the streaming source element to the corresponding column in the in-application stream.

" } }, - "documentation":"

Describes updates for an SQL-based Amazon Kinesis Data Analytics application's input schema.

" + "documentation":"

Describes updates for a SQL-based Kinesis Data Analytics application's input schema.

" }, "InputStartingPosition":{ "type":"string", @@ -2044,7 +2046,7 @@ "documentation":"

Describes the parallelism updates (the number of in-application streams Kinesis Data Analytics creates for the specific streaming source).

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes updates to a specific input configuration (identified by the InputId of an application).

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes updates to a specific input configuration (identified by the InputId of an application).

" }, "InputUpdates":{ "type":"list", @@ -2087,7 +2089,7 @@ "documentation":"

The path to the top-level parent that contains the records.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, provides additional mapping information when JSON is the record format on the streaming source.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, provides additional mapping information when JSON is the record format on the streaming source.

" }, "JobPlanDescription":{"type":"string"}, "KinesisAnalyticsARN":{ @@ -2105,7 +2107,7 @@ "documentation":"

The Amazon Resource Name (ARN) of the delivery stream.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, identifies a Kinesis Data Firehose delivery stream as the streaming source. You provide the delivery stream's Amazon Resource Name (ARN).

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, identifies a Kinesis Data Firehose delivery stream as the streaming source. You provide the delivery stream's Amazon Resource Name (ARN).

" }, "KinesisFirehoseInputDescription":{ "type":"structure", @@ -2131,7 +2133,7 @@ "documentation":"

The Amazon Resource Name (ARN) of the input delivery stream to read.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, when updating application input configuration, provides information about a Kinesis Data Firehose delivery stream as the streaming source.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, when updating application input configuration, provides information about a Kinesis Data Firehose delivery stream as the streaming source.

" }, "KinesisFirehoseOutput":{ "type":"structure", @@ -2142,7 +2144,7 @@ "documentation":"

The ARN of the destination delivery stream to write to.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, when configuring application output, identifies a Kinesis Data Firehose delivery stream as the destination. You provide the stream Amazon Resource Name (ARN) of the delivery stream.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, when configuring application output, identifies a Kinesis Data Firehose delivery stream as the destination. You provide the stream Amazon Resource Name (ARN) of the delivery stream.

" }, "KinesisFirehoseOutputDescription":{ "type":"structure", @@ -2157,7 +2159,7 @@ "documentation":"

The ARN of the IAM role that Kinesis Data Analytics can assume to access the stream.

Provided for backward compatibility. Applications that are created with the current API version have an application-level service execution role rather than a resource-level role.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application's output, describes the Kinesis Data Firehose delivery stream that is configured as its destination.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application's output, describes the Kinesis Data Firehose delivery stream that is configured as its destination.

" }, "KinesisFirehoseOutputUpdate":{ "type":"structure", @@ -2168,7 +2170,7 @@ "documentation":"

The Amazon Resource Name (ARN) of the delivery stream to write to.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, when updating an output configuration using the UpdateApplication operation, provides information about a Kinesis Data Firehose delivery stream that is configured as the destination.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, when updating an output configuration using the UpdateApplication operation, provides information about a Kinesis Data Firehose delivery stream that is configured as the destination.

" }, "KinesisStreamsInput":{ "type":"structure", @@ -2179,7 +2181,7 @@ "documentation":"

The ARN of the input Kinesis data stream to read.

" } }, - "documentation":"

Identifies an Amazon Kinesis data stream as the streaming source. You provide the stream's Amazon Resource Name (ARN).

" + "documentation":"

Identifies a Kinesis data stream as the streaming source. You provide the stream's Amazon Resource Name (ARN).

" }, "KinesisStreamsInputDescription":{ "type":"structure", @@ -2194,7 +2196,7 @@ "documentation":"

The ARN of the IAM role that Kinesis Data Analytics can assume to access the stream.

Provided for backward compatibility. Applications that are created with the current API version have an application-level service execution role rather than a resource-level role.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the Kinesis data stream that is configured as the streaming source in the application input configuration.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the Kinesis data stream that is configured as the streaming source in the application input configuration.

" }, "KinesisStreamsInputUpdate":{ "type":"structure", @@ -2205,7 +2207,7 @@ "documentation":"

The Amazon Resource Name (ARN) of the input Kinesis data stream to read.

" } }, - "documentation":"

When you update the input configuration for an SQL-based Amazon Kinesis Data Analytics application, provides information about an Amazon Kinesis stream as the streaming source.

" + "documentation":"

When you update the input configuration for a SQL-based Kinesis Data Analytics application, provides information about a Kinesis stream as the streaming source.

" }, "KinesisStreamsOutput":{ "type":"structure", @@ -2216,7 +2218,7 @@ "documentation":"

The ARN of the destination Kinesis data stream to write to.

" } }, - "documentation":"

When you configure an SQL-based Amazon Kinesis Data Analytics application's output, identifies a Kinesis data stream as the destination. You provide the stream Amazon Resource Name (ARN).

" + "documentation":"

When you configure a SQL-based Kinesis Data Analytics application's output, identifies a Kinesis data stream as the destination. You provide the stream Amazon Resource Name (ARN).

" }, "KinesisStreamsOutputDescription":{ "type":"structure", @@ -2231,7 +2233,7 @@ "documentation":"

The ARN of the IAM role that Kinesis Data Analytics can assume to access the stream.

Provided for backward compatibility. Applications that are created with the current API version have an application-level service execution role rather than a resource-level role.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application's output, describes the Kinesis data stream that is configured as its destination.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application's output, describes the Kinesis data stream that is configured as its destination.

" }, "KinesisStreamsOutputUpdate":{ "type":"structure", @@ -2242,7 +2244,7 @@ "documentation":"

The Amazon Resource Name (ARN) of the Kinesis data stream where you want to write the output.

" } }, - "documentation":"

When you update an SQL-based Amazon Kinesis Data Analytics application's output configuration using the UpdateApplication operation, provides information about a Kinesis data stream that is configured as the destination.

" + "documentation":"

When you update a SQL-based Kinesis Data Analytics application's output configuration using the UpdateApplication operation, provides information about a Kinesis data stream that is configured as the destination.

" }, "LambdaOutput":{ "type":"structure", @@ -2250,10 +2252,10 @@ "members":{ "ResourceARN":{ "shape":"ResourceARN", - "documentation":"

The Amazon Resource Name (ARN) of the destination Lambda function to write to.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" + "documentation":"

The Amazon Resource Name (ARN) of the destination Lambda function to write to.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" } }, - "documentation":"

When you configure an SQL-based Amazon Kinesis Data Analytics application's output, identifies an AWS Lambda function as the destination. You provide the function Amazon Resource Name (ARN) of the Lambda function.

" + "documentation":"

When you configure a SQL-based Kinesis Data Analytics application's output, identifies an AWS Lambda function as the destination. You provide the function Amazon Resource Name (ARN) of the Lambda function.

" }, "LambdaOutputDescription":{ "type":"structure", @@ -2268,7 +2270,7 @@ "documentation":"

The ARN of the IAM role that Kinesis Data Analytics can assume to write to the destination function.

Provided for backward compatibility. Applications that are created with the current API version have an application-level service execution role rather than a resource-level role.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application output, describes the AWS Lambda function that is configured as its destination.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application's output, describes the AWS Lambda function that is configured as its destination.

" }, "LambdaOutputUpdate":{ "type":"structure", @@ -2276,10 +2278,10 @@ "members":{ "ResourceARNUpdate":{ "shape":"ResourceARN", - "documentation":"

The Amazon Resource Name (ARN) of the destination AWS Lambda function.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" + "documentation":"

The Amazon Resource Name (ARN) of the destination AWS Lambda function.

To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the Lambda function ARN. For more information about Lambda ARNs, see Example ARNs: AWS Lambda

" } }, - "documentation":"

When you update an SQL-based Amazon Kinesis Data Analytics application's output configuration using the UpdateApplication operation, provides information about an AWS Lambda function that is configured as the destination.

" + "documentation":"

When you update a SQL-based Kinesis Data Analytics application's output configuration using the UpdateApplication operation, provides information about an AWS Lambda function that is configured as the destination.

" }, "LimitExceededException":{ "type":"structure", @@ -2403,7 +2405,7 @@ "documentation":"

Provides additional mapping information when the record format uses delimiters (for example, CSV).

" } }, - "documentation":"

When you configure an SQL-based Amazon Kinesis Data Analytics application's input at the time of creating or updating an application, provides additional mapping information specific to the record format (such as JSON, CSV, or record fields delimited by some delimiter) on the streaming source.

" + "documentation":"

When you configure a SQL-based Kinesis Data Analytics application's input at the time of creating or updating an application, provides additional mapping information specific to the record format (such as JSON, CSV, or record fields delimited by some delimiter) on the streaming source.

" }, "MetricsLevel":{ "type":"string", @@ -2428,14 +2430,14 @@ }, "MetricsLevel":{ "shape":"MetricsLevel", - "documentation":"

Describes the granularity of the CloudWatch Logs for an application.

" + "documentation":"

Describes the granularity of the CloudWatch Logs for an application. The Parallelism level is not recommended for applications with a Parallelism over 64 due to excessive costs.
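Illustrative only: a CUSTOM monitoring configuration that keeps metrics at APPLICATION granularity (values are placeholders; PARALLELISM is the level cautioned against above for highly parallel applications).

    monitoring_configuration = {
        "ConfigurationType": "CUSTOM",
        "MetricsLevel": "APPLICATION",   # APPLICATION, TASK, OPERATOR, or PARALLELISM
        "LogLevel": "INFO",
    }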

" }, "LogLevel":{ "shape":"LogLevel", "documentation":"

Describes the verbosity of the CloudWatch Logs for an application.

" } }, - "documentation":"

Describes configuration parameters for Amazon CloudWatch logging for a Java-based Kinesis Data Analytics application. For more information about CloudWatch logging, see Monitoring.

" + "documentation":"

Describes configuration parameters for Amazon CloudWatch logging for a Flink-based Kinesis Data Analytics application. For more information about CloudWatch logging, see Monitoring.

" }, "MonitoringConfigurationDescription":{ "type":"structure", @@ -2453,7 +2455,7 @@ "documentation":"

Describes the verbosity of the CloudWatch Logs for an application.

" } }, - "documentation":"

Describes configuration parameters for CloudWatch logging for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes configuration parameters for CloudWatch logging for a Flink-based Kinesis Data Analytics application.

" }, "MonitoringConfigurationUpdate":{ "type":"structure", @@ -2464,14 +2466,14 @@ }, "MetricsLevelUpdate":{ "shape":"MetricsLevel", - "documentation":"

Describes updates to the granularity of the CloudWatch Logs for an application.

" + "documentation":"

Describes updates to the granularity of the CloudWatch Logs for an application. The Parallelism level is not recommended for applications with a Parallelism over 64 due to excessive costs.

" }, "LogLevelUpdate":{ "shape":"LogLevel", "documentation":"

Describes updates to the verbosity of the CloudWatch Logs for an application.

" } }, - "documentation":"

Describes updates to configuration parameters for Amazon CloudWatch logging for a Java-based Kinesis Data Analytics application.

" + "documentation":"

Describes updates to configuration parameters for Amazon CloudWatch logging for a Flink-based Kinesis Data Analytics application.

" }, "NextToken":{ "type":"string", @@ -2496,11 +2498,11 @@ }, "KinesisStreamsOutput":{ "shape":"KinesisStreamsOutput", - "documentation":"

Identifies an Amazon Kinesis data stream as the destination.

" + "documentation":"

Identifies a Kinesis data stream as the destination.

" }, "KinesisFirehoseOutput":{ "shape":"KinesisFirehoseOutput", - "documentation":"

Identifies an Amazon Kinesis Data Firehose delivery stream as the destination.

" + "documentation":"

Identifies a Kinesis Data Firehose delivery stream as the destination.

" }, "LambdaOutput":{ "shape":"LambdaOutput", @@ -2511,7 +2513,7 @@ "documentation":"

Describes the data format when records are written to the destination.

" } }, - "documentation":"

Describes an SQL-based Amazon Kinesis Data Analytics application's output configuration, in which you identify an in-application stream and a destination where you want the in-application stream data to be written. The destination can be a Kinesis data stream or a Kinesis Data Firehose delivery stream.

" + "documentation":"

Describes a SQL-based Kinesis Data Analytics application's output configuration, in which you identify an in-application stream and a destination where you want the in-application stream data to be written. The destination can be a Kinesis data stream or a Kinesis Data Firehose delivery stream.
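Illustrative only (stream ARN and names are placeholders): an Output of this shape could be attached with AddApplicationOutput roughly as follows.

    import boto3

    client = boto3.client("kinesisanalyticsv2")
    client.add_application_output(
        ApplicationName="example-sql-app",
        CurrentApplicationVersionId=2,
        Output={
            "Name": "DESTINATION_SQL_STREAM",
            "KinesisStreamsOutput": {
                "ResourceARN": "arn:aws:kinesis:us-east-1:123456789012:stream/example-output"
            },
            "DestinationSchema": {"RecordFormatType": "JSON"},
        },
    )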

" }, "OutputDescription":{ "type":"structure", @@ -2541,7 +2543,7 @@ "documentation":"

The data format used for writing data to the destination.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the application output configuration, which includes the in-application stream name and the destination where the stream data is written. The destination can be a Kinesis data stream or a Kinesis Data Firehose delivery stream.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the application output configuration, which includes the in-application stream name and the destination where the stream data is written. The destination can be a Kinesis data stream or a Kinesis Data Firehose delivery stream.

" }, "OutputDescriptions":{ "type":"list", @@ -2576,7 +2578,7 @@ "documentation":"

Describes the data format when records are written to the destination.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes updates to the output configuration identified by the OutputId.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes updates to the output configuration identified by the OutputId.

" }, "OutputUpdates":{ "type":"list", @@ -2600,18 +2602,18 @@ }, "Parallelism":{ "shape":"Parallelism", - "documentation":"

Describes the initial number of parallel tasks that a Java-based Kinesis Data Analytics application can perform. If AutoScalingEnabled is set to True, Kinesis Data Analytics increases the CurrentParallelism value in response to application load. The service can increase the CurrentParallelism value up to the maximum parallelism, which is ParalellismPerKPU times the maximum KPUs for the application. The maximum KPUs for an application is 32 by default, and can be increased by requesting a limit increase. If application load is reduced, the service can reduce the CurrentParallelism value down to the Parallelism setting.

" + "documentation":"

Describes the initial number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform. If AutoScalingEnabled is set to True, Kinesis Data Analytics increases the CurrentParallelism value in response to application load. The service can increase the CurrentParallelism value up to the maximum parallelism, which is ParallelismPerKPU times the maximum KPUs for the application. The maximum KPUs for an application is 32 by default, and can be increased by requesting a limit increase. If application load is reduced, the service can reduce the CurrentParallelism value down to the Parallelism setting.

" }, "ParallelismPerKPU":{ "shape":"ParallelismPerKPU", - "documentation":"

Describes the number of parallel tasks that a Java-based Kinesis Data Analytics application can perform per Kinesis Processing Unit (KPU) used by the application. For more information about KPUs, see Amazon Kinesis Data Analytics Pricing.

" + "documentation":"

Describes the number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform per Kinesis Processing Unit (KPU) used by the application. For more information about KPUs, see Amazon Kinesis Data Analytics Pricing.

" }, "AutoScalingEnabled":{ "shape":"BooleanObject", "documentation":"

Describes whether the Kinesis Data Analytics service can increase the parallelism of the application in response to increased throughput.

" } }, - "documentation":"

Describes parameters for how a Java-based Amazon Kinesis Data Analytics application executes multiple tasks simultaneously. For more information about parallelism, see Parallel Execution in the Apache Flink Documentation.

" + "documentation":"

Describes parameters for how a Flink-based Kinesis Data Analytics application executes multiple tasks simultaneously. For more information about parallelism, see Parallel Execution in the Apache Flink Documentation.
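Illustrative only: a custom parallelism configuration of this shape might be written as the following placeholder dictionary.

    parallelism_configuration = {
        "ConfigurationType": "CUSTOM",
        "Parallelism": 4,            # initial number of parallel tasks
        "ParallelismPerKPU": 1,      # tasks per Kinesis Processing Unit
        "AutoScalingEnabled": True,
    }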

" }, "ParallelismConfigurationDescription":{ "type":"structure", @@ -2622,22 +2624,22 @@ }, "Parallelism":{ "shape":"Parallelism", - "documentation":"

Describes the initial number of parallel tasks that a Java-based Kinesis Data Analytics application can perform. If AutoScalingEnabled is set to True, then Kinesis Data Analytics can increase the CurrentParallelism value in response to application load. The service can increase CurrentParallelism up to the maximum parallelism, which is ParalellismPerKPU times the maximum KPUs for the application. The maximum KPUs for an application is 32 by default, and can be increased by requesting a limit increase. If application load is reduced, the service can reduce the CurrentParallelism value down to the Parallelism setting.

" + "documentation":"

Describes the initial number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform. If AutoScalingEnabled is set to True, then Kinesis Data Analytics can increase the CurrentParallelism value in response to application load. The service can increase CurrentParallelism up to the maximum parallelism, which is ParallelismPerKPU times the maximum KPUs for the application. The maximum KPUs for an application is 32 by default, and can be increased by requesting a limit increase. If application load is reduced, the service can reduce the CurrentParallelism value down to the Parallelism setting.

" }, "ParallelismPerKPU":{ "shape":"ParallelismPerKPU", - "documentation":"

Describes the number of parallel tasks that a Java-based Kinesis Data Analytics application can perform per Kinesis Processing Unit (KPU) used by the application.

" + "documentation":"

Describes the number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform per Kinesis Processing Unit (KPU) used by the application.

" }, "CurrentParallelism":{ "shape":"Parallelism", - "documentation":"

Describes the current number of parallel tasks that a Java-based Kinesis Data Analytics application can perform. If AutoScalingEnabled is set to True, Kinesis Data Analytics can increase this value in response to application load. The service can increase this value up to the maximum parallelism, which is ParalellismPerKPU times the maximum KPUs for the application. The maximum KPUs for an application is 32 by default, and can be increased by requesting a limit increase. If application load is reduced, the service can reduce the CurrentParallelism value down to the Parallelism setting.

" + "documentation":"

Describes the current number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform. If AutoScalingEnabled is set to True, Kinesis Data Analytics can increase this value in response to application load. The service can increase this value up to the maximum parallelism, which is ParallelismPerKPU times the maximum KPUs for the application. The maximum KPUs for an application is 32 by default, and can be increased by requesting a limit increase. If application load is reduced, the service can reduce the CurrentParallelism value down to the Parallelism setting.

" }, "AutoScalingEnabled":{ "shape":"BooleanObject", "documentation":"

Describes whether the Kinesis Data Analytics service can increase the parallelism of the application in response to increased throughput.

" } }, - "documentation":"

Describes parameters for how a Java-based Kinesis Data Analytics application executes multiple tasks simultaneously.

" + "documentation":"

Describes parameters for how a Flink-based Kinesis Data Analytics application executes multiple tasks simultaneously.

" }, "ParallelismConfigurationUpdate":{ "type":"structure", @@ -2659,7 +2661,7 @@ "documentation":"

Describes updates to whether the Kinesis Data Analytics service can increase the parallelism of the application in response to increased throughput.

" } }, - "documentation":"

Describes updates to parameters for how a Java-based Kinesis Data Analytics application executes multiple tasks simultaneously.

" + "documentation":"

Describes updates to parameters for how a Flink-based Kinesis Data Analytics application executes multiple tasks simultaneously.

" }, "ParallelismPerKPU":{ "type":"integer", @@ -2695,7 +2697,7 @@ "documentation":"

Describes the value of an application execution property key-value pair.

" } }, - "documentation":"

Property key-value pairs passed into a Java-based Kinesis Data Analytics application.

" + "documentation":"

Property key-value pairs passed into a Flink-based Kinesis Data Analytics application.
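Illustrative only: property groups are passed as plain string key-value maps; the group ID and keys below are placeholders.

    environment_properties = {
        "PropertyGroups": [
            {
                "PropertyGroupId": "ExampleConfigProperties",
                "PropertyMap": {
                    "aws.region": "us-east-1",
                    "flush.interval.ms": "1000",
                },
            }
        ]
    }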

" }, "PropertyGroups":{ "type":"list", @@ -2744,7 +2746,7 @@ "documentation":"

The type of column created in the in-application input stream or reference table.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the mapping of each data element in the streaming source to the corresponding column in the in-application stream.

Also used to describe the format of the reference data source.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the mapping of each data element in the streaming source to the corresponding column in the in-application stream.

Also used to describe the format of the reference data source.

" }, "RecordColumnDelimiter":{ "type":"string", @@ -2792,7 +2794,7 @@ "documentation":"

When you configure application input at the time of creating or updating an application, provides additional mapping information specific to the record format (such as JSON, CSV, or record fields delimited by some delimiter) on the streaming source.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the record format and relevant mapping information that should be applied to schematize the records on the stream.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the record format and relevant mapping information that should be applied to schematize the records on the stream.

" }, "RecordFormatType":{ "type":"string", @@ -2832,7 +2834,7 @@ "documentation":"

Describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the reference data source by providing the source information (Amazon S3 bucket name and object key name), the resulting in-application table name that is created, and the necessary schema to map the data elements in the Amazon S3 object to the in-application table.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the reference data source by providing the source information (Amazon S3 bucket name and object key name), the resulting in-application table name that is created, and the necessary schema to map the data elements in the Amazon S3 object to the in-application table.

" }, "ReferenceDataSourceDescription":{ "type":"structure", @@ -2859,7 +2861,7 @@ "documentation":"

Describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the reference data source configured for an application.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the reference data source configured for an application.

" }, "ReferenceDataSourceDescriptions":{ "type":"list", @@ -2886,7 +2888,7 @@ "documentation":"

Describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream.

" } }, - "documentation":"

When you update a reference data source configuration for a SQL-based Amazon Kinesis Data Analytics application, this object provides all the updated values (such as the source bucket name and object key name), the in-application table name that is created, and updated mapping information that maps the data in the Amazon S3 object to the in-application reference table that is created.

" + "documentation":"

When you update a reference data source configuration for a SQL-based Kinesis Data Analytics application, this object provides all the updated values (such as the source bucket name and object key name), the in-application table name that is created, and updated mapping information that maps the data in the Amazon S3 object to the in-application reference table that is created.

" }, "ReferenceDataSourceUpdates":{ "type":"list", @@ -2923,7 +2925,7 @@ "members":{ "Message":{"shape":"ErrorMessage"} }, - "documentation":"

Discovery failed to get a record from the streaming source because of the Amazon Kinesis Streams ProvisionedThroughputExceededException. For more information, see GetRecords in the Amazon Kinesis Streams API Reference.

", + "documentation":"

Discovery failed to get a record from the streaming source because of the Kinesis Streams ProvisionedThroughputExceededException. For more information, see GetRecords in the Amazon Kinesis Streams API Reference.

", "exception":true }, "RoleARN":{ @@ -2937,18 +2939,18 @@ "members":{ "FlinkRunConfiguration":{ "shape":"FlinkRunConfiguration", - "documentation":"

Describes the starting parameters for an Apache Flink-based Kinesis Data Analytics application.

" + "documentation":"

Describes the starting parameters for a Flink-based Kinesis Data Analytics application.

" }, "SqlRunConfigurations":{ "shape":"SqlRunConfigurations", - "documentation":"

Describes the starting parameters for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

Describes the starting parameters for a SQL-based Kinesis Data Analytics application.

" }, "ApplicationRestoreConfiguration":{ "shape":"ApplicationRestoreConfiguration", "documentation":"

Describes the restore behavior of a restarting application.

" } }, - "documentation":"

Describes the starting parameters for an Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes the starting parameters for a Kinesis Data Analytics application.

" }, "RunConfigurationDescription":{ "type":"structure", @@ -2966,7 +2968,7 @@ "members":{ "FlinkRunConfiguration":{ "shape":"FlinkRunConfiguration", - "documentation":"

Describes the starting parameters for an Apache Flink-based Kinesis Data Analytics application.

" + "documentation":"

Describes the starting parameters for a Flink-based Kinesis Data Analytics application.

" }, "ApplicationRestoreConfiguration":{ "shape":"ApplicationRestoreConfiguration", @@ -3003,7 +3005,7 @@ "documentation":"

The version of the object containing the application code.

" } }, - "documentation":"

Describes the location of a Java-based Amazon Kinesis Data Analytics application's code stored in an S3 bucket.

" + "documentation":"

Describes the location of a Flink-based Kinesis Data Analytics application's code stored in an S3 bucket.

" }, "S3Configuration":{ "type":"structure", @@ -3021,7 +3023,7 @@ "documentation":"

The name of the object that contains the data.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, provides a description of an Amazon S3 data source, including the Amazon Resource Name (ARN) of the S3 bucket and the name of the Amazon S3 object that contains the data.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, provides a description of an Amazon S3 data source, including the Amazon Resource Name (ARN) of the S3 bucket and the name of the Amazon S3 object that contains the data.

" }, "S3ContentLocation":{ "type":"structure", @@ -3043,7 +3045,7 @@ "documentation":"

The version of the object containing the application code.

" } }, - "documentation":"

For a Java-based Amazon Kinesis Data Analytics application, provides a description of an Amazon S3 object, including the Amazon Resource Name (ARN) of the S3 bucket, the name of the Amazon S3 object that contains the data, and the version number of the Amazon S3 object that contains the data.

" + "documentation":"

For a Flink-based Kinesis Data Analytics application, provides a description of an Amazon S3 object, including the Amazon Resource Name (ARN) of the S3 bucket, the name of the Amazon S3 object that contains the data, and the version number of the Amazon S3 object that contains the data.

" }, "S3ContentLocationUpdate":{ "type":"structure", @@ -3061,7 +3063,7 @@ "documentation":"

The new version of the object containing the application code.

" } }, - "documentation":"

Describes an update for the Amazon S3 code content location for a Java-based Amazon Kinesis Data Analytics application.

" + "documentation":"

Describes an update for the Amazon S3 code content location for a Flink-based Kinesis Data Analytics application.

" }, "S3ReferenceDataSource":{ "type":"structure", @@ -3075,7 +3077,7 @@ "documentation":"

The object key name containing the reference data.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, identifies the Amazon S3 bucket and object that contains the reference data.

A Kinesis Data Analytics application loads reference data only once. If the data changes, you call the UpdateApplication operation to trigger reloading of data into your application.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, identifies the Amazon S3 bucket and object that contains the reference data.

A Kinesis Data Analytics application loads reference data only once. If the data changes, you call the UpdateApplication operation to trigger reloading of data into your application.
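Illustrative only (bucket ARN, key, and schema are placeholders): a reference data source of this shape could be attached with AddApplicationReferenceDataSource roughly as follows.

    import boto3

    client = boto3.client("kinesisanalyticsv2")
    client.add_application_reference_data_source(
        ApplicationName="example-sql-app",
        CurrentApplicationVersionId=3,
        ReferenceDataSource={
            "TableName": "ReferenceTable",
            "S3ReferenceDataSource": {
                "BucketARN": "arn:aws:s3:::example-reference-bucket",
                "FileKey": "reference.json",
            },
            "ReferenceSchema": {
                "RecordFormat": {
                    "RecordFormatType": "JSON",
                    "MappingParameters": {"JSONMappingParameters": {"RecordRowPath": "$"}},
                },
                "RecordColumns": [
                    {"Name": "symbol", "SqlType": "VARCHAR(4)", "Mapping": "$.symbol"}
                ],
            },
        },
    )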

" }, "S3ReferenceDataSourceDescription":{ "type":"structure", @@ -3097,7 +3099,7 @@ "documentation":"

The ARN of the IAM role that Kinesis Data Analytics can assume to read the Amazon S3 object on your behalf to populate the in-application reference table.

Provided for backward compatibility. Applications that are created with the current API version have an application-level service execution role rather than a resource-level role.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, provides the bucket name and object key name that stores the reference data.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, provides the bucket name and object key name that stores the reference data.

" }, "S3ReferenceDataSourceUpdate":{ "type":"structure", @@ -3111,7 +3113,7 @@ "documentation":"

The object key name.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the Amazon S3 bucket name and object key name for an in-application reference table.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the Amazon S3 bucket name and object key name for an in-application reference table.

" }, "SecurityGroupId":{"type":"string"}, "SecurityGroupIds":{ @@ -3195,7 +3197,7 @@ "documentation":"

A list of RecordColumn objects.

" } }, - "documentation":"

For an SQL-based Amazon Kinesis Data Analytics application, describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream.

" + "documentation":"

For a SQL-based Kinesis Data Analytics application, describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream.

" }, "SqlApplicationConfiguration":{ "type":"structure", @@ -3213,7 +3215,7 @@ "documentation":"

The array of ReferenceDataSource objects describing the reference data sources used by the application.

" } }, - "documentation":"

Describes the inputs, outputs, and reference data sources for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

Describes the inputs, outputs, and reference data sources for a SQL-based Kinesis Data Analytics application.

" }, "SqlApplicationConfigurationDescription":{ "type":"structure", @@ -3231,7 +3233,7 @@ "documentation":"

The array of ReferenceDataSourceDescription objects describing the reference data sources used by the application.

" } }, - "documentation":"

Describes the inputs, outputs, and reference data sources for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

Describes the inputs, outputs, and reference data sources for a SQL-based Kinesis Data Analytics application.

" }, "SqlApplicationConfigurationUpdate":{ "type":"structure", @@ -3249,7 +3251,7 @@ "documentation":"

The array of ReferenceDataSourceUpdate objects describing the new reference data sources used by the application.

" } }, - "documentation":"

Describes updates to the input streams, destination streams, and reference data sources for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

Describes updates to the input streams, destination streams, and reference data sources for a SQL-based Kinesis Data Analytics application.

" }, "SqlRunConfiguration":{ "type":"structure", @@ -3267,7 +3269,7 @@ "documentation":"

The point at which you want the application to start processing records from the streaming source.

" } }, - "documentation":"

Describes the starting parameters for an SQL-based Kinesis Data Analytics application.

" + "documentation":"

Describes the starting parameters for a SQL-based Kinesis Data Analytics application.
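For illustration (not part of the generated model), these starting parameters are passed to StartApplication through boto3 roughly as follows; the application name and InputId are assumed placeholders::

    import boto3

    kda = boto3.client("kinesisanalyticsv2")

    # Sketch only: "example-sql-app" and the InputId are placeholders.
    kda.start_application(
        ApplicationName="example-sql-app",
        RunConfiguration={
            "SqlRunConfigurations": [
                {
                    "InputId": "1.1",
                    # Where to begin reading the streaming source:
                    # NOW, TRIM_HORIZON, or LAST_STOPPED_POINT.
                    "InputStartingPositionConfiguration": {
                        "InputStartingPosition": "NOW"
                    },
                }
            ]
        },
    )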

" }, "SqlRunConfigurations":{ "type":"list", @@ -3302,6 +3304,10 @@ "ApplicationName":{ "shape":"ApplicationName", "documentation":"

The name of the running application to stop.

" + }, + "Force":{ + "shape":"BooleanObject", + "documentation":"

Set to true to force the application to stop. If you set Force to true, Kinesis Data Analytics stops the application without taking a snapshot.

You can only force stop a Flink-based Kinesis Data Analytics application. You can't force stop a SQL-based Kinesis Data Analytics application.

The application must be in the STARTING, UPDATING, STOPPING, AUTOSCALING, or RUNNING state.
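Once this update is applied, the new Force flag is exposed as an optional StopApplication parameter on the generated client; a rough usage sketch (application name assumed) might look like::

    import boto3

    kda = boto3.client("kinesisanalyticsv2")

    # Sketch only: Force=True skips the snapshot and is honored only for
    # Flink-based applications; "example-flink-app" is a placeholder.
    kda.stop_application(
        ApplicationName="example-flink-app",
        Force=True,
    )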

" } } }, @@ -3403,7 +3409,7 @@ "documentation":"

Stream data that was modified by the processor specified in the InputProcessingConfiguration parameter.

" } }, - "documentation":"

The data format is not valid. Amazon Kinesis Data Analytics cannot detect the schema for the given streaming source.

", + "documentation":"

The data format is not valid. Kinesis Data Analytics cannot detect the schema for the given streaming source.
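This exception is surfaced by botocore as a modeled client exception, for example when schema discovery fails; a sketch of handling it (the stream and role ARNs below are assumed placeholders)::

    import boto3

    kda = boto3.client("kinesisanalyticsv2")

    try:
        # Sketch only: the stream and role ARNs are placeholders.
        kda.discover_input_schema(
            ResourceARN="arn:aws:kinesis:us-east-1:123456789012:stream/example-input",
            ServiceExecutionRole="arn:aws:iam::123456789012:role/example-kda-role",
            InputStartingPositionConfiguration={"InputStartingPosition": "NOW"},
        )
    except kda.exceptions.UnableToDetectSchemaException as exc:
        # The service could not infer a schema from the sampled records.
        print(exc.response["Error"]["Message"])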

", "exception":true }, "UnsupportedOperationException":{ @@ -3423,7 +3429,7 @@ "members":{ "ResourceARN":{ "shape":"KinesisAnalyticsARN", - "documentation":"

The ARN of the Kinesis Analytics application from which to remove the tags.

" + "documentation":"

The ARN of the Kinesis Data Analytics application from which to remove the tags.
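A brief sketch (values assumed, for illustration only) of removing tags with the generated client, which takes the application ARN plus the keys to delete::

    import boto3

    kda = boto3.client("kinesisanalyticsv2")

    # Sketch only: the ARN and tag keys are placeholders.
    kda.untag_resource(
        ResourceARN="arn:aws:kinesisanalytics:us-east-1:123456789012:application/example-app",
        TagKeys=["team", "cost-center"],
    )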

" }, "TagKeys":{ "shape":"TagKeys", @@ -3563,5 +3569,5 @@ "min":0 } }, - "documentation":"

Amazon Kinesis Data Analytics is a fully managed service that you can use to process and analyze streaming data using SQL or Java. The service enables you to quickly author and run SQL or Java code against streaming sources to perform time series analytics, feed real-time dashboards, and create real-time metrics.

" + "documentation":"

Amazon Kinesis Data Analytics is a fully managed service that you can use to process and analyze streaming data using Java, SQL, or Scala. The service enables you to quickly author and run Java, SQL, or Scala code against streaming sources to perform time series analytics, feed real-time dashboards, and create real-time metrics.

" } diff --git a/botocore/data/marketplace-catalog/2018-09-17/service-2.json b/botocore/data/marketplace-catalog/2018-09-17/service-2.json index 103515df6d..f627fd44d6 100644 --- a/botocore/data/marketplace-catalog/2018-09-17/service-2.json +++ b/botocore/data/marketplace-catalog/2018-09-17/service-2.json @@ -243,6 +243,10 @@ "EntityIdList":{ "shape":"ResourceIdList", "documentation":"

This object is a list of entity IDs (string) that are a part of a change set. The entity ID list is a maximum of 20 entities. It must contain at least one entity.

" + }, + "FailureCode":{ + "shape":"FailureCode", + "documentation":"

Returned if the change set is in FAILED status. Can be either CLIENT_ERROR, which means that there are issues with the request (see the ErrorDetailList of DescribeChangeSet), or SERVER_FAULT, which means that there is a problem in the system, and you should retry your request.
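With FailureCode present on change set summaries, failed change sets can be triaged straight from ListChangeSets; a sketch follows (the "Status" filter name and value are assumptions for illustration, not taken from this model)::

    import boto3

    mpc = boto3.client("marketplace-catalog")

    # Sketch only: list failed change sets and print their failure codes.
    resp = mpc.list_change_sets(
        Catalog="AWSMarketplace",
        FilterList=[{"Name": "Status", "ValueList": ["FAILED"]}],
    )
    for summary in resp.get("ChangeSetSummaryList", []):
        print(summary["ChangeSetId"], summary.get("FailureCode"))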

" } }, "documentation":"

A summary of a change set returned in a list of change sets when the ListChangeSets action is called.

" @@ -345,6 +349,10 @@ "shape":"ChangeStatus", "documentation":"

The status of the change request.

" }, + "FailureCode":{ + "shape":"FailureCode", + "documentation":"

Returned if the change set is in FAILED status. Can be either CLIENT_ERROR, which means that there are issues with the request (see the ErrorDetailList), or SERVER_FAULT, which means that there is a problem in the system, and you should retry your request.
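To show how a caller might branch on the new field (a sketch; the change set ID is an assumed placeholder), CLIENT_ERROR points at the per-change error details while SERVER_FAULT suggests retrying::

    import boto3

    mpc = boto3.client("marketplace-catalog")

    resp = mpc.describe_change_set(
        Catalog="AWSMarketplace",
        ChangeSetId="example-change-set-id",  # placeholder
    )

    if resp["Status"] == "FAILED":
        if resp.get("FailureCode") == "CLIENT_ERROR":
            # Request problem: inspect the per-change error details.
            for change in resp.get("ChangeSet", []):
                for error in change.get("ErrorDetailList", []):
                    print(error["ErrorCode"], error["ErrorMessage"])
        elif resp.get("FailureCode") == "SERVER_FAULT":
            # Service-side problem: retry the change set later.
            pass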

" + }, "FailureDescription":{ "shape":"StringValue", "documentation":"

Returned if there is a failure on the change set, but that failure is not related to any of the changes in the request.

" @@ -474,6 +482,13 @@ "type":"list", "member":{"shape":"ErrorDetail"} }, + "FailureCode":{ + "type":"string", + "enum":[ + "CLIENT_ERROR", + "SERVER_FAULT" + ] + }, "Filter":{ "type":"structure", "members":{ @@ -768,5 +783,5 @@ "min":1 } }, - "documentation":"

Catalog API actions allow you to manage your entities through list, describe, and update capabilities. An entity can be a product or an offer on AWS Marketplace.

You can automate your entity update process by integrating the AWS Marketplace Catalog API with your AWS Marketplace product build or deployment pipelines. You can also create your own applications on top of the Catalog API to manage your products on AWS Marketplace.

" + "documentation":"

Catalog API actions allow you to manage your entities through list, describe, and update capabilities. An entity can be a product or an offer on AWS Marketplace.

You can automate your entity update process by integrating the AWS Marketplace Catalog API with your AWS Marketplace product build or deployment pipelines. You can also create your own applications on top of the Catalog API to manage your products on AWS Marketplace.
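A minimal sketch of the list/describe flow this overview refers to, via boto3; the entity type shown is an assumed example value, not taken from this model::

    import boto3

    mpc = boto3.client("marketplace-catalog")

    # Sketch only: "ContainerProduct" is an assumed entity type.
    entities = mpc.list_entities(
        Catalog="AWSMarketplace",
        EntityType="ContainerProduct",
    )
    for summary in entities.get("EntitySummaryList", []):
        detail = mpc.describe_entity(
            Catalog="AWSMarketplace",
            EntityId=summary["EntityId"],
        )
        print(summary["Name"], detail["EntityType"])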

" } From ad5e549817274d1460f888e68852582a0c6daa2b Mon Sep 17 00:00:00 2001 From: aws-sdk-python-automation Date: Tue, 6 Oct 2020 18:13:21 +0000 Subject: [PATCH 2/2] Bumping version to 1.18.13 --- .changes/1.18.13.json | 22 +++++++++++++++++++ .../next-release/api-change-dms-93546.json | 5 ----- .../next-release/api-change-ec2-82257.json | 5 ----- .../api-change-kinesisanalyticsv2-4210.json | 5 ----- .../api-change-marketplacecatalog-18838.json | 5 ----- CHANGELOG.rst | 9 ++++++++ botocore/__init__.py | 2 +- docs/source/conf.py | 2 +- 8 files changed, 33 insertions(+), 22 deletions(-) create mode 100644 .changes/1.18.13.json delete mode 100644 .changes/next-release/api-change-dms-93546.json delete mode 100644 .changes/next-release/api-change-ec2-82257.json delete mode 100644 .changes/next-release/api-change-kinesisanalyticsv2-4210.json delete mode 100644 .changes/next-release/api-change-marketplacecatalog-18838.json diff --git a/.changes/1.18.13.json b/.changes/1.18.13.json new file mode 100644 index 0000000000..9cf9a0648e --- /dev/null +++ b/.changes/1.18.13.json @@ -0,0 +1,22 @@ +[ + { + "category": "``dms``", + "description": "Update dms client to latest version", + "type": "api-change" + }, + { + "category": "``kinesisanalyticsv2``", + "description": "Update kinesisanalyticsv2 client to latest version", + "type": "api-change" + }, + { + "category": "``marketplace-catalog``", + "description": "Update marketplace-catalog client to latest version", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "Update ec2 client to latest version", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/next-release/api-change-dms-93546.json b/.changes/next-release/api-change-dms-93546.json deleted file mode 100644 index e3fcc8fc21..0000000000 --- a/.changes/next-release/api-change-dms-93546.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "category": "``dms``", - "type": "api-change", - "description": "Update dms client to latest version" -} diff --git a/.changes/next-release/api-change-ec2-82257.json b/.changes/next-release/api-change-ec2-82257.json deleted file mode 100644 index c936aeaa44..0000000000 --- a/.changes/next-release/api-change-ec2-82257.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "category": "``ec2``", - "type": "api-change", - "description": "Update ec2 client to latest version" -} diff --git a/.changes/next-release/api-change-kinesisanalyticsv2-4210.json b/.changes/next-release/api-change-kinesisanalyticsv2-4210.json deleted file mode 100644 index 16d785d834..0000000000 --- a/.changes/next-release/api-change-kinesisanalyticsv2-4210.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "category": "``kinesisanalyticsv2``", - "type": "api-change", - "description": "Update kinesisanalyticsv2 client to latest version" -} diff --git a/.changes/next-release/api-change-marketplacecatalog-18838.json b/.changes/next-release/api-change-marketplacecatalog-18838.json deleted file mode 100644 index 87ebfd0667..0000000000 --- a/.changes/next-release/api-change-marketplacecatalog-18838.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "category": "``marketplace-catalog``", - "type": "api-change", - "description": "Update marketplace-catalog client to latest version" -} diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7d7bfba403..40b4100138 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,15 @@ CHANGELOG ========= +1.18.13 +======= + +* api-change:``dms``: Update dms client to latest version +* api-change:``kinesisanalyticsv2``: Update kinesisanalyticsv2 client to latest version 
+* api-change:``marketplace-catalog``: Update marketplace-catalog client to latest version +* api-change:``ec2``: Update ec2 client to latest version + + 1.18.12 ======= diff --git a/botocore/__init__.py b/botocore/__init__.py index 9f1615ce28..84b1e61bc8 100644 --- a/botocore/__init__.py +++ b/botocore/__init__.py @@ -16,7 +16,7 @@ import re import logging -__version__ = '1.18.12' +__version__ = '1.18.13' class NullHandler(logging.Handler): diff --git a/docs/source/conf.py b/docs/source/conf.py index af7ebfe2cf..e659a697af 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -54,7 +54,7 @@ # The short X.Y version. version = '1.18.' # The full version, including alpha/beta/rc tags. -release = '1.18.12' +release = '1.18.13' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages.