diff --git a/CHANGELOG.md b/CHANGELOG.md index 487943197e6..6458ce7de49 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +Release v1.46.2 (2023-10-23) +=== + +### Service Client Updates +* `service/marketplacecommerceanalytics`: Updates service API and documentation + * The StartSupportDataExport operation has been deprecated as part of the Product Support Connection deprecation. As of December 2022, Product Support Connection is no longer supported. +* `service/networkmanager`: Updates service API and documentation +* `service/redshift-serverless`: Updates service API and documentation +* `service/rekognition`: Updates service API, documentation, paginators, and examples + * Amazon Rekognition introduces StartMediaAnalysisJob, GetMediaAnalysisJob, and ListMediaAnalysisJobs operations to run a bulk analysis of images with a Detect Moderation model. + Release v1.46.1 (2023-10-20) === diff --git a/aws/endpoints/defaults.go b/aws/endpoints/defaults.go index c555c965927..05eb8622952 100644 --- a/aws/endpoints/defaults.go +++ b/aws/endpoints/defaults.go @@ -4492,6 +4492,14 @@ var awsPartition = partition{ Region: "ap-southeast-1", }, }, + endpointKey{ + Region: "bedrock-eu-central-1", + }: endpoint{ + Hostname: "bedrock.eu-central-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "eu-central-1", + }, + }, endpointKey{ Region: "bedrock-fips-us-east-1", }: endpoint{ @@ -4524,6 +4532,14 @@ var awsPartition = partition{ Region: "ap-southeast-1", }, }, + endpointKey{ + Region: "bedrock-runtime-eu-central-1", + }: endpoint{ + Hostname: "bedrock-runtime.eu-central-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "eu-central-1", + }, + }, endpointKey{ Region: "bedrock-runtime-fips-us-east-1", }: endpoint{ @@ -4572,6 +4588,9 @@ var awsPartition = partition{ Region: "us-west-2", }, }, + endpointKey{ + Region: "eu-central-1", + }: endpoint{}, endpointKey{ Region: "us-east-1", }: endpoint{}, @@ -15712,12 +15731,45 @@ var awsPartition = 
partition{ }, "iottwinmaker": service{ Endpoints: serviceEndpoints{ + endpointKey{ + Region: "ap-northeast-1", + }: endpoint{}, + endpointKey{ + Region: "ap-northeast-2", + }: endpoint{}, + endpointKey{ + Region: "ap-south-1", + }: endpoint{}, endpointKey{ Region: "ap-southeast-1", }: endpoint{}, endpointKey{ Region: "ap-southeast-2", }: endpoint{}, + endpointKey{ + Region: "api-ap-northeast-1", + }: endpoint{ + Hostname: "api.iottwinmaker.ap-northeast-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ap-northeast-1", + }, + }, + endpointKey{ + Region: "api-ap-northeast-2", + }: endpoint{ + Hostname: "api.iottwinmaker.ap-northeast-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ap-northeast-2", + }, + }, + endpointKey{ + Region: "api-ap-south-1", + }: endpoint{ + Hostname: "api.iottwinmaker.ap-south-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ap-south-1", + }, + }, endpointKey{ Region: "api-ap-southeast-1", }: endpoint{ @@ -15766,6 +15818,30 @@ var awsPartition = partition{ Region: "us-west-2", }, }, + endpointKey{ + Region: "data-ap-northeast-1", + }: endpoint{ + Hostname: "data.iottwinmaker.ap-northeast-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ap-northeast-1", + }, + }, + endpointKey{ + Region: "data-ap-northeast-2", + }: endpoint{ + Hostname: "data.iottwinmaker.ap-northeast-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ap-northeast-2", + }, + }, + endpointKey{ + Region: "data-ap-south-1", + }: endpoint{ + Hostname: "data.iottwinmaker.ap-south-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ap-south-1", + }, + }, endpointKey{ Region: "data-ap-southeast-1", }: endpoint{ diff --git a/aws/version.go b/aws/version.go index f9f634d40d3..9c69d71d877 100644 --- a/aws/version.go +++ b/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.46.1" +const SDKVersion = "1.46.2" diff 
--git a/models/apis/marketplacecommerceanalytics/2015-07-01/api-2.json b/models/apis/marketplacecommerceanalytics/2015-07-01/api-2.json index 55f9c4577b1..99be9726a5e 100644 --- a/models/apis/marketplacecommerceanalytics/2015-07-01/api-2.json +++ b/models/apis/marketplacecommerceanalytics/2015-07-01/api-2.json @@ -35,7 +35,9 @@ "output":{"shape":"StartSupportDataExportResult"}, "errors":[ {"shape":"MarketplaceCommerceAnalyticsException"} - ] + ], + "deprecated":true, + "deprecatedMessage":"This target has been deprecated. As of December 2022 Product Support Connection is no longer supported." } }, "shapes":{ @@ -82,6 +84,7 @@ }, "DestinationS3BucketName":{ "type":"string", + "max":63, "min":1 }, "DestinationS3Prefix":{"type":"string"}, @@ -132,10 +135,12 @@ }, "RoleNameArn":{ "type":"string", + "max":2048, "min":1 }, "SnsTopicArn":{ "type":"string", + "max":256, "min":1 }, "StartSupportDataExportRequest":{ @@ -155,13 +160,17 @@ "destinationS3Prefix":{"shape":"DestinationS3Prefix"}, "snsTopicArn":{"shape":"SnsTopicArn"}, "customerDefinedValues":{"shape":"CustomerDefinedValues"} - } + }, + "deprecated":true, + "deprecatedMessage":"This target has been deprecated. As of December 2022 Product Support Connection is no longer supported." }, "StartSupportDataExportResult":{ "type":"structure", "members":{ "dataSetRequestId":{"shape":"DataSetRequestId"} - } + }, + "deprecated":true, + "deprecatedMessage":"This target has been deprecated. As of December 2022 Product Support Connection is no longer supported." 
}, "SupportDataSetType":{ "type":"string", diff --git a/models/apis/marketplacecommerceanalytics/2015-07-01/docs-2.json b/models/apis/marketplacecommerceanalytics/2015-07-01/docs-2.json index e61a647ee00..4d61dc3d090 100644 --- a/models/apis/marketplacecommerceanalytics/2015-07-01/docs-2.json +++ b/models/apis/marketplacecommerceanalytics/2015-07-01/docs-2.json @@ -3,14 +3,14 @@ "service": "Provides AWS Marketplace business intelligence data on-demand.", "operations": { "GenerateDataSet": "Given a data set type and data set publication date, asynchronously publishes the requested data set to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy.", - "StartSupportDataExport": "Given a data set type and a from date, asynchronously publishes the requested customer support data to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD'T'HH-mm-ss'Z'.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. 
Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy." + "StartSupportDataExport": "This target has been deprecated. Given a data set type and a from date, asynchronously publishes the requested customer support data to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD'T'HH-mm-ss'Z'.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy." }, "shapes": { "CustomerDefinedValues": { "base": null, "refs": { "GenerateDataSetRequest$customerDefinedValues": "(Optional) Key-value pairs which will be returned, unmodified, in the Amazon SNS notification message and the data set metadata file. These key-value pairs can be used to correlated responses with tracking information from other systems.", - "StartSupportDataExportRequest$customerDefinedValues": "(Optional) Key-value pairs which will be returned, unmodified, in the Amazon SNS notification message and the data set metadata file." + "StartSupportDataExportRequest$customerDefinedValues": "This target has been deprecated. (Optional) Key-value pairs which will be returned, unmodified, in the Amazon SNS notification message and the data set metadata file." 
} }, "DataSetPublicationDate": { @@ -23,7 +23,7 @@ "base": null, "refs": { "GenerateDataSetResult$dataSetRequestId": "A unique identifier representing a specific request to the GenerateDataSet operation. This identifier can be used to correlate a request with notifications from the SNS topic.", - "StartSupportDataExportResult$dataSetRequestId": "A unique identifier representing a specific request to the StartSupportDataExport operation. This identifier can be used to correlate a request with notifications from the SNS topic." + "StartSupportDataExportResult$dataSetRequestId": "This target has been deprecated. A unique identifier representing a specific request to the StartSupportDataExport operation. This identifier can be used to correlate a request with notifications from the SNS topic." } }, "DataSetType": { @@ -36,14 +36,14 @@ "base": null, "refs": { "GenerateDataSetRequest$destinationS3BucketName": "The name (friendly name, not ARN) of the destination S3 bucket.", - "StartSupportDataExportRequest$destinationS3BucketName": "The name (friendly name, not ARN) of the destination S3 bucket." + "StartSupportDataExportRequest$destinationS3BucketName": "This target has been deprecated. The name (friendly name, not ARN) of the destination S3 bucket." } }, "DestinationS3Prefix": { "base": null, "refs": { "GenerateDataSetRequest$destinationS3Prefix": "(Optional) The desired S3 prefix for the published data set, similar to a directory path in standard file systems. For example, if given the bucket name \"mybucket\" and the prefix \"myprefix/mydatasets\", the output file \"outputfile\" would be published to \"s3://mybucket/myprefix/mydatasets/outputfile\". If the prefix directory structure does not exist, it will be created. If no prefix is provided, the data set will be published to the S3 bucket root.", - "StartSupportDataExportRequest$destinationS3Prefix": "(Optional) The desired S3 prefix for the published data set, similar to a directory path in standard file systems. 
For example, if given the bucket name \"mybucket\" and the prefix \"myprefix/mydatasets\", the output file \"outputfile\" would be published to \"s3://mybucket/myprefix/mydatasets/outputfile\". If the prefix directory structure does not exist, it will be created. If no prefix is provided, the data set will be published to the S3 bucket root." + "StartSupportDataExportRequest$destinationS3Prefix": "This target has been deprecated. (Optional) The desired S3 prefix for the published data set, similar to a directory path in standard file systems. For example, if given the bucket name \"mybucket\" and the prefix \"myprefix/mydatasets\", the output file \"outputfile\" would be published to \"s3://mybucket/myprefix/mydatasets/outputfile\". If the prefix directory structure does not exist, it will be created. If no prefix is provided, the data set will be published to the S3 bucket root." } }, "ExceptionMessage": { @@ -55,7 +55,7 @@ "FromDate": { "base": null, "refs": { - "StartSupportDataExportRequest$fromDate": "The start date from which to retrieve the data set in UTC. This parameter only affects the customer_support_contacts_data data set type." + "StartSupportDataExportRequest$fromDate": "This target has been deprecated. The start date from which to retrieve the data set in UTC. This parameter only affects the customer_support_contacts_data data set type." } }, "GenerateDataSetRequest": { @@ -89,30 +89,30 @@ "base": null, "refs": { "GenerateDataSetRequest$roleNameArn": "The Amazon Resource Name (ARN) of the Role with an attached permissions policy to interact with the provided AWS services.", - "StartSupportDataExportRequest$roleNameArn": "The Amazon Resource Name (ARN) of the Role with an attached permissions policy to interact with the provided AWS services." + "StartSupportDataExportRequest$roleNameArn": "This target has been deprecated. The Amazon Resource Name (ARN) of the Role with an attached permissions policy to interact with the provided AWS services." 
} }, "SnsTopicArn": { "base": null, "refs": { "GenerateDataSetRequest$snsTopicArn": "Amazon Resource Name (ARN) for the SNS Topic that will be notified when the data set has been published or if an error has occurred.", - "StartSupportDataExportRequest$snsTopicArn": "Amazon Resource Name (ARN) for the SNS Topic that will be notified when the data set has been published or if an error has occurred." + "StartSupportDataExportRequest$snsTopicArn": "This target has been deprecated. Amazon Resource Name (ARN) for the SNS Topic that will be notified when the data set has been published or if an error has occurred." } }, "StartSupportDataExportRequest": { - "base": "Container for the parameters to the StartSupportDataExport operation.", + "base": "This target has been deprecated. Container for the parameters to the StartSupportDataExport operation.", "refs": { } }, "StartSupportDataExportResult": { - "base": "Container for the result of the StartSupportDataExport operation.", + "base": "This target has been deprecated. Container for the result of the StartSupportDataExport operation.", "refs": { } }, "SupportDataSetType": { "base": null, "refs": { - "StartSupportDataExportRequest$dataSetType": "

Specifies the data set type to be written to the output csv file. The data set types customer_support_contacts_data and test_customer_support_contacts_data both result in a csv file containing the following fields: Product Id, Product Code, Customer Guid, Subscription Guid, Subscription Start Date, Organization, AWS Account Id, Given Name, Surname, Telephone Number, Email, Title, Country Code, ZIP Code, Operation Type, and Operation Time.

" + "StartSupportDataExportRequest$dataSetType": "

This target has been deprecated. Specifies the data set type to be written to the output csv file. The data set types customer_support_contacts_data and test_customer_support_contacts_data both result in a csv file containing the following fields: Product Id, Product Code, Customer Guid, Subscription Guid, Subscription Start Date, Organization, AWS Account Id, Given Name, Surname, Telephone Number, Email, Title, Country Code, ZIP Code, Operation Type, and Operation Time.

" } } } diff --git a/models/apis/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json b/models/apis/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json new file mode 100644 index 00000000000..a8b4c02892c --- /dev/null +++ b/models/apis/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json @@ -0,0 +1,314 @@ +{ + "version": "1.0", + "parameters": { + "Region": { + "builtIn": "AWS::Region", + "required": false, + "documentation": "The AWS region used to dispatch the request.", + "type": "String" + }, + "UseDualStack": { + "builtIn": "AWS::UseDualStack", + "required": true, + "default": false, + "documentation": "When true, use the dual-stack endpoint. If the configured endpoint does not support dual-stack, dispatching the request MAY return an error.", + "type": "Boolean" + }, + "UseFIPS": { + "builtIn": "AWS::UseFIPS", + "required": true, + "default": false, + "documentation": "When true, send this request to the FIPS-compliant regional endpoint. If the configured endpoint does not have a FIPS compliant endpoint, dispatching the request will return an error.", + "type": "Boolean" + }, + "Endpoint": { + "builtIn": "SDK::Endpoint", + "required": false, + "documentation": "Override the endpoint used to send this request", + "type": "String" + } + }, + "rules": [ + { + "conditions": [ + { + "fn": "isSet", + "argv": [ + { + "ref": "Endpoint" + } + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + } + ], + "error": "Invalid Configuration: FIPS and custom endpoint are not supported", + "type": "error" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", + "type": "error" + }, + { + "conditions": [], + "endpoint": { + "url": { + "ref": "Endpoint" + }, + "properties": {}, + "headers": {} + }, + "type": "endpoint" 
+ } + ], + "type": "tree" + }, + { + "conditions": [ + { + "fn": "isSet", + "argv": [ + { + "ref": "Region" + } + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "aws.partition", + "argv": [ + { + "ref": "Region" + } + ], + "assign": "PartitionResult" + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsFIPS" + ] + } + ] + }, + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsDualStack" + ] + } + ] + } + ], + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "FIPS and DualStack are enabled, but this partition does not support one or both", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsFIPS" + ] + }, + true + ] + } + ], + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "FIPS is enabled but this partition does not support FIPS", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + 
{ + "ref": "UseDualStack" + }, + true + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsDualStack" + ] + } + ] + } + ], + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://marketplacecommerceanalytics.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "DualStack is enabled but this partition does not support DualStack", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [], + "endpoint": { + "url": "https://marketplacecommerceanalytics.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "Invalid Configuration: Missing Region", + "type": "error" + } + ] +} \ No newline at end of file diff --git a/models/apis/marketplacecommerceanalytics/2015-07-01/endpoint-tests-1.json b/models/apis/marketplacecommerceanalytics/2015-07-01/endpoint-tests-1.json new file mode 100644 index 00000000000..628e634634a --- /dev/null +++ b/models/apis/marketplacecommerceanalytics/2015-07-01/endpoint-tests-1.json @@ -0,0 +1,314 @@ +{ + "testCases": [ + { + "documentation": "For region us-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.us-east-1.amazonaws.com" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.us-east-1.api.aws" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region us-east-1 with 
FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.us-east-1.amazonaws.com" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.us-east-1.api.aws" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region cn-north-1 with FIPS enabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.cn-north-1.api.amazonwebservices.com.cn" + } + }, + "params": { + "Region": "cn-north-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region cn-north-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.cn-north-1.amazonaws.com.cn" + } + }, + "params": { + "Region": "cn-north-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region cn-north-1 with FIPS disabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.cn-north-1.api.amazonwebservices.com.cn" + } + }, + "params": { + "Region": "cn-north-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region cn-north-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.cn-north-1.amazonaws.com.cn" + } + }, + "params": { + "Region": "cn-north-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-gov-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.us-gov-east-1.api.aws" + } + }, + "params": { + "Region": "us-gov-east-1", + 
"UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region us-gov-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.us-gov-east-1.amazonaws.com" + } + }, + "params": { + "Region": "us-gov-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-gov-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.us-gov-east-1.api.aws" + } + }, + "params": { + "Region": "us-gov-east-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region us-gov-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.us-gov-east-1.amazonaws.com" + } + }, + "params": { + "Region": "us-gov-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.us-iso-east-1.c2s.ic.gov" + } + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": 
"https://marketplacecommerceanalytics.us-iso-east-1.c2s.ic.gov" + } + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics-fips.us-isob-east-1.sc2s.sgov.gov" + } + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://marketplacecommerceanalytics.us-isob-east-1.sc2s.sgov.gov" + } + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For custom endpoint with region set and fips disabled and dualstack disabled", + "expect": { + "endpoint": { + "url": "https://example.com" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": false, + "UseDualStack": false, + "Endpoint": "https://example.com" + } + }, + { + "documentation": "For custom endpoint with region not set and fips disabled and dualstack disabled", + "expect": { + "endpoint": { + "url": "https://example.com" + } + }, + "params": { + "UseFIPS": false, + "UseDualStack": false, + "Endpoint": "https://example.com" + } + }, + { + "documentation": "For custom endpoint 
with fips enabled and dualstack disabled", + "expect": { + "error": "Invalid Configuration: FIPS and custom endpoint are not supported" + }, + "params": { + "Region": "us-east-1", + "UseFIPS": true, + "UseDualStack": false, + "Endpoint": "https://example.com" + } + }, + { + "documentation": "For custom endpoint with fips disabled and dualstack enabled", + "expect": { + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" + }, + "params": { + "Region": "us-east-1", + "UseFIPS": false, + "UseDualStack": true, + "Endpoint": "https://example.com" + } + }, + { + "documentation": "Missing region", + "expect": { + "error": "Invalid Configuration: Missing Region" + } + } + ], + "version": "1.0" +} \ No newline at end of file diff --git a/models/apis/networkmanager/2019-07-05/api-2.json b/models/apis/networkmanager/2019-07-05/api-2.json index 6ee60ebe81e..57b11d5af6a 100644 --- a/models/apis/networkmanager/2019-07-05/api-2.json +++ b/models/apis/networkmanager/2019-07-05/api-2.json @@ -1734,7 +1734,8 @@ "State":{"shape":"ConnectPeerState"}, "CreatedAt":{"shape":"DateTime"}, "Configuration":{"shape":"ConnectPeerConfiguration"}, - "Tags":{"shape":"TagList"} + "Tags":{"shape":"TagList"}, + "SubnetArn":{"shape":"SubnetArn"} } }, "ConnectPeerAssociation":{ @@ -1811,7 +1812,8 @@ "EdgeLocation":{"shape":"ExternalRegionCode"}, "ConnectPeerState":{"shape":"ConnectPeerState"}, "CreatedAt":{"shape":"DateTime"}, - "Tags":{"shape":"TagList"} + "Tags":{"shape":"TagList"}, + "SubnetArn":{"shape":"SubnetArn"} } }, "ConnectPeerSummaryList":{ @@ -2129,8 +2131,7 @@ "type":"structure", "required":[ "ConnectAttachmentId", - "PeerAddress", - "InsideCidrBlocks" + "PeerAddress" ], "members":{ "ConnectAttachmentId":{"shape":"AttachmentId"}, @@ -2142,7 +2143,8 @@ "ClientToken":{ "shape":"ClientToken", "idempotencyToken":true - } + }, + "SubnetArn":{"shape":"SubnetArn"} } }, "CreateConnectPeerResponse":{ @@ -4844,7 +4846,10 @@ }, "TunnelProtocol":{ "type":"string", - 
"enum":["GRE"] + "enum":[ + "GRE", + "NO_ENCAP" + ] }, "UntagResourceRequest":{ "type":"structure", diff --git a/models/apis/networkmanager/2019-07-05/docs-2.json b/models/apis/networkmanager/2019-07-05/docs-2.json index bf260968896..5cbf6dc2d92 100644 --- a/models/apis/networkmanager/2019-07-05/docs-2.json +++ b/models/apis/networkmanager/2019-07-05/docs-2.json @@ -2385,6 +2385,9 @@ "base": null, "refs": { "AWSLocation$SubnetArn": "

The Amazon Resource Name (ARN) of the subnet that the device is located in.

", + "ConnectPeer$SubnetArn": "

The subnet ARN for the Connect peer.

", + "ConnectPeerSummary$SubnetArn": "

The subnet ARN for the Connect peer summary.

", + "CreateConnectPeerRequest$SubnetArn": "

The subnet ARN for the Connect peer.

", "SubnetArnList$member": null } }, @@ -2578,7 +2581,7 @@ "base": null, "refs": { "CreateTransitGatewayRouteTableAttachmentRequest$TransitGatewayRouteTableArn": "

The ARN of the transit gateway route table for the attachment request. For example, \"TransitGatewayRouteTableArn\": \"arn:aws:ec2:us-west-2:123456789012:transit-gateway-route-table/tgw-rtb-9876543210123456\".

", - "RouteTableIdentifier$TransitGatewayRouteTableArn": "

The ARN of the transit gateway route table.

", + "RouteTableIdentifier$TransitGatewayRouteTableArn": "

The ARN of the transit gateway route table for the attachment request. For example, \"TransitGatewayRouteTableArn\": \"arn:aws:ec2:us-west-2:123456789012:transit-gateway-route-table/tgw-rtb-9876543210123456\".

", "TransitGatewayRouteTableAttachment$TransitGatewayRouteTableArn": "

The ARN of the transit gateway attachment route table. For example, \"TransitGatewayRouteTableArn\": \"arn:aws:ec2:us-west-2:123456789012:transit-gateway-route-table/tgw-rtb-9876543210123456\".

" } }, diff --git a/models/apis/networkmanager/2019-07-05/endpoint-rule-set-1.json b/models/apis/networkmanager/2019-07-05/endpoint-rule-set-1.json index 6cbba0f7080..9be341714f3 100644 --- a/models/apis/networkmanager/2019-07-05/endpoint-rule-set-1.json +++ b/models/apis/networkmanager/2019-07-05/endpoint-rule-set-1.json @@ -40,7 +40,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -58,785 +57,360 @@ "type": "error" }, { - "conditions": [], - "type": "tree", - "rules": [ + "conditions": [ { - "conditions": [ + "fn": "booleanEquals", + "argv": [ { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true - ] - } - ], - "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", - "type": "error" - }, - { - "conditions": [], - "endpoint": { - "url": { - "ref": "Endpoint" + "ref": "UseDualStack" }, - "properties": {}, - "headers": {} - }, - "type": "endpoint" + true + ] } - ] + ], + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", + "type": "error" + }, + { + "conditions": [], + "endpoint": { + "url": { + "ref": "Endpoint" + }, + "properties": {}, + "headers": {} + }, + "type": "endpoint" } - ] + ], + "type": "tree" }, { - "conditions": [], - "type": "tree", + "conditions": [ + { + "fn": "isSet", + "argv": [ + { + "ref": "Region" + } + ] + } + ], "rules": [ { "conditions": [ { - "fn": "isSet", + "fn": "aws.partition", "argv": [ { "ref": "Region" } - ] + ], + "assign": "PartitionResult" } ], - "type": "tree", "rules": [ { "conditions": [ { - "fn": "aws.partition", + "fn": "stringEquals", "argv": [ { - "ref": "Region" - } - ], - "assign": "PartitionResult" - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "stringEquals", + "fn": "getAttr", "argv": [ { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "name" - ] + "ref": "PartitionResult" }, - "aws" + "name" ] + }, + "aws" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": 
"UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://networkmanager.us-west-2.amazonaws.com", + "properties": { + "authSchemes": [ + { + "name": "sigv4", + "signingName": "networkmanager", + "signingRegion": "us-west-2" } - ], - "type": "tree", - "rules": [ + ] + }, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseFIPS" - }, - true - ] - }, - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true - ] - } - ], - "type": "tree", - "rules": [ + "fn": "getAttr", + "argv": [ { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsFIPS" - ] - } - ] - }, - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsDualStack" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager-fips.{Region}.api.aws", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] + "ref": "PartitionResult" }, - { - "conditions": [], - "error": "FIPS and DualStack are enabled, but this partition does not support one or both", - "type": "error" - } + "name" ] }, + "aws-us-gov" + ] + }, + { + "fn": "booleanEquals", + "argv": [ { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseFIPS" - }, - true - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsFIPS" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager-fips.{Region}.amazonaws.com", - "properties": 
{}, - "headers": {} - }, - "type": "endpoint" - } - ] - }, - { - "conditions": [], - "error": "FIPS is enabled but this partition does not support FIPS", - "type": "error" - } - ] + "ref": "UseFIPS" }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsDualStack" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager.{Region}.api.aws", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - }, - { - "conditions": [], - "error": "DualStack is enabled but this partition does not support DualStack", - "type": "error" - } - ] + "ref": "UseDualStack" }, + false + ] + } + ], + "endpoint": { + "url": "https://networkmanager.us-gov-west-1.amazonaws.com", + "properties": { + "authSchemes": [ { - "conditions": [], - "endpoint": { - "url": "https://networkmanager.us-west-2.amazonaws.com", - "properties": { - "authSchemes": [ - { - "name": "sigv4", - "signingName": "networkmanager", - "signingRegion": "us-west-2" - } - ] - }, - "headers": {} - }, - "type": "endpoint" + "name": "sigv4", + "signingName": "networkmanager", + "signingRegion": "us-gov-west-1" } ] }, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "rules": [ { "conditions": [ { - "fn": "stringEquals", + "fn": "booleanEquals", "argv": [ + true, { "fn": "getAttr", "argv": [ { "ref": "PartitionResult" }, - "name" - ] - }, - "aws-us-gov" - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - 
"argv": [ - { - "ref": "UseFIPS" - }, - true - ] - }, - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true + "supportsFIPS" ] } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsFIPS" - ] - } - ] - }, - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsDualStack" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager-fips.{Region}.api.aws", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - }, - { - "conditions": [], - "error": "FIPS and DualStack are enabled, but this partition does not support one or both", - "type": "error" - } ] }, { - "conditions": [ + "fn": "booleanEquals", + "argv": [ + true, { - "fn": "booleanEquals", + "fn": "getAttr", "argv": [ { - "ref": "UseFIPS" + "ref": "PartitionResult" }, - true + "supportsDualStack" ] } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsFIPS" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager-fips.{Region}.amazonaws.com", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - }, - { - "conditions": [], - "error": "FIPS is enabled but this partition does not support FIPS", - "type": "error" - } ] - }, - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsDualStack" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { 
- "conditions": [], - "endpoint": { - "url": "https://networkmanager.{Region}.api.aws", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - }, - { - "conditions": [], - "error": "DualStack is enabled but this partition does not support DualStack", - "type": "error" - } - ] - }, + } + ], + "rules": [ { "conditions": [], "endpoint": { - "url": "https://networkmanager.us-gov-west-1.amazonaws.com", - "properties": { - "authSchemes": [ - { - "name": "sigv4", - "signingName": "networkmanager", - "signingRegion": "us-gov-west-1" - } - ] - }, + "url": "https://networkmanager-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, "headers": {} }, "type": "endpoint" } - ] + ], + "type": "tree" }, { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseFIPS" - }, - true - ] - }, - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true - ] - } - ], - "type": "tree", - "rules": [ + "conditions": [], + "error": "FIPS and DualStack are enabled, but this partition does not support one or both", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsFIPS" - ] - } - ] - }, - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsDualStack" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - } - ] + "ref": "UseFIPS" }, - { - "conditions": [], - "error": "FIPS and DualStack are enabled, but this partition does not support one or both", - "type": "error" - } + true ] - }, + } + 
], + "rules": [ { "conditions": [ { "fn": "booleanEquals", "argv": [ { - "ref": "UseFIPS" - }, - true - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", + "fn": "getAttr", "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsFIPS" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "type": "tree", - "rules": [ { - "conditions": [], - "endpoint": { - "url": "https://networkmanager-fips.{Region}.{PartitionResult#dnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } + "ref": "PartitionResult" + }, + "supportsFIPS" ] - } - ] - }, - { - "conditions": [], - "error": "FIPS is enabled but this partition does not support FIPS", - "type": "error" - } - ] - }, - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" }, true ] } ], - "type": "tree", "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - true, - { - "fn": "getAttr", - "argv": [ - { - "ref": "PartitionResult" - }, - "supportsDualStack" - ] - } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], - "type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://networkmanager.{Region}.{PartitionResult#dualStackDnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - } - ] - }, { "conditions": [], - "error": "DualStack is enabled but this partition does not support DualStack", - "type": "error" + "endpoint": { + "url": "https://networkmanager-fips.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], - "type": "tree", - "rules": [ + "error": "FIPS is enabled but this partition does not support FIPS", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ { - "conditions": [ - { - "fn": "stringEquals", - 
"argv": [ - { - "ref": "Region" - }, - "aws-global" - ] - } - ], - "endpoint": { - "url": "https://networkmanager.us-west-2.amazonaws.com", - "properties": { - "authSchemes": [ - { - "name": "sigv4", - "signingName": "networkmanager", - "signingRegion": "us-west-2" - } - ] - }, - "headers": {} - }, - "type": "endpoint" + "ref": "UseDualStack" }, + true + ] + } + ], + "rules": [ + { + "conditions": [ { - "conditions": [ + "fn": "booleanEquals", + "argv": [ + true, { - "fn": "stringEquals", + "fn": "getAttr", "argv": [ { - "ref": "Region" + "ref": "PartitionResult" }, - "aws-us-gov-global" + "supportsDualStack" ] } - ], - "endpoint": { - "url": "https://networkmanager.us-gov-west-1.amazonaws.com", - "properties": { - "authSchemes": [ - { - "name": "sigv4", - "signingName": "networkmanager", - "signingRegion": "us-gov-west-1" - } - ] - }, - "headers": {} - }, - "type": "endpoint" - }, + ] + } + ], + "rules": [ { "conditions": [], "endpoint": { - "url": "https://networkmanager.{Region}.{PartitionResult#dnsSuffix}", + "url": "https://networkmanager.{Region}.{PartitionResult#dualStackDnsSuffix}", "properties": {}, "headers": {} }, "type": "endpoint" } - ] + ], + "type": "tree" + }, + { + "conditions": [], + "error": "DualStack is enabled but this partition does not support DualStack", + "type": "error" } - ] + ], + "type": "tree" + }, + { + "conditions": [], + "endpoint": { + "url": "https://networkmanager.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" } - ] - }, - { - "conditions": [], - "error": "Invalid Configuration: Missing Region", - "type": "error" + ], + "type": "tree" } - ] + ], + "type": "tree" + }, + { + "conditions": [], + "error": "Invalid Configuration: Missing Region", + "type": "error" } ] } \ No newline at end of file diff --git a/models/apis/networkmanager/2019-07-05/endpoint-tests-1.json b/models/apis/networkmanager/2019-07-05/endpoint-tests-1.json index b182162fd65..06545870edf 100644 --- 
a/models/apis/networkmanager/2019-07-05/endpoint-tests-1.json +++ b/models/apis/networkmanager/2019-07-05/endpoint-tests-1.json @@ -17,9 +17,9 @@ } }, "params": { + "Region": "aws-global", "UseFIPS": false, - "UseDualStack": false, - "Region": "aws-global" + "UseDualStack": false } }, { @@ -30,9 +30,9 @@ } }, "params": { + "Region": "us-east-1", "UseFIPS": true, - "UseDualStack": true, - "Region": "us-east-1" + "UseDualStack": true } }, { @@ -43,9 +43,9 @@ } }, "params": { + "Region": "us-east-1", "UseFIPS": true, - "UseDualStack": false, - "Region": "us-east-1" + "UseDualStack": false } }, { @@ -56,9 +56,9 @@ } }, "params": { + "Region": "us-east-1", "UseFIPS": false, - "UseDualStack": true, - "Region": "us-east-1" + "UseDualStack": true } }, { @@ -78,9 +78,9 @@ } }, "params": { + "Region": "us-east-1", "UseFIPS": false, - "UseDualStack": false, - "Region": "us-east-1" + "UseDualStack": false } }, { @@ -91,9 +91,9 @@ } }, "params": { + "Region": "cn-north-1", "UseFIPS": true, - "UseDualStack": true, - "Region": "cn-north-1" + "UseDualStack": true } }, { @@ -104,9 +104,9 @@ } }, "params": { + "Region": "cn-north-1", "UseFIPS": true, - "UseDualStack": false, - "Region": "cn-north-1" + "UseDualStack": false } }, { @@ -117,9 +117,9 @@ } }, "params": { + "Region": "cn-north-1", "UseFIPS": false, - "UseDualStack": true, - "Region": "cn-north-1" + "UseDualStack": true } }, { @@ -130,9 +130,9 @@ } }, "params": { + "Region": "cn-north-1", "UseFIPS": false, - "UseDualStack": false, - "Region": "cn-north-1" + "UseDualStack": false } }, { @@ -152,9 +152,9 @@ } }, "params": { + "Region": "aws-us-gov-global", "UseFIPS": false, - "UseDualStack": false, - "Region": "aws-us-gov-global" + "UseDualStack": false } }, { @@ -165,9 +165,9 @@ } }, "params": { + "Region": "us-gov-east-1", "UseFIPS": true, - "UseDualStack": true, - "Region": "us-gov-east-1" + "UseDualStack": true } }, { @@ -178,9 +178,9 @@ } }, "params": { + "Region": "us-gov-east-1", "UseFIPS": true, - "UseDualStack": 
false, - "Region": "us-gov-east-1" + "UseDualStack": false } }, { @@ -191,9 +191,9 @@ } }, "params": { + "Region": "us-gov-east-1", "UseFIPS": false, - "UseDualStack": true, - "Region": "us-gov-east-1" + "UseDualStack": true } }, { @@ -213,9 +213,20 @@ } }, "params": { + "Region": "us-gov-east-1", "UseFIPS": false, - "UseDualStack": false, - "Region": "us-gov-east-1" + "UseDualStack": false + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": true, + "UseDualStack": true } }, { @@ -226,9 +237,20 @@ } }, "params": { + "Region": "us-iso-east-1", "UseFIPS": true, - "UseDualStack": false, - "Region": "us-iso-east-1" + "UseDualStack": false + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": false, + "UseDualStack": true } }, { @@ -239,9 +261,20 @@ } }, "params": { + "Region": "us-iso-east-1", "UseFIPS": false, - "UseDualStack": false, - "Region": "us-iso-east-1" + "UseDualStack": false + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": true, + "UseDualStack": true } }, { @@ -252,9 +285,20 @@ } }, "params": { + "Region": "us-isob-east-1", "UseFIPS": true, - "UseDualStack": false, - "Region": "us-isob-east-1" + "UseDualStack": false + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + 
"Region": "us-isob-east-1", + "UseFIPS": false, + "UseDualStack": true } }, { @@ -265,9 +309,9 @@ } }, "params": { + "Region": "us-isob-east-1", "UseFIPS": false, - "UseDualStack": false, - "Region": "us-isob-east-1" + "UseDualStack": false } }, { @@ -278,9 +322,9 @@ } }, "params": { + "Region": "us-east-1", "UseFIPS": false, "UseDualStack": false, - "Region": "us-east-1", "Endpoint": "https://example.com" } }, @@ -303,9 +347,9 @@ "error": "Invalid Configuration: FIPS and custom endpoint are not supported" }, "params": { + "Region": "us-east-1", "UseFIPS": true, "UseDualStack": false, - "Region": "us-east-1", "Endpoint": "https://example.com" } }, @@ -315,11 +359,17 @@ "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" }, "params": { + "Region": "us-east-1", "UseFIPS": false, "UseDualStack": true, - "Region": "us-east-1", "Endpoint": "https://example.com" } + }, + { + "documentation": "Missing region", + "expect": { + "error": "Invalid Configuration: Missing Region" + } } ], "version": "1.0" diff --git a/models/apis/redshift-serverless/2021-04-21/api-2.json b/models/apis/redshift-serverless/2021-04-21/api-2.json index 53c801b1e84..812d60604df 100644 --- a/models/apis/redshift-serverless/2021-04-21/api-2.json +++ b/models/apis/redshift-serverless/2021-04-21/api-2.json @@ -1806,6 +1806,7 @@ "endpoint":{"shape":"Endpoint"}, "enhancedVpcRouting":{"shape":"Boolean"}, "namespaceName":{"shape":"String"}, + "patchVersion":{"shape":"String"}, "port":{"shape":"Integer"}, "publiclyAccessible":{"shape":"Boolean"}, "securityGroupIds":{"shape":"SecurityGroupIdList"}, @@ -1813,7 +1814,8 @@ "subnetIds":{"shape":"SubnetIdList"}, "workgroupArn":{"shape":"String"}, "workgroupId":{"shape":"String"}, - "workgroupName":{"shape":"WorkgroupName"} + "workgroupName":{"shape":"WorkgroupName"}, + "workgroupVersion":{"shape":"String"} } }, "WorkgroupList":{ diff --git a/models/apis/redshift-serverless/2021-04-21/docs-2.json 
b/models/apis/redshift-serverless/2021-04-21/docs-2.json index fc566ef32ea..a21d13753da 100644 --- a/models/apis/redshift-serverless/2021-04-21/docs-2.json +++ b/models/apis/redshift-serverless/2021-04-21/docs-2.json @@ -171,7 +171,7 @@ "refs": { "CreateNamespaceRequest$adminUserPassword": "

The password of the administrator for the first database created in the namespace.

You can't use adminUserPassword if manageAdminPassword is true.

", "GetCredentialsResponse$dbPassword": "

A temporary password that authorizes the user name returned by DbUser to log on to the database DbName.

", - "UpdateNamespaceRequest$adminUserPassword": "

The password of the administrator for the first database created in the namespace. This parameter must be updated together with adminUsername.

You can't use adminUserPassword if manageAdminPassword is true.

" + "UpdateNamespaceRequest$adminUserPassword": "

The password of the administrator for the first database created in the namespace. This parameter must be updated together with adminUsername.

You can't use adminUserPassword if manageAdminPassword is true.

" } }, "DbUser": { @@ -879,8 +879,10 @@ "VpcEndpoint$vpcId": "

The VPC identifier that the endpoint is associated with.

", "VpcSecurityGroupMembership$status": "

The status of the VPC security group.

", "Workgroup$namespaceName": "

The namespace the workgroup is associated with.

", + "Workgroup$patchVersion": "

The patch version of your Amazon Redshift Serverless workgroup. For more information about patch versions, see Cluster versions for Amazon Redshift.

", "Workgroup$workgroupArn": "

The Amazon Resource Name (ARN) that links to the workgroup.

", - "Workgroup$workgroupId": "

The unique identifier of the workgroup.

" + "Workgroup$workgroupId": "

The unique identifier of the workgroup.

", + "Workgroup$workgroupVersion": "

The Amazon Redshift Serverless version of your workgroup. For more information about Amazon Redshift Serverless versions, see Cluster versions for Amazon Redshift.

" } }, "SubnetId": { diff --git a/models/apis/redshift-serverless/2021-04-21/endpoint-rule-set-1.json b/models/apis/redshift-serverless/2021-04-21/endpoint-rule-set-1.json index 0bf166b4b28..8139b1f39ed 100644 --- a/models/apis/redshift-serverless/2021-04-21/endpoint-rule-set-1.json +++ b/models/apis/redshift-serverless/2021-04-21/endpoint-rule-set-1.json @@ -40,7 +40,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -83,7 +82,8 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -96,7 +96,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -110,7 +109,6 @@ "assign": "PartitionResult" } ], - "type": "tree", "rules": [ { "conditions": [ @@ -133,7 +131,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -168,7 +165,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [], @@ -179,14 +175,16 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "FIPS and DualStack are enabled, but this partition does not support one or both", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -200,14 +198,12 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ { "fn": "booleanEquals", "argv": [ - true, { "fn": "getAttr", "argv": [ @@ -216,11 +212,11 @@ }, "supportsFIPS" ] - } + }, + true ] } ], - "type": "tree", "rules": [ { "conditions": [], @@ -231,14 +227,16 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "FIPS is enabled but this partition does not support FIPS", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -252,7 +250,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -272,7 +269,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [], @@ -283,14 +279,16 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "DualStack is enabled but this partition does not support DualStack", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [], @@ -301,9 +299,11 @@ }, 
"type": "endpoint" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" }, { "conditions": [], diff --git a/models/apis/rekognition/2016-06-27/api-2.json b/models/apis/rekognition/2016-06-27/api-2.json index ad588d192d1..0f7bf58a706 100644 --- a/models/apis/rekognition/2016-06-27/api-2.json +++ b/models/apis/rekognition/2016-06-27/api-2.json @@ -714,6 +714,23 @@ {"shape":"ThrottlingException"} ] }, + "GetMediaAnalysisJob":{ + "name":"GetMediaAnalysisJob", + "http":{ + "method":"POST", + "requestUri":"/" + }, + "input":{"shape":"GetMediaAnalysisJobRequest"}, + "output":{"shape":"GetMediaAnalysisJobResponse"}, + "errors":[ + {"shape":"AccessDeniedException"}, + {"shape":"ResourceNotFoundException"}, + {"shape":"InternalServerError"}, + {"shape":"InvalidParameterException"}, + {"shape":"ProvisionedThroughputExceededException"}, + {"shape":"ThrottlingException"} + ] + }, "GetPersonTracking":{ "name":"GetPersonTracking", "http":{ @@ -865,6 +882,23 @@ {"shape":"ResourceNotFoundException"} ] }, + "ListMediaAnalysisJobs":{ + "name":"ListMediaAnalysisJobs", + "http":{ + "method":"POST", + "requestUri":"/" + }, + "input":{"shape":"ListMediaAnalysisJobsRequest"}, + "output":{"shape":"ListMediaAnalysisJobsResponse"}, + "errors":[ + {"shape":"AccessDeniedException"}, + {"shape":"InternalServerError"}, + {"shape":"InvalidParameterException"}, + {"shape":"InvalidPaginationTokenException"}, + {"shape":"ProvisionedThroughputExceededException"}, + {"shape":"ThrottlingException"} + ] + }, "ListProjectPolicies":{ "name":"ListProjectPolicies", "http":{ @@ -1157,6 +1191,29 @@ ], "idempotent":true }, + "StartMediaAnalysisJob":{ + "name":"StartMediaAnalysisJob", + "http":{ + "method":"POST", + "requestUri":"/" + }, + "input":{"shape":"StartMediaAnalysisJobRequest"}, + "output":{"shape":"StartMediaAnalysisJobResponse"}, + "errors":[ + {"shape":"InternalServerError"}, + {"shape":"AccessDeniedException"}, + {"shape":"InvalidParameterException"}, + {"shape":"InvalidManifestException"}, + 
{"shape":"InvalidS3ObjectException"}, + {"shape":"ResourceNotFoundException"}, + {"shape":"ResourceNotReadyException"}, + {"shape":"ProvisionedThroughputExceededException"}, + {"shape":"LimitExceededException"}, + {"shape":"ThrottlingException"}, + {"shape":"IdempotentParameterMismatchException"} + ], + "idempotent":true + }, "StartPersonTracking":{ "name":"StartPersonTracking", "http":{ @@ -2936,6 +2993,38 @@ "GetRequestMetadata":{"shape":"GetLabelDetectionRequestMetadata"} } }, + "GetMediaAnalysisJobRequest":{ + "type":"structure", + "required":["JobId"], + "members":{ + "JobId":{"shape":"MediaAnalysisJobId"} + } + }, + "GetMediaAnalysisJobResponse":{ + "type":"structure", + "required":[ + "JobId", + "OperationsConfig", + "Status", + "CreationTimestamp", + "Input", + "OutputConfig" + ], + "members":{ + "JobId":{"shape":"MediaAnalysisJobId"}, + "JobName":{"shape":"MediaAnalysisJobName"}, + "OperationsConfig":{"shape":"MediaAnalysisOperationsConfig"}, + "Status":{"shape":"MediaAnalysisJobStatus"}, + "FailureDetails":{"shape":"MediaAnalysisJobFailureDetails"}, + "CreationTimestamp":{"shape":"DateTime"}, + "CompletionTimestamp":{"shape":"DateTime"}, + "Input":{"shape":"MediaAnalysisInput"}, + "OutputConfig":{"shape":"MediaAnalysisOutputConfig"}, + "KmsKeyId":{"shape":"KmsKeyId"}, + "Results":{"shape":"MediaAnalysisResults"}, + "ManifestSummary":{"shape":"MediaAnalysisManifestSummary"} + } + }, "GetPersonTrackingRequest":{ "type":"structure", "required":["JobId"], @@ -3168,6 +3257,12 @@ }, "exception":true }, + "InvalidManifestException":{ + "type":"structure", + "members":{ + }, + "exception":true + }, "InvalidPaginationTokenException":{ "type":"structure", "members":{ @@ -3471,6 +3566,26 @@ "FaceModelVersion":{"shape":"String"} } }, + "ListMediaAnalysisJobsPageSize":{ + "type":"integer", + "max":100, + "min":1 + }, + "ListMediaAnalysisJobsRequest":{ + "type":"structure", + "members":{ + "NextToken":{"shape":"ExtendedPaginationToken"}, + 
"MaxResults":{"shape":"ListMediaAnalysisJobsPageSize"} + } + }, + "ListMediaAnalysisJobsResponse":{ + "type":"structure", + "required":["MediaAnalysisJobs"], + "members":{ + "NextToken":{"shape":"ExtendedPaginationToken"}, + "MediaAnalysisJobs":{"shape":"MediaAnalysisJobDescriptions"} + } + }, "ListProjectPoliciesPageSize":{ "type":"integer", "max":5, @@ -3611,6 +3726,123 @@ "max":500, "min":1 }, + "MediaAnalysisDetectModerationLabelsConfig":{ + "type":"structure", + "members":{ + "MinConfidence":{"shape":"Percent"}, + "ProjectVersion":{"shape":"ProjectVersionId"} + } + }, + "MediaAnalysisInput":{ + "type":"structure", + "required":["S3Object"], + "members":{ + "S3Object":{"shape":"S3Object"} + } + }, + "MediaAnalysisJobDescription":{ + "type":"structure", + "required":[ + "JobId", + "OperationsConfig", + "Status", + "CreationTimestamp", + "Input", + "OutputConfig" + ], + "members":{ + "JobId":{"shape":"MediaAnalysisJobId"}, + "JobName":{"shape":"MediaAnalysisJobName"}, + "OperationsConfig":{"shape":"MediaAnalysisOperationsConfig"}, + "Status":{"shape":"MediaAnalysisJobStatus"}, + "FailureDetails":{"shape":"MediaAnalysisJobFailureDetails"}, + "CreationTimestamp":{"shape":"DateTime"}, + "CompletionTimestamp":{"shape":"DateTime"}, + "Input":{"shape":"MediaAnalysisInput"}, + "OutputConfig":{"shape":"MediaAnalysisOutputConfig"}, + "KmsKeyId":{"shape":"KmsKeyId"}, + "Results":{"shape":"MediaAnalysisResults"}, + "ManifestSummary":{"shape":"MediaAnalysisManifestSummary"} + } + }, + "MediaAnalysisJobDescriptions":{ + "type":"list", + "member":{"shape":"MediaAnalysisJobDescription"} + }, + "MediaAnalysisJobFailureCode":{ + "type":"string", + "enum":[ + "INTERNAL_ERROR", + "INVALID_S3_OBJECT", + "INVALID_MANIFEST", + "INVALID_OUTPUT_CONFIG", + "INVALID_KMS_KEY", + "ACCESS_DENIED", + "RESOURCE_NOT_FOUND", + "RESOURCE_NOT_READY", + "THROTTLED" + ] + }, + "MediaAnalysisJobFailureDetails":{ + "type":"structure", + "members":{ + "Code":{"shape":"MediaAnalysisJobFailureCode"}, + 
"Message":{"shape":"String"} + } + }, + "MediaAnalysisJobId":{ + "type":"string", + "max":64, + "min":1, + "pattern":"^[a-zA-Z0-9-_]+$" + }, + "MediaAnalysisJobName":{ + "type":"string", + "max":64, + "min":1, + "pattern":"[a-zA-Z0-9_.\\-]+" + }, + "MediaAnalysisJobStatus":{ + "type":"string", + "enum":[ + "CREATED", + "QUEUED", + "IN_PROGRESS", + "SUCCEEDED", + "FAILED" + ] + }, + "MediaAnalysisManifestSummary":{ + "type":"structure", + "members":{ + "S3Object":{"shape":"S3Object"} + } + }, + "MediaAnalysisOperationsConfig":{ + "type":"structure", + "members":{ + "DetectModerationLabels":{"shape":"MediaAnalysisDetectModerationLabelsConfig"} + } + }, + "MediaAnalysisOutputConfig":{ + "type":"structure", + "required":["S3Bucket"], + "members":{ + "S3Bucket":{"shape":"S3Bucket"}, + "S3KeyPrefix":{"shape":"MediaAnalysisS3KeyPrefix"} + } + }, + "MediaAnalysisResults":{ + "type":"structure", + "members":{ + "S3Object":{"shape":"S3Object"} + } + }, + "MediaAnalysisS3KeyPrefix":{ + "type":"string", + "max":800, + "pattern":"\\S*" + }, "MinCoveragePercentage":{ "type":"float", "max":100, @@ -4388,6 +4620,32 @@ "JobId":{"shape":"JobId"} } }, + "StartMediaAnalysisJobRequest":{ + "type":"structure", + "required":[ + "OperationsConfig", + "Input", + "OutputConfig" + ], + "members":{ + "ClientRequestToken":{ + "shape":"ClientRequestToken", + "idempotencyToken":true + }, + "JobName":{"shape":"MediaAnalysisJobName"}, + "OperationsConfig":{"shape":"MediaAnalysisOperationsConfig"}, + "Input":{"shape":"MediaAnalysisInput"}, + "OutputConfig":{"shape":"MediaAnalysisOutputConfig"}, + "KmsKeyId":{"shape":"KmsKeyId"} + } + }, + "StartMediaAnalysisJobResponse":{ + "type":"structure", + "required":["JobId"], + "members":{ + "JobId":{"shape":"MediaAnalysisJobId"} + } + }, "StartPersonTrackingRequest":{ "type":"structure", "required":["Video"], diff --git a/models/apis/rekognition/2016-06-27/docs-2.json b/models/apis/rekognition/2016-06-27/docs-2.json index 902c5425e18..8b26c05f867 100644 
--- a/models/apis/rekognition/2016-06-27/docs-2.json +++ b/models/apis/rekognition/2016-06-27/docs-2.json @@ -40,6 +40,7 @@ "GetFaceLivenessSessionResults": "

Retrieves the results of a specific Face Liveness session. It requires the sessionId as input, which was created using CreateFaceLivenessSession. Returns the corresponding Face Liveness confidence score, a reference image that includes a face bounding box, and audit images that also contain face bounding boxes. The Face Liveness confidence score ranges from 0 to 100.

The number of audit images returned by GetFaceLivenessSessionResults is defined by the AuditImagesLimit parameter when calling CreateFaceLivenessSession. Reference images are always returned when possible.

", "GetFaceSearch": "

Gets the face search results for Amazon Rekognition Video face search started by StartFaceSearch. The search returns faces in a collection that match the faces of persons detected in a video. It also includes the time(s) that faces are matched in the video.

Face search in a video is an asynchronous operation. You start face search by calling to StartFaceSearch which returns a job identifier (JobId). When the search operation finishes, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic registered in the initial call to StartFaceSearch. To get the search results, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetFaceSearch and pass the job identifier (JobId) from the initial call to StartFaceSearch.

For more information, see Searching Faces in a Collection in the Amazon Rekognition Developer Guide.

The search results are returned in an array, Persons, of PersonMatch objects. Each PersonMatch element contains details about the matching faces in the input collection, person information (facial attributes, bounding boxes, and person identifier) for the matched person, and the time the person was matched in the video.

GetFaceSearch only returns the default facial attributes (BoundingBox, Confidence, Landmarks, Pose, and Quality). The other facial attributes listed in the Face object of the following response syntax are not returned. For more information, see FaceDetail in the Amazon Rekognition Developer Guide.

By default, the Persons array is sorted by the time, in milliseconds from the start of the video, persons are matched. You can also sort persons by specifying INDEX for the SORTBY input parameter.

", "GetLabelDetection": "

Gets the label detection results of a Amazon Rekognition Video analysis started by StartLabelDetection.

The label detection operation is started by a call to StartLabelDetection which returns a job identifier (JobId). When the label detection operation finishes, Amazon Rekognition publishes a completion status to the Amazon Simple Notification Service topic registered in the initial call to StartLabelDetection.

To get the results of the label detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetLabelDetection and pass the job identifier (JobId) from the initial call to StartLabelDetection.

GetLabelDetection returns an array of detected labels (Labels) sorted by the time the labels were detected. You can also sort by the label name by specifying NAME for the SortBy input parameter. If there is no NAME specified, the default sort is by timestamp.

You can select how results are aggregated by using the AggregateBy input parameter. The default aggregation method is TIMESTAMPS. You can also aggregate by SEGMENTS, which aggregates all instances of labels detected in a given segment.

The returned Labels array may include the following attributes:

Timestamp and Bounding box information are returned for detected Instances, only if aggregation is done by TIMESTAMPS. If aggregating by SEGMENTS, information about detected instances isn’t returned.

The version of the label model used for the detection is also returned.

Note DominantColors isn't returned for Instances, although it is shown as part of the response in the sample seen below.

Use MaxResults parameter to limit the number of labels returned. If there are more results than specified in MaxResults, the value of NextToken in the operation response contains a pagination token for getting the next set of results. To get the next page of results, call GetLabelDetection and populate the NextToken request parameter with the token value returned from the previous call to GetLabelDetection.

", + "GetMediaAnalysisJob": "

Retrieves the results for a given media analysis job. Takes a JobId returned by StartMediaAnalysisJob.

", "GetPersonTracking": "

Gets the path tracking results of a Amazon Rekognition Video analysis started by StartPersonTracking.

The person path tracking operation is started by a call to StartPersonTracking which returns a job identifier (JobId). When the operation finishes, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic registered in the initial call to StartPersonTracking.

To get the results of the person path tracking operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetPersonTracking and pass the job identifier (JobId) from the initial call to StartPersonTracking.

GetPersonTracking returns an array, Persons, of tracked persons and the time(s) their paths were tracked in the video.

GetPersonTracking only returns the default facial attributes (BoundingBox, Confidence, Landmarks, Pose, and Quality). The other facial attributes listed in the Face object of the following response syntax are not returned.

For more information, see FaceDetail in the Amazon Rekognition Developer Guide.

By default, the array is sorted by the time(s) a person's path is tracked in the video. You can sort by tracked persons by specifying INDEX for the SortBy input parameter.

Use the MaxResults parameter to limit the number of items returned. If there are more results than specified in MaxResults, the value of NextToken in the operation response contains a pagination token for getting the next set of results. To get the next page of results, call GetPersonTracking and populate the NextToken request parameter with the token value returned from the previous call to GetPersonTracking.

", "GetSegmentDetection": "

Gets the segment detection results of a Amazon Rekognition Video analysis started by StartSegmentDetection.

Segment detection with Amazon Rekognition Video is an asynchronous operation. You start segment detection by calling StartSegmentDetection which returns a job identifier (JobId). When the segment detection operation finishes, Amazon Rekognition publishes a completion status to the Amazon Simple Notification Service topic registered in the initial call to StartSegmentDetection. To get the results of the segment detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetSegmentDetection and pass the job identifier (JobId) from the initial call of StartSegmentDetection.

GetSegmentDetection returns detected segments in an array (Segments) of SegmentDetection objects. Segments is sorted by the segment types specified in the SegmentTypes input parameter of StartSegmentDetection. Each element of the array includes the detected segment, the percentage confidence in the accuracy of the detected segment, the type of the segment, and the frame in which the segment was detected.

Use SelectedSegmentTypes to find out the type of segment detection requested in the call to StartSegmentDetection.

Use the MaxResults parameter to limit the number of segment detections returned. If there are more results than specified in MaxResults, the value of NextToken in the operation response contains a pagination token for getting the next set of results. To get the next page of results, call GetSegmentDetection and populate the NextToken request parameter with the token value returned from the previous call to GetSegmentDetection.

For more information, see Detecting video segments in stored video in the Amazon Rekognition Developer Guide.

", "GetTextDetection": "

Gets the text detection results of a Amazon Rekognition Video analysis started by StartTextDetection.

Text detection with Amazon Rekognition Video is an asynchronous operation. You start text detection by calling StartTextDetection which returns a job identifier (JobId). When the text detection operation finishes, Amazon Rekognition publishes a completion status to the Amazon Simple Notification Service topic registered in the initial call to StartTextDetection. To get the results of the text detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetTextDetection and pass the job identifier (JobId) from the initial call of StartTextDetection.

GetTextDetection returns an array of detected text (TextDetections) sorted by the time the text was detected, up to 100 words per frame of video.

Each element of the array includes the detected text, the percentage confidence in the accuracy of the detected text, the time the text was detected, bounding box information for where the text was located, and unique identifiers for words and their lines.

Use MaxResults parameter to limit the number of text detections returned. If there are more results than specified in MaxResults, the value of NextToken in the operation response contains a pagination token for getting the next set of results. To get the next page of results, call GetTextDetection and populate the NextToken request parameter with the token value returned from the previous call to GetTextDetection.

", @@ -48,6 +49,7 @@ "ListDatasetEntries": "

This operation applies only to Amazon Rekognition Custom Labels.

Lists the entries (images) within a dataset. An entry is a JSON Line that contains the information for a single image, including the image location, assigned labels, and object location bounding boxes. For more information, see Creating a manifest file.

JSON Lines in the response include information about non-terminal errors found in the dataset. Non terminal errors are reported in errors lists within each JSON Line. The same information is reported in the training and testing validation result manifests that Amazon Rekognition Custom Labels creates during model training.

You can filter the response in variety of ways, such as choosing which labels to return and returning JSON Lines created after a specific date.

This operation requires permissions to perform the rekognition:ListDatasetEntries action.

", "ListDatasetLabels": "

This operation applies only to Amazon Rekognition Custom Labels.

Lists the labels in a dataset. Amazon Rekognition Custom Labels uses labels to describe images. For more information, see Labeling images.

Lists the labels in a dataset. Amazon Rekognition Custom Labels uses labels to describe images. For more information, see Labeling images in the Amazon Rekognition Custom Labels Developer Guide.

", "ListFaces": "

Returns metadata for faces in the specified collection. This metadata includes information such as the bounding box coordinates, the confidence (that the bounding box contains a face), and face ID. For an example, see Listing Faces in a Collection in the Amazon Rekognition Developer Guide.

This operation requires permissions to perform the rekognition:ListFaces action.

", + "ListMediaAnalysisJobs": "

Returns a list of media analysis jobs. Results are sorted by CreationTimestamp in descending order.

", "ListProjectPolicies": "

This operation applies only to Amazon Rekognition Custom Labels.

Gets a list of the project policies attached to a project.

To attach a project policy to a project, call PutProjectPolicy. To remove a project policy from a project, call DeleteProjectPolicy.

This operation requires permissions to perform the rekognition:ListProjectPolicies action.

", "ListStreamProcessors": "

Gets a list of stream processors that you have created with CreateStreamProcessor.

", "ListTagsForResource": "

Returns a list of tags in an Amazon Rekognition collection, stream processor, or Custom Labels model.

This operation requires permissions to perform the rekognition:ListTagsForResource action.

", @@ -63,6 +65,7 @@ "StartFaceDetection": "

Starts asynchronous detection of faces in a stored video.

Amazon Rekognition Video can detect faces in a video stored in an Amazon S3 bucket. Use Video to specify the bucket name and the filename of the video. StartFaceDetection returns a job identifier (JobId) that you use to get the results of the operation. When face detection is finished, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic that you specify in NotificationChannel. To get the results of the face detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetFaceDetection and pass the job identifier (JobId) from the initial call to StartFaceDetection.

For more information, see Detecting faces in a stored video in the Amazon Rekognition Developer Guide.

", "StartFaceSearch": "

Starts the asynchronous search for faces in a collection that match the faces of persons detected in a stored video.

The video must be stored in an Amazon S3 bucket. Use Video to specify the bucket name and the filename of the video. StartFaceSearch returns a job identifier (JobId) which you use to get the search results once the search has completed. When searching is finished, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic that you specify in NotificationChannel. To get the search results, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetFaceSearch and pass the job identifier (JobId) from the initial call to StartFaceSearch. For more information, see Searching stored videos for faces.

", "StartLabelDetection": "

Starts asynchronous detection of labels in a stored video.

Amazon Rekognition Video can detect labels in a video. Labels are instances of real-world entities. This includes objects like flower, tree, and table; events like wedding, graduation, and birthday party; concepts like landscape, evening, and nature; and activities like a person getting out of a car or a person skiing.

The video must be stored in an Amazon S3 bucket. Use Video to specify the bucket name and the filename of the video. StartLabelDetection returns a job identifier (JobId) which you use to get the results of the operation. When label detection is finished, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic that you specify in NotificationChannel.

To get the results of the label detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetLabelDetection and pass the job identifier (JobId) from the initial call to StartLabelDetection.

Optional Parameters

StartLabelDetection has the GENERAL_LABELS Feature applied by default. This feature allows you to provide filtering criteria to the Settings parameter. You can filter with sets of individual labels or with label categories. You can specify inclusive filters, exclusive filters, or a combination of inclusive and exclusive filters. For more information on filtering, see Detecting labels in a video.

You can specify MinConfidence to control the confidence threshold for the labels returned. The default is 50.

", + "StartMediaAnalysisJob": "

Initiates a new media analysis job. Accepts a manifest file in an Amazon S3 bucket. The output is a manifest file and a summary of the manifest stored in the Amazon S3 bucket.

", "StartPersonTracking": "

Starts the asynchronous tracking of a person's path in a stored video.

Amazon Rekognition Video can track the path of people in a video stored in an Amazon S3 bucket. Use Video to specify the bucket name and the filename of the video. StartPersonTracking returns a job identifier (JobId) which you use to get the results of the operation. When label detection is finished, Amazon Rekognition publishes a completion status to the Amazon Simple Notification Service topic that you specify in NotificationChannel.

To get the results of the person detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetPersonTracking and pass the job identifier (JobId) from the initial call to StartPersonTracking.

", "StartProjectVersion": "

This operation applies only to Amazon Rekognition Custom Labels.

Starts the running of the version of a model. Starting a model takes a while to complete. To check the current state of the model, use DescribeProjectVersions.

Once the model is running, you can detect custom labels in new images by calling DetectCustomLabels.

You are charged for the amount of time that the model is running. To stop a running model, call StopProjectVersion.

This operation requires permissions to perform the rekognition:StartProjectVersion action.

", "StartSegmentDetection": "

Starts asynchronous detection of segment detection in a stored video.

Amazon Rekognition Video can detect segments in a video stored in an Amazon S3 bucket. Use Video to specify the bucket name and the filename of the video. StartSegmentDetection returns a job identifier (JobId) which you use to get the results of the operation. When segment detection is finished, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic that you specify in NotificationChannel.

You can use the Filters (StartSegmentDetectionFilters) input parameter to specify the minimum detection confidence returned in the response. Within Filters, use ShotFilter (StartShotDetectionFilter) to filter detected shots. Use TechnicalCueFilter (StartTechnicalCueDetectionFilter) to filter technical cues.

To get the results of the segment detection operation, first check that the status value published to the Amazon SNS topic is SUCCEEDED. If so, call GetSegmentDetection and pass the job identifier (JobId) from the initial call to StartSegmentDetection.

For more information, see Detecting video segments in stored video in the Amazon Rekognition Developer Guide.

", @@ -286,6 +289,7 @@ "StartFaceDetectionRequest$ClientRequestToken": "

Idempotent token used to identify the start request. If you use the same token with multiple StartFaceDetection requests, the same JobId is returned. Use ClientRequestToken to prevent the same job from being accidentally started more than once.

", "StartFaceSearchRequest$ClientRequestToken": "

Idempotent token used to identify the start request. If you use the same token with multiple StartFaceSearch requests, the same JobId is returned. Use ClientRequestToken to prevent the same job from being accidentally started more than once.

", "StartLabelDetectionRequest$ClientRequestToken": "

Idempotent token used to identify the start request. If you use the same token with multiple StartLabelDetection requests, the same JobId is returned. Use ClientRequestToken to prevent the same job from being accidentally started more than once.

", + "StartMediaAnalysisJobRequest$ClientRequestToken": "

Idempotency token used to prevent the accidental creation of duplicate versions. If you use the same token with multiple StartMediaAnalysisJobRequest requests, the same response is returned. Use ClientRequestToken to prevent the same request from being processed more than once.

", "StartPersonTrackingRequest$ClientRequestToken": "

Idempotent token used to identify the start request. If you use the same token with multiple StartPersonTracking requests, the same JobId is returned. Use ClientRequestToken to prevent the same job from being accidentally started more than once.

", "StartSegmentDetectionRequest$ClientRequestToken": "

Idempotent token used to identify the start request. If you use the same token with multiple StartSegmentDetection requests, the same JobId is returned. Use ClientRequestToken to prevent the same job from being accidentally started more than once.

", "StartTextDetectionRequest$ClientRequestToken": "

Idempotent token used to identify the start request. If you use the same token with multiple StartTextDetection requests, the same JobId is returned. Use ClientRequestToken to prevent the same job from being accidentally started more than once.

" @@ -692,6 +696,10 @@ "DescribeCollectionResponse$CreationTimestamp": "

The number of milliseconds since the Unix epoch time until the creation of the collection. The Unix epoch time is 00:00:00 Coordinated Universal Time (UTC), Thursday, 1 January 1970.

", "DescribeStreamProcessorResponse$CreationTimestamp": "

Date and time the stream processor was created

", "DescribeStreamProcessorResponse$LastUpdateTimestamp": "

The time, in Unix format, the stream processor was last updated. For example, when the stream processor moves from a running state to a failed state, or when the user starts or stops the stream processor.

", + "GetMediaAnalysisJobResponse$CreationTimestamp": "

The Unix date and time when the job was started.

", + "GetMediaAnalysisJobResponse$CompletionTimestamp": "

The Unix date and time when the job finished.

", + "MediaAnalysisJobDescription$CreationTimestamp": "

The Unix date and time when the job was started.

", + "MediaAnalysisJobDescription$CompletionTimestamp": "

The Unix date and time when the job finished.

", "ProjectDescription$CreationTimestamp": "

The Unix timestamp for the date and time that the project was created.

", "ProjectPolicy$CreationTimestamp": "

The Unix datetime for the creation of the project policy.

", "ProjectPolicy$LastUpdatedTimestamp": "

The Unix datetime for when the project policy was last updated.

", @@ -1075,6 +1083,8 @@ "ListDatasetEntriesResponse$NextToken": "

If the previous response was incomplete (because there is more results to retrieve), Amazon Rekognition Custom Labels returns a pagination token in the response. You can use this pagination token to retrieve the next set of results.

", "ListDatasetLabelsRequest$NextToken": "

If the previous response was incomplete (because there is more results to retrieve), Amazon Rekognition Custom Labels returns a pagination token in the response. You can use this pagination token to retrieve the next set of results.

", "ListDatasetLabelsResponse$NextToken": "

If the previous response was incomplete (because there is more results to retrieve), Amazon Rekognition Custom Labels returns a pagination token in the response. You can use this pagination token to retrieve the next set of results.

", + "ListMediaAnalysisJobsRequest$NextToken": "

Pagination token, if the previous response was incomplete.

", + "ListMediaAnalysisJobsResponse$NextToken": "

Pagination token, if the previous response was incomplete.

", "ListProjectPoliciesRequest$NextToken": "

If the previous response was incomplete (because there is more results to retrieve), Amazon Rekognition Custom Labels returns a pagination token in the response. You can use this pagination token to retrieve the next set of results.

", "ListProjectPoliciesResponse$NextToken": "

If the response is truncated, Amazon Rekognition returns this token that you can use in the subsequent request to retrieve the next set of project policies.

" } @@ -1379,6 +1389,16 @@ "refs": { } }, + "GetMediaAnalysisJobRequest": { + "base": null, + "refs": { + } + }, + "GetMediaAnalysisJobResponse": { + "base": null, + "refs": { + } + }, "GetPersonTrackingRequest": { "base": null, "refs": { @@ -1575,6 +1595,11 @@ "refs": { } }, + "InvalidManifestException": { + "base": "

Indicates that a provided manifest file is empty or larger than the allowed limit.

", + "refs": { + } + }, "InvalidPaginationTokenException": { "base": "

Pagination token in the request is not valid.

", "refs": { @@ -1695,7 +1720,10 @@ "CreateProjectVersionRequest$KmsKeyId": "

The identifier for your AWS Key Management Service key (AWS KMS key). You can supply the Amazon Resource Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key, or an alias ARN. The key is used to encrypt training images, test images, and manifest files copied into the service for the project version. Your source images are unaffected. The key is also used to encrypt training results and manifest files written to the output Amazon S3 bucket (OutputConfig).

If you choose to use your own KMS key, you need the following permissions on the KMS key.

If you don't specify a value for KmsKeyId, images copied into the service are encrypted using a key that AWS owns and manages.

", "CreateStreamProcessorRequest$KmsKeyId": "

The identifier for your AWS Key Management Service key (AWS KMS key). This is an optional parameter for label detection stream processors and should not be used to create a face search stream processor. You can supply the Amazon Resource Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key, or an alias ARN. The key is used to encrypt results and data published to your Amazon S3 bucket, which includes image frames and hero images. Your source images are unaffected.

", "DescribeStreamProcessorResponse$KmsKeyId": "

The identifier for your AWS Key Management Service key (AWS KMS key). This is an optional parameter for label detection stream processors.

", - "ProjectVersionDescription$KmsKeyId": "

The identifer for the AWS Key Management Service key (AWS KMS key) that was used to encrypt the model during training.

" + "GetMediaAnalysisJobResponse$KmsKeyId": "

KMS Key that was provided in the creation request.

", + "MediaAnalysisJobDescription$KmsKeyId": "

KMS Key that was provided in the creation request.

", + "ProjectVersionDescription$KmsKeyId": "

The identifier for the AWS Key Management Service key (AWS KMS key) that was used to encrypt the model during training.

", + "StartMediaAnalysisJobRequest$KmsKeyId": "

The identifier of customer managed AWS KMS key (name or ARN). The key is used to encrypt images copied into the service. The key is also used to encrypt results and manifest files written to the output Amazon S3 bucket.

" } }, "KnownGender": { @@ -1869,6 +1897,22 @@ "refs": { } }, + "ListMediaAnalysisJobsPageSize": { + "base": null, + "refs": { + "ListMediaAnalysisJobsRequest$MaxResults": "

The maximum number of results to return per paginated call. The largest value user can specify is 100. If user specifies a value greater than 100, an InvalidParameterException error occurs. The default value is 100.

" + } + }, + "ListMediaAnalysisJobsRequest": { + "base": null, + "refs": { + } + }, + "ListMediaAnalysisJobsResponse": { + "base": null, + "refs": { + } + }, "ListProjectPoliciesPageSize": { "base": null, "refs": { @@ -2005,6 +2049,105 @@ "SearchUsersRequest$MaxUsers": "

Maximum number of identities to return.

" } }, + "MediaAnalysisDetectModerationLabelsConfig": { + "base": "

Configuration for Moderation Labels Detection.

", + "refs": { + "MediaAnalysisOperationsConfig$DetectModerationLabels": "

Contains configuration options for a DetectModerationLabels job.

" + } + }, + "MediaAnalysisInput": { + "base": "

Contains input information for a media analysis job.

", + "refs": { + "GetMediaAnalysisJobResponse$Input": "

Reference to the input manifest that was provided in the job creation request.

", + "MediaAnalysisJobDescription$Input": "

Reference to the input manifest that was provided in the job creation request.

", + "StartMediaAnalysisJobRequest$Input": "

Input data to be analyzed by the job.

" + } + }, + "MediaAnalysisJobDescription": { + "base": "

Description for a media analysis job.

", + "refs": { + "MediaAnalysisJobDescriptions$member": null + } + }, + "MediaAnalysisJobDescriptions": { + "base": null, + "refs": { + "ListMediaAnalysisJobsResponse$MediaAnalysisJobs": "

Contains a list of all media analysis jobs.

" + } + }, + "MediaAnalysisJobFailureCode": { + "base": null, + "refs": { + "MediaAnalysisJobFailureDetails$Code": "

Error code for the failed job.

" + } + }, + "MediaAnalysisJobFailureDetails": { + "base": "

Details about the error that resulted in failure of the job.

", + "refs": { + "GetMediaAnalysisJobResponse$FailureDetails": "

Details about the error that resulted in failure of the job.

", + "MediaAnalysisJobDescription$FailureDetails": "

Details about the error that resulted in failure of the job.

" + } + }, + "MediaAnalysisJobId": { + "base": null, + "refs": { + "GetMediaAnalysisJobRequest$JobId": "

Unique identifier for the media analysis job for which you want to retrieve results.

", + "GetMediaAnalysisJobResponse$JobId": "

The identifier for the media analysis job.

", + "MediaAnalysisJobDescription$JobId": "

The identifier for a media analysis job.

", + "StartMediaAnalysisJobResponse$JobId": "

Identifier for the created job.

" + } + }, + "MediaAnalysisJobName": { + "base": null, + "refs": { + "GetMediaAnalysisJobResponse$JobName": "

The name of the media analysis job.

", + "MediaAnalysisJobDescription$JobName": "

The name of a media analysis job.

", + "StartMediaAnalysisJobRequest$JobName": "

The name of the job. Does not have to be unique.

" + } + }, + "MediaAnalysisJobStatus": { + "base": null, + "refs": { + "GetMediaAnalysisJobResponse$Status": "

The current status of the media analysis job.

", + "MediaAnalysisJobDescription$Status": "

The status of the media analysis job being retrieved.

" + } + }, + "MediaAnalysisManifestSummary": { + "base": "

Summary that provides statistics on input manifest and errors identified in the input manifest.

", + "refs": { + "GetMediaAnalysisJobResponse$ManifestSummary": "

The summary manifest provides statistics on input manifest and errors identified in the input manifest.

", + "MediaAnalysisJobDescription$ManifestSummary": "

Provides statistics on input manifest and errors identified in the input manifest.

" + } + }, + "MediaAnalysisOperationsConfig": { + "base": "

Configuration options for a media analysis job. Configuration is operation-specific.

", + "refs": { + "GetMediaAnalysisJobResponse$OperationsConfig": "

Operation configurations that were provided during job creation.

", + "MediaAnalysisJobDescription$OperationsConfig": "

Operation configurations that were provided during job creation.

", + "StartMediaAnalysisJobRequest$OperationsConfig": "

Configuration options for the media analysis job to be created.

" + } + }, + "MediaAnalysisOutputConfig": { + "base": "

Output configuration provided in the job creation request.

", + "refs": { + "GetMediaAnalysisJobResponse$OutputConfig": "

Output configuration that was provided in the creation request.

", + "MediaAnalysisJobDescription$OutputConfig": "

Output configuration that was provided in the creation request.

", + "StartMediaAnalysisJobRequest$OutputConfig": "

The Amazon S3 bucket location to store the results.

" + } + }, + "MediaAnalysisResults": { + "base": "

Contains the results for a media analysis job created with StartMediaAnalysisJob.

", + "refs": { + "GetMediaAnalysisJobResponse$Results": "

Output manifest that contains prediction results.

", + "MediaAnalysisJobDescription$Results": "

Output manifest that contains prediction results.

" + } + }, + "MediaAnalysisS3KeyPrefix": { + "base": null, + "refs": { + "MediaAnalysisOutputConfig$S3KeyPrefix": "

Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for storage.

" + } + }, "MinCoveragePercentage": { "base": null, "refs": { @@ -2150,6 +2293,7 @@ "GetFaceLivenessSessionResultsResponse$Confidence": "

Probabilistic confidence score for if the person in the given video was live, represented as a float value between 0 and 100.

", "Instance$Confidence": "

The confidence that Amazon Rekognition has in the accuracy of the bounding box.

", "Label$Confidence": "

Level of confidence.

", + "MediaAnalysisDetectModerationLabelsConfig$MinConfidence": "

Specifies the minimum confidence level for the moderation labels to return. Amazon Rekognition doesn't return any labels with a confidence level lower than this specified value.

", "ModerationLabel$Confidence": "

Specifies the confidence that Amazon Rekognition has that the label has been correctly identified.

If you don't specify the MinConfidence parameter in the call to DetectModerationLabels, the operation returns labels with a confidence value greater than or equal to 50 percent.

", "MouthOpen$Confidence": "

Level of confidence in the determination.

", "Mustache$Confidence": "

Level of confidence in the determination.

", @@ -2356,7 +2500,8 @@ "base": null, "refs": { "DetectModerationLabelsRequest$ProjectVersion": "

Identifier for the custom adapter. Expects the ProjectVersionArn as a value. Use the CreateProject or CreateProjectVersion APIs to create a custom adapter.

", - "DetectModerationLabelsResponse$ProjectVersion": "

Identifier of the custom adapter that was used during inference. If during inference the adapter was EXPIRED, then the parameter will not be returned, indicating that a base moderation detection project version was used.

" + "DetectModerationLabelsResponse$ProjectVersion": "

Identifier of the custom adapter that was used during inference. If during inference the adapter was EXPIRED, then the parameter will not be returned, indicating that a base moderation detection project version was used.

", + "MediaAnalysisDetectModerationLabelsConfig$ProjectVersion": "

Specifies the custom moderation model to be used during the label detection job. If not provided the pre-trained model is used.

" } }, "ProjectVersionStatus": { @@ -2547,6 +2692,7 @@ "base": null, "refs": { "LivenessOutputConfig$S3Bucket": "

The path to an AWS Amazon S3 bucket used to store Face Liveness session results.

", + "MediaAnalysisOutputConfig$S3Bucket": "

Specifies the Amazon S3 bucket to contain the output of the media analysis job.

", "OutputConfig$S3Bucket": "

The S3 bucket where training output is placed.

", "S3Destination$Bucket": "

The name of the Amazon S3 bucket you want to associate with the streaming video project. You must be the owner of the Amazon S3 bucket.

", "S3Object$Bucket": "

Name of the S3 bucket.

" @@ -2571,6 +2717,9 @@ "AuditImage$S3Object": null, "GroundTruthManifest$S3Object": null, "Image$S3Object": "

Identifies an S3 object as the image source.

", + "MediaAnalysisInput$S3Object": null, + "MediaAnalysisManifestSummary$S3Object": null, + "MediaAnalysisResults$S3Object": null, "Summary$S3Object": null, "Video$S3Object": "

The Amazon S3 bucket name and file name for the video.

" } @@ -2772,6 +2921,16 @@ "refs": { } }, + "StartMediaAnalysisJobRequest": { + "base": null, + "refs": { + } + }, + "StartMediaAnalysisJobResponse": { + "base": null, + "refs": { + } + }, "StartPersonTrackingRequest": { "base": null, "refs": { @@ -3026,6 +3185,7 @@ "LabelCategory$Name": "

The name of a category that applies to a given label.

", "ListFacesResponse$NextToken": "

If the response is truncated, Amazon Rekognition returns this token that you can use in the subsequent request to retrieve the next set of faces.

", "ListFacesResponse$FaceModelVersion": "

Version number of the face detection model associated with the input collection (CollectionId).

", + "MediaAnalysisJobFailureDetails$Message": "

Human readable error message.

", "ModerationLabel$Name": "

The label name for the type of unsafe content detected in the image.

", "ModerationLabel$ParentName": "

The name for the parent label. Labels at the top level of the hierarchy have the parent label \"\".

", "Parent$Name": "

The name of the parent label.

", diff --git a/models/apis/rekognition/2016-06-27/endpoint-rule-set-1.json b/models/apis/rekognition/2016-06-27/endpoint-rule-set-1.json index c30bffadd5f..66c7174b0b4 100644 --- a/models/apis/rekognition/2016-06-27/endpoint-rule-set-1.json +++ b/models/apis/rekognition/2016-06-27/endpoint-rule-set-1.json @@ -40,7 +40,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -83,7 +82,8 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -96,7 +96,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -110,7 +109,6 @@ "assign": "PartitionResult" } ], - "type": "tree", "rules": [ { "conditions": [ @@ -133,7 +131,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -168,7 +165,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [], @@ -179,14 +175,16 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "FIPS and DualStack are enabled, but this partition does not support one or both", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -200,14 +198,12 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ { "fn": "booleanEquals", "argv": [ - true, { "fn": "getAttr", "argv": [ @@ -216,11 +212,11 @@ }, "supportsFIPS" ] - } + }, + true ] } ], - "type": "tree", "rules": [ { "conditions": [], @@ -231,14 +227,16 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "FIPS is enabled but this partition does not support FIPS", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -252,7 +250,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -272,7 +269,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [], @@ -283,14 +279,16 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "DualStack is enabled but this partition does not support DualStack", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [], @@ -301,9 +299,11 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" } - 
] + ], + "type": "tree" }, { "conditions": [], diff --git a/models/apis/rekognition/2016-06-27/examples-1.json b/models/apis/rekognition/2016-06-27/examples-1.json index df60c62075e..d42da3a6f33 100644 --- a/models/apis/rekognition/2016-06-27/examples-1.json +++ b/models/apis/rekognition/2016-06-27/examples-1.json @@ -764,6 +764,57 @@ "title": "To distribute an Amazon Rekognition Custom Labels dataset" } ], + "GetMediaAnalysisJob": [ + { + "input": { + "JobId": "861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537" + }, + "output": { + "CompletionTimestamp": "2023-07-28T08:05:51.958000-07:00", + "CreationTimestamp": "2023-07-28T08:05:51.958000-06:00", + "Input": { + "S3Object": { + "Bucket": "input-bucket", + "Name": "input-manifest.json" + } + }, + "JobId": "861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537", + "JobName": "job-name", + "ManifestSummary": { + "S3Object": { + "Bucket": "output-bucket", + "Name": "output-location/861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537-manifest-summary.json" + } + }, + "OperationsConfig": { + "DetectModerationLabels": { + "MinConfidence": 50, + "ProjectVersion": "arn:aws:rekognition:us-east-1:111122223333:project/my-project/version/1/1690556751958" + } + }, + "OutputConfig": { + "S3Bucket": "output-bucket", + "S3KeyPrefix": "output-location" + }, + "Results": { + "S3Object": { + "Bucket": "output-bucket", + "Name": "output-location/861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537-results.jsonl" + } + }, + "Status": "SUCCEEDED" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "Retrieves the results for a given media analysis job.", + "id": "getmediaanalysisjob-1697650068124", + "title": "GetMediaAnalysisJob" + } + ], "IndexFaces": [ { "input": { @@ -1060,6 +1111,61 @@ "title": "To list the faces in a collection" } ], + "ListMediaAnalysisJobs": [ + { + "input": { + "MaxResults": 10 + }, + "output": { + "MediaAnalysisJobs": [ + { + 
"CompletionTimestamp": "2023-07-28T08:05:51.958000-07:00", + "CreationTimestamp": "2023-07-28T08:05:51.958000-06:00", + "Input": { + "S3Object": { + "Bucket": "input-bucket", + "Name": "input-manifest.json" + } + }, + "JobId": "861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537", + "JobName": "job-name", + "ManifestSummary": { + "S3Object": { + "Bucket": "output-bucket", + "Name": "output-location/861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537-manifest-summary.json" + } + }, + "OperationsConfig": { + "DetectModerationLabels": { + "MinConfidence": 50, + "ProjectVersion": "arn:aws:rekognition:us-east-1:111122223333:project/my-project/version/1/1690556751958" + } + }, + "OutputConfig": { + "S3Bucket": "output-bucket", + "S3KeyPrefix": "output-location" + }, + "Results": { + "S3Object": { + "Bucket": "output-bucket", + "Name": "output-location/861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537-results.jsonl" + } + }, + "Status": "SUCCEEDED" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "Returns a list of media analysis jobs.", + "id": "listmediaanalysisjobs-1697650653077", + "title": "ListMediaAnalysisJobs" + } + ], "ListProjectPolicies": [ { "input": { @@ -1366,6 +1472,41 @@ "title": "SearchUsersByImage" } ], + "StartMediaAnalysisJob": [ + { + "input": { + "Input": { + "S3Object": { + "Bucket": "input-bucket", + "Name": "input-manifest.json" + } + }, + "JobName": "job-name", + "OperationsConfig": { + "DetectModerationLabels": { + "MinConfidence": 50, + "ProjectVersion": "arn:aws:rekognition:us-east-1:111122223333:project/my-project/version/1/1690556751958" + } + }, + "OutputConfig": { + "S3Bucket": "output-bucket", + "S3KeyPrefix": "output-location" + } + }, + "output": { + "JobId": "861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "Initiates a new media analysis job.", + "id": 
"startmediaanalysisjob-1697651090922", + "title": "StartMediaAnalysisJob" + } + ], "StartProjectVersion": [ { "input": { diff --git a/models/apis/rekognition/2016-06-27/paginators-1.json b/models/apis/rekognition/2016-06-27/paginators-1.json index 6ac67e68e21..edb2343cca4 100644 --- a/models/apis/rekognition/2016-06-27/paginators-1.json +++ b/models/apis/rekognition/2016-06-27/paginators-1.json @@ -76,6 +76,11 @@ "output_token": "NextToken", "result_key": "Faces" }, + "ListMediaAnalysisJobs": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken" + }, "ListProjectPolicies": { "input_token": "NextToken", "limit_key": "MaxResults", diff --git a/models/endpoints/endpoints.json b/models/endpoints/endpoints.json index 9ebd6d1629b..aa86d857875 100644 --- a/models/endpoints/endpoints.json +++ b/models/endpoints/endpoints.json @@ -2436,6 +2436,12 @@ }, "hostname" : "bedrock.ap-southeast-1.amazonaws.com" }, + "bedrock-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "bedrock.eu-central-1.amazonaws.com" + }, "bedrock-fips-us-east-1" : { "credentialScope" : { "region" : "us-east-1" @@ -2460,6 +2466,12 @@ }, "hostname" : "bedrock-runtime.ap-southeast-1.amazonaws.com" }, + "bedrock-runtime-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "bedrock-runtime.eu-central-1.amazonaws.com" + }, "bedrock-runtime-fips-us-east-1" : { "credentialScope" : { "region" : "us-east-1" @@ -2496,6 +2508,7 @@ }, "hostname" : "bedrock.us-west-2.amazonaws.com" }, + "eu-central-1" : { }, "us-east-1" : { }, "us-west-2" : { } } @@ -9086,8 +9099,29 @@ }, "iottwinmaker" : { "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, "ap-southeast-1" : { }, "ap-southeast-2" : { }, + "api-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "api.iottwinmaker.ap-northeast-1.amazonaws.com" + }, + "api-ap-northeast-2" : { + 
"credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "api.iottwinmaker.ap-northeast-2.amazonaws.com" + }, + "api-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "api.iottwinmaker.ap-south-1.amazonaws.com" + }, "api-ap-southeast-1" : { "credentialScope" : { "region" : "ap-southeast-1" @@ -9124,6 +9158,24 @@ }, "hostname" : "api.iottwinmaker.us-west-2.amazonaws.com" }, + "data-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "data.iottwinmaker.ap-northeast-1.amazonaws.com" + }, + "data-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "data.iottwinmaker.ap-northeast-2.amazonaws.com" + }, + "data-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "data.iottwinmaker.ap-south-1.amazonaws.com" + }, "data-ap-southeast-1" : { "credentialScope" : { "region" : "ap-southeast-1" diff --git a/service/marketplacecommerceanalytics/api.go b/service/marketplacecommerceanalytics/api.go index 217fa467d55..3f18cb1cb81 100644 --- a/service/marketplacecommerceanalytics/api.go +++ b/service/marketplacecommerceanalytics/api.go @@ -124,7 +124,12 @@ const opStartSupportDataExport = "StartSupportDataExport" // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/marketplacecommerceanalytics-2015-07-01/StartSupportDataExport +// +// Deprecated: This target has been deprecated. As of December 2022 Product Support Connection is no longer supported. 
func (c *MarketplaceCommerceAnalytics) StartSupportDataExportRequest(input *StartSupportDataExportInput) (req *request.Request, output *StartSupportDataExportOutput) { + if c.Client.Config.Logger != nil { + c.Client.Config.Logger.Log("This operation, StartSupportDataExport, has been deprecated") + } op := &request.Operation{ Name: opStartSupportDataExport, HTTPMethod: "POST", @@ -142,17 +147,17 @@ func (c *MarketplaceCommerceAnalytics) StartSupportDataExportRequest(input *Star // StartSupportDataExport API operation for AWS Marketplace Commerce Analytics. // -// Given a data set type and a from date, asynchronously publishes the requested -// customer support data to the specified S3 bucket and notifies the specified -// SNS topic once the data is available. Returns a unique request identifier -// that can be used to correlate requests with notifications from the SNS topic. -// Data sets will be published in comma-separated values (CSV) format with the -// file name {data_set_type}_YYYY-MM-DD'T'HH-mm-ss'Z'.csv. If a file with the -// same name already exists (e.g. if the same data set is requested twice), -// the original file will be overwritten by the new file. Requires a Role with -// an attached permissions policy providing Allow permissions for the following -// actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, -// iam:GetRolePolicy. +// This target has been deprecated. Given a data set type and a from date, asynchronously +// publishes the requested customer support data to the specified S3 bucket +// and notifies the specified SNS topic once the data is available. Returns +// a unique request identifier that can be used to correlate requests with notifications +// from the SNS topic. Data sets will be published in comma-separated values +// (CSV) format with the file name {data_set_type}_YYYY-MM-DD'T'HH-mm-ss'Z'.csv. +// If a file with the same name already exists (e.g. 
if the same data set is +// requested twice), the original file will be overwritten by the new file. +// Requires a Role with an attached permissions policy providing Allow permissions +// for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, +// sns:Publish, iam:GetRolePolicy. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -166,6 +171,8 @@ func (c *MarketplaceCommerceAnalytics) StartSupportDataExportRequest(input *Star // This exception is thrown when an internal service error occurs. // // See also, https://docs.aws.amazon.com/goto/WebAPI/marketplacecommerceanalytics-2015-07-01/StartSupportDataExport +// +// Deprecated: This target has been deprecated. As of December 2022 Product Support Connection is no longer supported. func (c *MarketplaceCommerceAnalytics) StartSupportDataExport(input *StartSupportDataExportInput) (*StartSupportDataExportOutput, error) { req, out := c.StartSupportDataExportRequest(input) return out, req.Send() @@ -180,6 +187,8 @@ func (c *MarketplaceCommerceAnalytics) StartSupportDataExport(input *StartSuppor // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. +// +// Deprecated: This target has been deprecated. As of December 2022 Product Support Connection is no longer supported. func (c *MarketplaceCommerceAnalytics) StartSupportDataExportWithContext(ctx aws.Context, input *StartSupportDataExportInput, opts ...request.Option) (*StartSupportDataExportOutput, error) { req, out := c.StartSupportDataExportRequest(input) req.SetContext(ctx) @@ -517,20 +526,25 @@ func (s *GenerateDataSetOutput) SetDataSetRequestId(v string) *GenerateDataSetOu return s } -// Container for the parameters to the StartSupportDataExport operation. 
+// This target has been deprecated. Container for the parameters to the StartSupportDataExport +// operation. +// +// Deprecated: This target has been deprecated. As of December 2022 Product Support Connection is no longer supported. type StartSupportDataExportInput struct { - _ struct{} `type:"structure"` + _ struct{} `deprecated:"true" type:"structure"` - // (Optional) Key-value pairs which will be returned, unmodified, in the Amazon - // SNS notification message and the data set metadata file. + // This target has been deprecated. (Optional) Key-value pairs which will be + // returned, unmodified, in the Amazon SNS notification message and the data + // set metadata file. CustomerDefinedValues map[string]*string `locationName:"customerDefinedValues" min:"1" type:"map"` - // Specifies the data set type to be written to the output csv file. The data - // set types customer_support_contacts_data and test_customer_support_contacts_data - // both result in a csv file containing the following fields: Product Id, Product - // Code, Customer Guid, Subscription Guid, Subscription Start Date, Organization, - // AWS Account Id, Given Name, Surname, Telephone Number, Email, Title, Country - // Code, ZIP Code, Operation Type, and Operation Time. + // This target has been deprecated. Specifies the data set type to be written + // to the output csv file. The data set types customer_support_contacts_data + // and test_customer_support_contacts_data both result in a csv file containing + // the following fields: Product Id, Product Code, Customer Guid, Subscription + // Guid, Subscription Start Date, Organization, AWS Account Id, Given Name, + // Surname, Telephone Number, Email, Title, Country Code, ZIP Code, Operation + // Type, and Operation Time. // // * customer_support_contacts_data Customer support contact data. 
The data // set will contain all changes (Creates, Updates, and Deletes) to customer @@ -542,33 +556,36 @@ type StartSupportDataExportInput struct { // DataSetType is a required field DataSetType *string `locationName:"dataSetType" min:"1" type:"string" required:"true" enum:"SupportDataSetType"` - // The name (friendly name, not ARN) of the destination S3 bucket. + // This target has been deprecated. The name (friendly name, not ARN) of the + // destination S3 bucket. // // DestinationS3BucketName is a required field DestinationS3BucketName *string `locationName:"destinationS3BucketName" min:"1" type:"string" required:"true"` - // (Optional) The desired S3 prefix for the published data set, similar to a - // directory path in standard file systems. For example, if given the bucket - // name "mybucket" and the prefix "myprefix/mydatasets", the output file "outputfile" - // would be published to "s3://mybucket/myprefix/mydatasets/outputfile". If - // the prefix directory structure does not exist, it will be created. If no - // prefix is provided, the data set will be published to the S3 bucket root. + // This target has been deprecated. (Optional) The desired S3 prefix for the + // published data set, similar to a directory path in standard file systems. + // For example, if given the bucket name "mybucket" and the prefix "myprefix/mydatasets", + // the output file "outputfile" would be published to "s3://mybucket/myprefix/mydatasets/outputfile". + // If the prefix directory structure does not exist, it will be created. If + // no prefix is provided, the data set will be published to the S3 bucket root. DestinationS3Prefix *string `locationName:"destinationS3Prefix" type:"string"` - // The start date from which to retrieve the data set in UTC. This parameter - // only affects the customer_support_contacts_data data set type. + // This target has been deprecated. The start date from which to retrieve the + // data set in UTC. 
This parameter only affects the customer_support_contacts_data + // data set type. // // FromDate is a required field FromDate *time.Time `locationName:"fromDate" type:"timestamp" required:"true"` - // The Amazon Resource Name (ARN) of the Role with an attached permissions policy - // to interact with the provided AWS services. + // This target has been deprecated. The Amazon Resource Name (ARN) of the Role + // with an attached permissions policy to interact with the provided AWS services. // // RoleNameArn is a required field RoleNameArn *string `locationName:"roleNameArn" min:"1" type:"string" required:"true"` - // Amazon Resource Name (ARN) for the SNS Topic that will be notified when the - // data set has been published or if an error has occurred. + // This target has been deprecated. Amazon Resource Name (ARN) for the SNS Topic + // that will be notified when the data set has been published or if an error + // has occurred. // // SnsTopicArn is a required field SnsTopicArn *string `locationName:"snsTopicArn" min:"1" type:"string" required:"true"` @@ -674,13 +691,16 @@ func (s *StartSupportDataExportInput) SetSnsTopicArn(v string) *StartSupportData return s } -// Container for the result of the StartSupportDataExport operation. +// This target has been deprecated. Container for the result of the StartSupportDataExport +// operation. +// +// Deprecated: This target has been deprecated. As of December 2022 Product Support Connection is no longer supported. type StartSupportDataExportOutput struct { - _ struct{} `type:"structure"` + _ struct{} `deprecated:"true" type:"structure"` - // A unique identifier representing a specific request to the StartSupportDataExport - // operation. This identifier can be used to correlate a request with notifications - // from the SNS topic. + // This target has been deprecated. A unique identifier representing a specific + // request to the StartSupportDataExport operation. 
This identifier can be used + // to correlate a request with notifications from the SNS topic. DataSetRequestId *string `locationName:"dataSetRequestId" type:"string"` } diff --git a/service/networkmanager/api.go b/service/networkmanager/api.go index d396c88f98b..564d4dac6e7 100644 --- a/service/networkmanager/api.go +++ b/service/networkmanager/api.go @@ -10341,6 +10341,9 @@ type ConnectPeer struct { // The state of the Connect peer. State *string `type:"string" enum:"ConnectPeerState"` + // The subnet ARN for the Connect peer. + SubnetArn *string `type:"string"` + // The list of key-value tags associated with the Connect peer. Tags []*Tag `type:"list"` } @@ -10405,6 +10408,12 @@ func (s *ConnectPeer) SetState(v string) *ConnectPeer { return s } +// SetSubnetArn sets the SubnetArn field's value. +func (s *ConnectPeer) SetSubnetArn(v string) *ConnectPeer { + s.SubnetArn = &v + return s +} + // SetTags sets the Tags field's value. func (s *ConnectPeer) SetTags(v []*Tag) *ConnectPeer { s.Tags = v @@ -10628,6 +10637,9 @@ type ConnectPeerSummary struct { // The Region where the edge is located. EdgeLocation *string `min:"1" type:"string"` + // The subnet ARN for the Connect peer summary. + SubnetArn *string `type:"string"` + // The list of key-value tags associated with the Connect peer summary. Tags []*Tag `type:"list"` } @@ -10686,6 +10698,12 @@ func (s *ConnectPeerSummary) SetEdgeLocation(v string) *ConnectPeerSummary { return s } +// SetSubnetArn sets the SubnetArn field's value. +func (s *ConnectPeerSummary) SetSubnetArn(v string) *ConnectPeerSummary { + s.SubnetArn = &v + return s +} + // SetTags sets the Tags field's value. func (s *ConnectPeerSummary) SetTags(v []*Tag) *ConnectPeerSummary { s.Tags = v @@ -11973,15 +11991,16 @@ type CreateConnectPeerInput struct { CoreNetworkAddress *string `min:"1" type:"string"` // The inside IP addresses used for BGP peering. 
- // - // InsideCidrBlocks is a required field - InsideCidrBlocks []*string `type:"list" required:"true"` + InsideCidrBlocks []*string `type:"list"` // The Connect peer address. // // PeerAddress is a required field PeerAddress *string `min:"1" type:"string" required:"true"` + // The subnet ARN for the Connect peer. + SubnetArn *string `type:"string"` + // The tags associated with the peer request. Tags []*Tag `type:"list"` } @@ -12013,9 +12032,6 @@ func (s *CreateConnectPeerInput) Validate() error { if s.CoreNetworkAddress != nil && len(*s.CoreNetworkAddress) < 1 { invalidParams.Add(request.NewErrParamMinLen("CoreNetworkAddress", 1)) } - if s.InsideCidrBlocks == nil { - invalidParams.Add(request.NewErrParamRequired("InsideCidrBlocks")) - } if s.PeerAddress == nil { invalidParams.Add(request.NewErrParamRequired("PeerAddress")) } @@ -12065,6 +12081,12 @@ func (s *CreateConnectPeerInput) SetPeerAddress(v string) *CreateConnectPeerInpu return s } +// SetSubnetArn sets the SubnetArn field's value. +func (s *CreateConnectPeerInput) SetSubnetArn(v string) *CreateConnectPeerInput { + s.SubnetArn = &v + return s +} + // SetTags sets the Tags field's value. func (s *CreateConnectPeerInput) SetTags(v []*Tag) *CreateConnectPeerInput { s.Tags = v @@ -21251,7 +21273,8 @@ type RouteTableIdentifier struct { // The segment edge in a core network. CoreNetworkSegmentEdge *CoreNetworkSegmentEdgeIdentifier `type:"structure"` - // The ARN of the transit gateway route table. + // The ARN of the transit gateway route table for the attachment request. For + // example, "TransitGatewayRouteTableArn": "arn:aws:ec2:us-west-2:123456789012:transit-gateway-route-table/tgw-rtb-9876543210123456". 
TransitGatewayRouteTableArn *string `type:"string"` } @@ -24170,12 +24193,16 @@ func TransitGatewayRegistrationState_Values() []string { const ( // TunnelProtocolGre is a TunnelProtocol enum value TunnelProtocolGre = "GRE" + + // TunnelProtocolNoEncap is a TunnelProtocol enum value + TunnelProtocolNoEncap = "NO_ENCAP" ) // TunnelProtocol_Values returns all elements of the TunnelProtocol enum func TunnelProtocol_Values() []string { return []string{ TunnelProtocolGre, + TunnelProtocolNoEncap, } } diff --git a/service/redshiftserverless/api.go b/service/redshiftserverless/api.go index b8c6da469f4..35991e04011 100644 --- a/service/redshiftserverless/api.go +++ b/service/redshiftserverless/api.go @@ -10267,6 +10267,11 @@ type Workgroup struct { // The namespace the workgroup is associated with. NamespaceName *string `locationName:"namespaceName" type:"string"` + // The patch version of your Amazon Redshift Serverless workgroup. For more + // information about patch versions, see Cluster versions for Amazon Redshift + // (https://docs.aws.amazon.com/redshift/latest/mgmt/cluster-versions.html). + PatchVersion *string `locationName:"patchVersion" type:"string"` + // The custom port to use when connecting to a workgroup. Valid port ranges // are 5431-5455 and 8191-8215. The default is 5439. Port *int64 `locationName:"port" type:"integer"` @@ -10292,6 +10297,11 @@ type Workgroup struct { // The name of the workgroup. WorkgroupName *string `locationName:"workgroupName" min:"3" type:"string"` + + // The Amazon Redshift Serverless version of your workgroup. For more information + // about Amazon Redshift Serverless versions, seeCluster versions for Amazon + // Redshift (https://docs.aws.amazon.com/redshift/latest/mgmt/cluster-versions.html). + WorkgroupVersion *string `locationName:"workgroupVersion" type:"string"` } // String returns the string representation. 
@@ -10348,6 +10358,12 @@ func (s *Workgroup) SetNamespaceName(v string) *Workgroup { return s } +// SetPatchVersion sets the PatchVersion field's value. +func (s *Workgroup) SetPatchVersion(v string) *Workgroup { + s.PatchVersion = &v + return s +} + // SetPort sets the Port field's value. func (s *Workgroup) SetPort(v int64) *Workgroup { s.Port = &v @@ -10396,6 +10412,12 @@ func (s *Workgroup) SetWorkgroupName(v string) *Workgroup { return s } +// SetWorkgroupVersion sets the WorkgroupVersion field's value. +func (s *Workgroup) SetWorkgroupVersion(v string) *Workgroup { + s.WorkgroupVersion = &v + return s +} + const ( // LogExportUseractivitylog is a LogExport enum value LogExportUseractivitylog = "useractivitylog" diff --git a/service/rekognition/api.go b/service/rekognition/api.go index 4f14c8721ec..d233bdcc53f 100644 --- a/service/rekognition/api.go +++ b/service/rekognition/api.go @@ -5008,6 +5008,100 @@ func (c *Rekognition) GetLabelDetectionPagesWithContext(ctx aws.Context, input * return p.Err() } +const opGetMediaAnalysisJob = "GetMediaAnalysisJob" + +// GetMediaAnalysisJobRequest generates a "aws/request.Request" representing the +// client's request for the GetMediaAnalysisJob operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See GetMediaAnalysisJob for more information on using the GetMediaAnalysisJob +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the GetMediaAnalysisJobRequest method. 
+// req, resp := client.GetMediaAnalysisJobRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +func (c *Rekognition) GetMediaAnalysisJobRequest(input *GetMediaAnalysisJobInput) (req *request.Request, output *GetMediaAnalysisJobOutput) { + op := &request.Operation{ + Name: opGetMediaAnalysisJob, + HTTPMethod: "POST", + HTTPPath: "/", + } + + if input == nil { + input = &GetMediaAnalysisJobInput{} + } + + output = &GetMediaAnalysisJobOutput{} + req = c.newRequest(op, input, output) + return +} + +// GetMediaAnalysisJob API operation for Amazon Rekognition. +// +// Retrieves the results for a given media analysis job. Takes a JobId returned +// by StartMediaAnalysisJob. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Rekognition's +// API operation GetMediaAnalysisJob for usage and error information. +// +// Returned Error Types: +// +// - AccessDeniedException +// You are not authorized to perform the action. +// +// - ResourceNotFoundException +// The resource specified in the request cannot be found. +// +// - InternalServerError +// Amazon Rekognition experienced a service issue. Try your call again. +// +// - InvalidParameterException +// Input parameter violated a constraint. Validate your parameter before calling +// the API operation again. +// +// - ProvisionedThroughputExceededException +// The number of requests exceeded your throughput limit. If you want to increase +// this limit, contact Amazon Rekognition. +// +// - ThrottlingException +// Amazon Rekognition is temporarily unable to process the request. Try your +// call again. 
+func (c *Rekognition) GetMediaAnalysisJob(input *GetMediaAnalysisJobInput) (*GetMediaAnalysisJobOutput, error) { + req, out := c.GetMediaAnalysisJobRequest(input) + return out, req.Send() +} + +// GetMediaAnalysisJobWithContext is the same as GetMediaAnalysisJob with the addition of +// the ability to pass a context and additional request options. +// +// See GetMediaAnalysisJob for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *Rekognition) GetMediaAnalysisJobWithContext(ctx aws.Context, input *GetMediaAnalysisJobInput, opts ...request.Option) (*GetMediaAnalysisJobOutput, error) { + req, out := c.GetMediaAnalysisJobRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + const opGetPersonTracking = "GetPersonTracking" // GetPersonTrackingRequest generates a "aws/request.Request" representing the @@ -6425,6 +6519,157 @@ func (c *Rekognition) ListFacesPagesWithContext(ctx aws.Context, input *ListFace return p.Err() } +const opListMediaAnalysisJobs = "ListMediaAnalysisJobs" + +// ListMediaAnalysisJobsRequest generates a "aws/request.Request" representing the +// client's request for the ListMediaAnalysisJobs operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See ListMediaAnalysisJobs for more information on using the ListMediaAnalysisJobs +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. 
Such as custom headers, or retry logic. +// +// // Example sending a request using the ListMediaAnalysisJobsRequest method. +// req, resp := client.ListMediaAnalysisJobsRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +func (c *Rekognition) ListMediaAnalysisJobsRequest(input *ListMediaAnalysisJobsInput) (req *request.Request, output *ListMediaAnalysisJobsOutput) { + op := &request.Operation{ + Name: opListMediaAnalysisJobs, + HTTPMethod: "POST", + HTTPPath: "/", + Paginator: &request.Paginator{ + InputTokens: []string{"NextToken"}, + OutputTokens: []string{"NextToken"}, + LimitToken: "MaxResults", + TruncationToken: "", + }, + } + + if input == nil { + input = &ListMediaAnalysisJobsInput{} + } + + output = &ListMediaAnalysisJobsOutput{} + req = c.newRequest(op, input, output) + return +} + +// ListMediaAnalysisJobs API operation for Amazon Rekognition. +// +// Returns a list of media analysis jobs. Results are sorted by CreationTimestamp +// in descending order. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Rekognition's +// API operation ListMediaAnalysisJobs for usage and error information. +// +// Returned Error Types: +// +// - AccessDeniedException +// You are not authorized to perform the action. +// +// - InternalServerError +// Amazon Rekognition experienced a service issue. Try your call again. +// +// - InvalidParameterException +// Input parameter violated a constraint. Validate your parameter before calling +// the API operation again. +// +// - InvalidPaginationTokenException +// Pagination token in the request is not valid. +// +// - ProvisionedThroughputExceededException +// The number of requests exceeded your throughput limit. If you want to increase +// this limit, contact Amazon Rekognition. 
+// +// - ThrottlingException +// Amazon Rekognition is temporarily unable to process the request. Try your +// call again. +func (c *Rekognition) ListMediaAnalysisJobs(input *ListMediaAnalysisJobsInput) (*ListMediaAnalysisJobsOutput, error) { + req, out := c.ListMediaAnalysisJobsRequest(input) + return out, req.Send() +} + +// ListMediaAnalysisJobsWithContext is the same as ListMediaAnalysisJobs with the addition of +// the ability to pass a context and additional request options. +// +// See ListMediaAnalysisJobs for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *Rekognition) ListMediaAnalysisJobsWithContext(ctx aws.Context, input *ListMediaAnalysisJobsInput, opts ...request.Option) (*ListMediaAnalysisJobsOutput, error) { + req, out := c.ListMediaAnalysisJobsRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + +// ListMediaAnalysisJobsPages iterates over the pages of a ListMediaAnalysisJobs operation, +// calling the "fn" function with the response data for each page. To stop +// iterating, return false from the fn function. +// +// See ListMediaAnalysisJobs method for more information on how to use this operation. +// +// Note: This operation can generate multiple requests to a service. +// +// // Example iterating over at most 3 pages of a ListMediaAnalysisJobs operation. 
+// pageNum := 0 +// err := client.ListMediaAnalysisJobsPages(params, +// func(page *rekognition.ListMediaAnalysisJobsOutput, lastPage bool) bool { +// pageNum++ +// fmt.Println(page) +// return pageNum <= 3 +// }) +func (c *Rekognition) ListMediaAnalysisJobsPages(input *ListMediaAnalysisJobsInput, fn func(*ListMediaAnalysisJobsOutput, bool) bool) error { + return c.ListMediaAnalysisJobsPagesWithContext(aws.BackgroundContext(), input, fn) +} + +// ListMediaAnalysisJobsPagesWithContext same as ListMediaAnalysisJobsPages except +// it takes a Context and allows setting request options on the pages. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *Rekognition) ListMediaAnalysisJobsPagesWithContext(ctx aws.Context, input *ListMediaAnalysisJobsInput, fn func(*ListMediaAnalysisJobsOutput, bool) bool, opts ...request.Option) error { + p := request.Pagination{ + NewRequest: func() (*request.Request, error) { + var inCpy *ListMediaAnalysisJobsInput + if input != nil { + tmp := *input + inCpy = &tmp + } + req, _ := c.ListMediaAnalysisJobsRequest(inCpy) + req.SetContext(ctx) + req.ApplyOptions(opts...) 
+ return req, nil + }, + } + + for p.Next() { + if !fn(p.Page().(*ListMediaAnalysisJobsOutput), !p.HasNextPage()) { + break + } + } + + return p.Err() +} + const opListProjectPolicies = "ListProjectPolicies" // ListProjectPoliciesRequest generates a "aws/request.Request" representing the @@ -8366,91 +8611,83 @@ func (c *Rekognition) StartLabelDetectionWithContext(ctx aws.Context, input *Sta return out, req.Send() } -const opStartPersonTracking = "StartPersonTracking" +const opStartMediaAnalysisJob = "StartMediaAnalysisJob" -// StartPersonTrackingRequest generates a "aws/request.Request" representing the -// client's request for the StartPersonTracking operation. The "output" return +// StartMediaAnalysisJobRequest generates a "aws/request.Request" representing the +// client's request for the StartMediaAnalysisJob operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // -// See StartPersonTracking for more information on using the StartPersonTracking +// See StartMediaAnalysisJob for more information on using the StartMediaAnalysisJob // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // -// // Example sending a request using the StartPersonTrackingRequest method. -// req, resp := client.StartPersonTrackingRequest(params) +// // Example sending a request using the StartMediaAnalysisJobRequest method. 
+// req, resp := client.StartMediaAnalysisJobRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } -func (c *Rekognition) StartPersonTrackingRequest(input *StartPersonTrackingInput) (req *request.Request, output *StartPersonTrackingOutput) { +func (c *Rekognition) StartMediaAnalysisJobRequest(input *StartMediaAnalysisJobInput) (req *request.Request, output *StartMediaAnalysisJobOutput) { op := &request.Operation{ - Name: opStartPersonTracking, + Name: opStartMediaAnalysisJob, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { - input = &StartPersonTrackingInput{} + input = &StartMediaAnalysisJobInput{} } - output = &StartPersonTrackingOutput{} + output = &StartMediaAnalysisJobOutput{} req = c.newRequest(op, input, output) return } -// StartPersonTracking API operation for Amazon Rekognition. -// -// Starts the asynchronous tracking of a person's path in a stored video. -// -// Amazon Rekognition Video can track the path of people in a video stored in -// an Amazon S3 bucket. Use Video to specify the bucket name and the filename -// of the video. StartPersonTracking returns a job identifier (JobId) which -// you use to get the results of the operation. When label detection is finished, -// Amazon Rekognition publishes a completion status to the Amazon Simple Notification -// Service topic that you specify in NotificationChannel. +// StartMediaAnalysisJob API operation for Amazon Rekognition. // -// To get the results of the person detection operation, first check that the -// status value published to the Amazon SNS topic is SUCCEEDED. If so, call -// GetPersonTracking and pass the job identifier (JobId) from the initial call -// to StartPersonTracking. +// Initiates a new media analysis job. Accepts a manifest file in an Amazon +// S3 bucket. The output is a manifest file and a summary of the manifest stored +// in the Amazon S3 bucket. // // Returns awserr.Error for service API and SDK errors. 
Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Rekognition's -// API operation StartPersonTracking for usage and error information. +// API operation StartMediaAnalysisJob for usage and error information. // // Returned Error Types: // +// - InternalServerError +// Amazon Rekognition experienced a service issue. Try your call again. +// // - AccessDeniedException // You are not authorized to perform the action. // -// - IdempotentParameterMismatchException -// A ClientRequestToken input parameter was reused with an operation, but at -// least one of the other input parameters is different from the previous call -// to the operation. -// // - InvalidParameterException // Input parameter violated a constraint. Validate your parameter before calling // the API operation again. // +// - InvalidManifestException +// Indicates that a provided manifest file is empty or larger than the allowed +// limit. +// // - InvalidS3ObjectException // Amazon Rekognition is unable to access the S3 object specified in the request. // -// - InternalServerError -// Amazon Rekognition experienced a service issue. Try your call again. +// - ResourceNotFoundException +// The resource specified in the request cannot be found. // -// - VideoTooLargeException -// The file size or duration of the supplied media is too large. The maximum -// file size is 10GB. The maximum duration is 6 hours. +// - ResourceNotReadyException +// The requested resource isn't ready. For example, this exception occurs when +// you call DetectCustomLabels with a model version that isn't deployed. // // - ProvisionedThroughputExceededException // The number of requests exceeded your throughput limit. 
If you want to increase @@ -8466,38 +8703,164 @@ func (c *Rekognition) StartPersonTrackingRequest(input *StartPersonTrackingInput // - ThrottlingException // Amazon Rekognition is temporarily unable to process the request. Try your // call again. -func (c *Rekognition) StartPersonTracking(input *StartPersonTrackingInput) (*StartPersonTrackingOutput, error) { - req, out := c.StartPersonTrackingRequest(input) +// +// - IdempotentParameterMismatchException +// A ClientRequestToken input parameter was reused with an operation, but at +// least one of the other input parameters is different from the previous call +// to the operation. +func (c *Rekognition) StartMediaAnalysisJob(input *StartMediaAnalysisJobInput) (*StartMediaAnalysisJobOutput, error) { + req, out := c.StartMediaAnalysisJobRequest(input) return out, req.Send() } -// StartPersonTrackingWithContext is the same as StartPersonTracking with the addition of +// StartMediaAnalysisJobWithContext is the same as StartMediaAnalysisJob with the addition of // the ability to pass a context and additional request options. // -// See StartPersonTracking for details on how to use this API operation. +// See StartMediaAnalysisJob for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. -func (c *Rekognition) StartPersonTrackingWithContext(ctx aws.Context, input *StartPersonTrackingInput, opts ...request.Option) (*StartPersonTrackingOutput, error) { - req, out := c.StartPersonTrackingRequest(input) +func (c *Rekognition) StartMediaAnalysisJobWithContext(ctx aws.Context, input *StartMediaAnalysisJobInput, opts ...request.Option) (*StartMediaAnalysisJobOutput, error) { + req, out := c.StartMediaAnalysisJobRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) 
return out, req.Send() } -const opStartProjectVersion = "StartProjectVersion" +const opStartPersonTracking = "StartPersonTracking" -// StartProjectVersionRequest generates a "aws/request.Request" representing the -// client's request for the StartProjectVersion operation. The "output" return +// StartPersonTrackingRequest generates a "aws/request.Request" representing the +// client's request for the StartPersonTracking operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // -// See StartProjectVersion for more information on using the StartProjectVersion +// See StartPersonTracking for more information on using the StartPersonTracking +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the StartPersonTrackingRequest method. +// req, resp := client.StartPersonTrackingRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +func (c *Rekognition) StartPersonTrackingRequest(input *StartPersonTrackingInput) (req *request.Request, output *StartPersonTrackingOutput) { + op := &request.Operation{ + Name: opStartPersonTracking, + HTTPMethod: "POST", + HTTPPath: "/", + } + + if input == nil { + input = &StartPersonTrackingInput{} + } + + output = &StartPersonTrackingOutput{} + req = c.newRequest(op, input, output) + return +} + +// StartPersonTracking API operation for Amazon Rekognition. +// +// Starts the asynchronous tracking of a person's path in a stored video. +// +// Amazon Rekognition Video can track the path of people in a video stored in +// an Amazon S3 bucket. 
Use Video to specify the bucket name and the filename +// of the video. StartPersonTracking returns a job identifier (JobId) which +// you use to get the results of the operation. When label detection is finished, +// Amazon Rekognition publishes a completion status to the Amazon Simple Notification +// Service topic that you specify in NotificationChannel. +// +// To get the results of the person detection operation, first check that the +// status value published to the Amazon SNS topic is SUCCEEDED. If so, call +// GetPersonTracking and pass the job identifier (JobId) from the initial call +// to StartPersonTracking. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Rekognition's +// API operation StartPersonTracking for usage and error information. +// +// Returned Error Types: +// +// - AccessDeniedException +// You are not authorized to perform the action. +// +// - IdempotentParameterMismatchException +// A ClientRequestToken input parameter was reused with an operation, but at +// least one of the other input parameters is different from the previous call +// to the operation. +// +// - InvalidParameterException +// Input parameter violated a constraint. Validate your parameter before calling +// the API operation again. +// +// - InvalidS3ObjectException +// Amazon Rekognition is unable to access the S3 object specified in the request. +// +// - InternalServerError +// Amazon Rekognition experienced a service issue. Try your call again. +// +// - VideoTooLargeException +// The file size or duration of the supplied media is too large. The maximum +// file size is 10GB. The maximum duration is 6 hours. +// +// - ProvisionedThroughputExceededException +// The number of requests exceeded your throughput limit. 
If you want to increase +// this limit, contact Amazon Rekognition. +// +// - LimitExceededException +// An Amazon Rekognition service limit was exceeded. For example, if you start +// too many jobs concurrently, subsequent calls to start operations (ex: StartLabelDetection) +// will raise a LimitExceededException exception (HTTP status code: 400) until +// the number of concurrently running jobs is below the Amazon Rekognition service +// limit. +// +// - ThrottlingException +// Amazon Rekognition is temporarily unable to process the request. Try your +// call again. +func (c *Rekognition) StartPersonTracking(input *StartPersonTrackingInput) (*StartPersonTrackingOutput, error) { + req, out := c.StartPersonTrackingRequest(input) + return out, req.Send() +} + +// StartPersonTrackingWithContext is the same as StartPersonTracking with the addition of +// the ability to pass a context and additional request options. +// +// See StartPersonTracking for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *Rekognition) StartPersonTrackingWithContext(ctx aws.Context, input *StartPersonTrackingInput, opts ...request.Option) (*StartPersonTrackingOutput, error) { + req, out := c.StartPersonTrackingRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + +const opStartProjectVersion = "StartProjectVersion" + +// StartProjectVersionRequest generates a "aws/request.Request" representing the +// client's request for the StartProjectVersion operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. 
+// the "output" return value is not valid until after Send returns without error. +// +// See StartProjectVersion for more information on using the StartProjectVersion // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration @@ -18300,30 +18663,14 @@ func (s *GetLabelDetectionRequestMetadata) SetSortBy(v string) *GetLabelDetectio return s } -type GetPersonTrackingInput struct { +type GetMediaAnalysisJobInput struct { _ struct{} `type:"structure"` - // The identifier for a job that tracks persons in a video. You get the JobId - // from a call to StartPersonTracking. + // Unique identifier for the media analysis job for which you want to retrieve + // results. // // JobId is a required field JobId *string `min:"1" type:"string" required:"true"` - - // Maximum number of results to return per paginated call. The largest value - // you can specify is 1000. If you specify a value greater than 1000, a maximum - // of 1000 results is returned. The default value is 1000. - MaxResults *int64 `min:"1" type:"integer"` - - // If the previous response was incomplete (because there are more persons to - // retrieve), Amazon Rekognition Video returns a pagination token in the response. - // You can use this pagination token to retrieve the next set of persons. - NextToken *string `type:"string"` - - // Sort to use for elements in the Persons array. Use TIMESTAMP to sort array - // elements by the time persons are detected. Use INDEX to sort by the tracked - // persons. If you sort by INDEX, the array elements for each person are sorted - // by detection confidence. The default sort is by TIMESTAMP. - SortBy *string `type:"string" enum:"PersonTrackingSortBy"` } // String returns the string representation. @@ -18331,7 +18678,7 @@ type GetPersonTrackingInput struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. 
The member name will be present, but the // value will be replaced with "sensitive". -func (s GetPersonTrackingInput) String() string { +func (s GetMediaAnalysisJobInput) String() string { return awsutil.Prettify(s) } @@ -18340,22 +18687,19 @@ func (s GetPersonTrackingInput) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s GetPersonTrackingInput) GoString() string { +func (s GetMediaAnalysisJobInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. -func (s *GetPersonTrackingInput) Validate() error { - invalidParams := request.ErrInvalidParams{Context: "GetPersonTrackingInput"} +func (s *GetMediaAnalysisJobInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "GetMediaAnalysisJobInput"} if s.JobId == nil { invalidParams.Add(request.NewErrParamRequired("JobId")) } if s.JobId != nil && len(*s.JobId) < 1 { invalidParams.Add(request.NewErrParamMinLen("JobId", 1)) } - if s.MaxResults != nil && *s.MaxResults < 1 { - invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) - } if invalidParams.Len() > 0 { return invalidParams @@ -18364,65 +18708,62 @@ func (s *GetPersonTrackingInput) Validate() error { } // SetJobId sets the JobId field's value. -func (s *GetPersonTrackingInput) SetJobId(v string) *GetPersonTrackingInput { +func (s *GetMediaAnalysisJobInput) SetJobId(v string) *GetMediaAnalysisJobInput { s.JobId = &v return s } -// SetMaxResults sets the MaxResults field's value. -func (s *GetPersonTrackingInput) SetMaxResults(v int64) *GetPersonTrackingInput { - s.MaxResults = &v - return s -} +type GetMediaAnalysisJobOutput struct { + _ struct{} `type:"structure"` -// SetNextToken sets the NextToken field's value. 
-func (s *GetPersonTrackingInput) SetNextToken(v string) *GetPersonTrackingInput { - s.NextToken = &v - return s -} + // The Unix date and time when the job finished. + CompletionTimestamp *time.Time `type:"timestamp"` -// SetSortBy sets the SortBy field's value. -func (s *GetPersonTrackingInput) SetSortBy(v string) *GetPersonTrackingInput { - s.SortBy = &v - return s -} + // The Unix date and time when the job was started. + // + // CreationTimestamp is a required field + CreationTimestamp *time.Time `type:"timestamp" required:"true"` -type GetPersonTrackingOutput struct { - _ struct{} `type:"structure"` + // Details about the error that resulted in failure of the job. + FailureDetails *MediaAnalysisJobFailureDetails `type:"structure"` - // Job identifier for the person tracking operation for which you want to obtain - // results. The job identifer is returned by an initial call to StartPersonTracking. - JobId *string `min:"1" type:"string"` + // Reference to the input manifest that was provided in the job creation request. + // + // Input is a required field + Input *MediaAnalysisInput_ `type:"structure" required:"true"` - // The current status of the person tracking job. - JobStatus *string `type:"string" enum:"VideoJobStatus"` + // The identifier for the media analysis job. + // + // JobId is a required field + JobId *string `min:"1" type:"string" required:"true"` - // A job identifier specified in the call to StartCelebrityRecognition and returned - // in the job completion notification sent to your Amazon Simple Notification - // Service topic. - JobTag *string `min:"1" type:"string"` + // The name of the media analysis job. + JobName *string `min:"1" type:"string"` - // If the response is truncated, Amazon Rekognition Video returns this token - // that you can use in the subsequent request to retrieve the next set of persons. - NextToken *string `type:"string"` + // KMS Key that was provided in the creation request. 
+ KmsKeyId *string `min:"1" type:"string"` - // An array of the persons detected in the video and the time(s) their path - // was tracked throughout the video. An array element will exist for each time - // a person's path is tracked. - Persons []*PersonDetection `type:"list"` + // The summary manifest provides statistics on input manifest and errors identified + // in the input manifest. + ManifestSummary *MediaAnalysisManifestSummary `type:"structure"` - // If the job fails, StatusMessage provides a descriptive error message. - StatusMessage *string `type:"string"` + // Operation configurations that were provided during job creation. + // + // OperationsConfig is a required field + OperationsConfig *MediaAnalysisOperationsConfig `type:"structure" required:"true"` - // Video file stored in an Amazon S3 bucket. Amazon Rekognition video start - // operations such as StartLabelDetection use Video to specify a video for analysis. - // The supported file formats are .mp4, .mov and .avi. - Video *Video `type:"structure"` + // Output configuration that was provided in the creation request. + // + // OutputConfig is a required field + OutputConfig *MediaAnalysisOutputConfig `type:"structure" required:"true"` - // Information about a video that Amazon Rekognition Video analyzed. Videometadata - // is returned in every page of paginated responses from a Amazon Rekognition - // Video operation. - VideoMetadata *VideoMetadata `type:"structure"` + // Output manifest that contains prediction results. + Results *MediaAnalysisResults `type:"structure"` + + // The current status of the media analysis job. + // + // Status is a required field + Status *string `type:"string" required:"true" enum:"MediaAnalysisJobStatus"` } // String returns the string representation. @@ -18430,7 +18771,7 @@ type GetPersonTrackingOutput struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. 
The member name will be present, but the // value will be replaced with "sensitive". -func (s GetPersonTrackingOutput) String() string { +func (s GetMediaAnalysisJobOutput) String() string { return awsutil.Prettify(s) } @@ -18439,66 +18780,281 @@ func (s GetPersonTrackingOutput) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s GetPersonTrackingOutput) GoString() string { +func (s GetMediaAnalysisJobOutput) GoString() string { return s.String() } -// SetJobId sets the JobId field's value. -func (s *GetPersonTrackingOutput) SetJobId(v string) *GetPersonTrackingOutput { - s.JobId = &v +// SetCompletionTimestamp sets the CompletionTimestamp field's value. +func (s *GetMediaAnalysisJobOutput) SetCompletionTimestamp(v time.Time) *GetMediaAnalysisJobOutput { + s.CompletionTimestamp = &v return s } -// SetJobStatus sets the JobStatus field's value. -func (s *GetPersonTrackingOutput) SetJobStatus(v string) *GetPersonTrackingOutput { - s.JobStatus = &v +// SetCreationTimestamp sets the CreationTimestamp field's value. +func (s *GetMediaAnalysisJobOutput) SetCreationTimestamp(v time.Time) *GetMediaAnalysisJobOutput { + s.CreationTimestamp = &v return s } -// SetJobTag sets the JobTag field's value. -func (s *GetPersonTrackingOutput) SetJobTag(v string) *GetPersonTrackingOutput { - s.JobTag = &v +// SetFailureDetails sets the FailureDetails field's value. +func (s *GetMediaAnalysisJobOutput) SetFailureDetails(v *MediaAnalysisJobFailureDetails) *GetMediaAnalysisJobOutput { + s.FailureDetails = v return s } -// SetNextToken sets the NextToken field's value. -func (s *GetPersonTrackingOutput) SetNextToken(v string) *GetPersonTrackingOutput { - s.NextToken = &v +// SetInput sets the Input field's value. 
+func (s *GetMediaAnalysisJobOutput) SetInput(v *MediaAnalysisInput_) *GetMediaAnalysisJobOutput { + s.Input = v return s } -// SetPersons sets the Persons field's value. -func (s *GetPersonTrackingOutput) SetPersons(v []*PersonDetection) *GetPersonTrackingOutput { - s.Persons = v +// SetJobId sets the JobId field's value. +func (s *GetMediaAnalysisJobOutput) SetJobId(v string) *GetMediaAnalysisJobOutput { + s.JobId = &v return s } -// SetStatusMessage sets the StatusMessage field's value. -func (s *GetPersonTrackingOutput) SetStatusMessage(v string) *GetPersonTrackingOutput { - s.StatusMessage = &v +// SetJobName sets the JobName field's value. +func (s *GetMediaAnalysisJobOutput) SetJobName(v string) *GetMediaAnalysisJobOutput { + s.JobName = &v return s } -// SetVideo sets the Video field's value. -func (s *GetPersonTrackingOutput) SetVideo(v *Video) *GetPersonTrackingOutput { - s.Video = v +// SetKmsKeyId sets the KmsKeyId field's value. +func (s *GetMediaAnalysisJobOutput) SetKmsKeyId(v string) *GetMediaAnalysisJobOutput { + s.KmsKeyId = &v return s } -// SetVideoMetadata sets the VideoMetadata field's value. -func (s *GetPersonTrackingOutput) SetVideoMetadata(v *VideoMetadata) *GetPersonTrackingOutput { - s.VideoMetadata = v +// SetManifestSummary sets the ManifestSummary field's value. +func (s *GetMediaAnalysisJobOutput) SetManifestSummary(v *MediaAnalysisManifestSummary) *GetMediaAnalysisJobOutput { + s.ManifestSummary = v return s } -type GetSegmentDetectionInput struct { - _ struct{} `type:"structure"` +// SetOperationsConfig sets the OperationsConfig field's value. +func (s *GetMediaAnalysisJobOutput) SetOperationsConfig(v *MediaAnalysisOperationsConfig) *GetMediaAnalysisJobOutput { + s.OperationsConfig = v + return s +} - // Job identifier for the text detection operation for which you want results - // returned. You get the job identifer from an initial call to StartSegmentDetection. 
- // - // JobId is a required field - JobId *string `min:"1" type:"string" required:"true"` +// SetOutputConfig sets the OutputConfig field's value. +func (s *GetMediaAnalysisJobOutput) SetOutputConfig(v *MediaAnalysisOutputConfig) *GetMediaAnalysisJobOutput { + s.OutputConfig = v + return s +} + +// SetResults sets the Results field's value. +func (s *GetMediaAnalysisJobOutput) SetResults(v *MediaAnalysisResults) *GetMediaAnalysisJobOutput { + s.Results = v + return s +} + +// SetStatus sets the Status field's value. +func (s *GetMediaAnalysisJobOutput) SetStatus(v string) *GetMediaAnalysisJobOutput { + s.Status = &v + return s +} + +type GetPersonTrackingInput struct { + _ struct{} `type:"structure"` + + // The identifier for a job that tracks persons in a video. You get the JobId + // from a call to StartPersonTracking. + // + // JobId is a required field + JobId *string `min:"1" type:"string" required:"true"` + + // Maximum number of results to return per paginated call. The largest value + // you can specify is 1000. If you specify a value greater than 1000, a maximum + // of 1000 results is returned. The default value is 1000. + MaxResults *int64 `min:"1" type:"integer"` + + // If the previous response was incomplete (because there are more persons to + // retrieve), Amazon Rekognition Video returns a pagination token in the response. + // You can use this pagination token to retrieve the next set of persons. + NextToken *string `type:"string"` + + // Sort to use for elements in the Persons array. Use TIMESTAMP to sort array + // elements by the time persons are detected. Use INDEX to sort by the tracked + // persons. If you sort by INDEX, the array elements for each person are sorted + // by detection confidence. The default sort is by TIMESTAMP. + SortBy *string `type:"string" enum:"PersonTrackingSortBy"` +} + +// String returns the string representation. 
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetPersonTrackingInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetPersonTrackingInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *GetPersonTrackingInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "GetPersonTrackingInput"} + if s.JobId == nil { + invalidParams.Add(request.NewErrParamRequired("JobId")) + } + if s.JobId != nil && len(*s.JobId) < 1 { + invalidParams.Add(request.NewErrParamMinLen("JobId", 1)) + } + if s.MaxResults != nil && *s.MaxResults < 1 { + invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetJobId sets the JobId field's value. +func (s *GetPersonTrackingInput) SetJobId(v string) *GetPersonTrackingInput { + s.JobId = &v + return s +} + +// SetMaxResults sets the MaxResults field's value. +func (s *GetPersonTrackingInput) SetMaxResults(v int64) *GetPersonTrackingInput { + s.MaxResults = &v + return s +} + +// SetNextToken sets the NextToken field's value. +func (s *GetPersonTrackingInput) SetNextToken(v string) *GetPersonTrackingInput { + s.NextToken = &v + return s +} + +// SetSortBy sets the SortBy field's value. 
+func (s *GetPersonTrackingInput) SetSortBy(v string) *GetPersonTrackingInput { + s.SortBy = &v + return s +} + +type GetPersonTrackingOutput struct { + _ struct{} `type:"structure"` + + // Job identifier for the person tracking operation for which you want to obtain + // results. The job identifer is returned by an initial call to StartPersonTracking. + JobId *string `min:"1" type:"string"` + + // The current status of the person tracking job. + JobStatus *string `type:"string" enum:"VideoJobStatus"` + + // A job identifier specified in the call to StartCelebrityRecognition and returned + // in the job completion notification sent to your Amazon Simple Notification + // Service topic. + JobTag *string `min:"1" type:"string"` + + // If the response is truncated, Amazon Rekognition Video returns this token + // that you can use in the subsequent request to retrieve the next set of persons. + NextToken *string `type:"string"` + + // An array of the persons detected in the video and the time(s) their path + // was tracked throughout the video. An array element will exist for each time + // a person's path is tracked. + Persons []*PersonDetection `type:"list"` + + // If the job fails, StatusMessage provides a descriptive error message. + StatusMessage *string `type:"string"` + + // Video file stored in an Amazon S3 bucket. Amazon Rekognition video start + // operations such as StartLabelDetection use Video to specify a video for analysis. + // The supported file formats are .mp4, .mov and .avi. + Video *Video `type:"structure"` + + // Information about a video that Amazon Rekognition Video analyzed. Videometadata + // is returned in every page of paginated responses from a Amazon Rekognition + // Video operation. + VideoMetadata *VideoMetadata `type:"structure"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. 
The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetPersonTrackingOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetPersonTrackingOutput) GoString() string { + return s.String() +} + +// SetJobId sets the JobId field's value. +func (s *GetPersonTrackingOutput) SetJobId(v string) *GetPersonTrackingOutput { + s.JobId = &v + return s +} + +// SetJobStatus sets the JobStatus field's value. +func (s *GetPersonTrackingOutput) SetJobStatus(v string) *GetPersonTrackingOutput { + s.JobStatus = &v + return s +} + +// SetJobTag sets the JobTag field's value. +func (s *GetPersonTrackingOutput) SetJobTag(v string) *GetPersonTrackingOutput { + s.JobTag = &v + return s +} + +// SetNextToken sets the NextToken field's value. +func (s *GetPersonTrackingOutput) SetNextToken(v string) *GetPersonTrackingOutput { + s.NextToken = &v + return s +} + +// SetPersons sets the Persons field's value. +func (s *GetPersonTrackingOutput) SetPersons(v []*PersonDetection) *GetPersonTrackingOutput { + s.Persons = v + return s +} + +// SetStatusMessage sets the StatusMessage field's value. +func (s *GetPersonTrackingOutput) SetStatusMessage(v string) *GetPersonTrackingOutput { + s.StatusMessage = &v + return s +} + +// SetVideo sets the Video field's value. +func (s *GetPersonTrackingOutput) SetVideo(v *Video) *GetPersonTrackingOutput { + s.Video = v + return s +} + +// SetVideoMetadata sets the VideoMetadata field's value. 
+func (s *GetPersonTrackingOutput) SetVideoMetadata(v *VideoMetadata) *GetPersonTrackingOutput { + s.VideoMetadata = v + return s +} + +type GetSegmentDetectionInput struct { + _ struct{} `type:"structure"` + + // Job identifier for the text detection operation for which you want results + // returned. You get the job identifer from an initial call to StartSegmentDetection. + // + // JobId is a required field + JobId *string `min:"1" type:"string" required:"true"` // Maximum number of results to return per paginated call. The largest value // you can specify is 1000. @@ -19849,8 +20405,9 @@ func (s *InvalidImageFormatException) RequestID() string { return s.RespMetadata.RequestID } -// Pagination token in the request is not valid. -type InvalidPaginationTokenException struct { +// Indicates that a provided manifest file is empty or larger than the allowed +// limit. +type InvalidManifestException struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` @@ -19862,7 +20419,7 @@ type InvalidPaginationTokenException struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s InvalidPaginationTokenException) String() string { +func (s InvalidManifestException) String() string { return awsutil.Prettify(s) } @@ -19871,23 +20428,23 @@ func (s InvalidPaginationTokenException) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". 
-func (s InvalidPaginationTokenException) GoString() string { +func (s InvalidManifestException) GoString() string { return s.String() } -func newErrorInvalidPaginationTokenException(v protocol.ResponseMetadata) error { - return &InvalidPaginationTokenException{ +func newErrorInvalidManifestException(v protocol.ResponseMetadata) error { + return &InvalidManifestException{ RespMetadata: v, } } // Code returns the exception type name. -func (s *InvalidPaginationTokenException) Code() string { - return "InvalidPaginationTokenException" +func (s *InvalidManifestException) Code() string { + return "InvalidManifestException" } // Message returns the exception's message. -func (s *InvalidPaginationTokenException) Message() string { +func (s *InvalidManifestException) Message() string { if s.Message_ != nil { return *s.Message_ } @@ -19895,27 +20452,26 @@ func (s *InvalidPaginationTokenException) Message() string { } // OrigErr always returns nil, satisfies awserr.Error interface. -func (s *InvalidPaginationTokenException) OrigErr() error { +func (s *InvalidManifestException) OrigErr() error { return nil } -func (s *InvalidPaginationTokenException) Error() string { +func (s *InvalidManifestException) Error() string { return fmt.Sprintf("%s: %s", s.Code(), s.Message()) } // Status code returns the HTTP status code for the request's response error. -func (s *InvalidPaginationTokenException) StatusCode() int { +func (s *InvalidManifestException) StatusCode() int { return s.RespMetadata.StatusCode } // RequestID returns the service's response RequestID for request. -func (s *InvalidPaginationTokenException) RequestID() string { +func (s *InvalidManifestException) RequestID() string { return s.RespMetadata.RequestID } -// Input parameter violated a constraint. Validate your parameter before calling -// the API operation again. -type InvalidParameterException struct { +// Pagination token in the request is not valid. 
+type InvalidPaginationTokenException struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` @@ -19927,7 +20483,7 @@ type InvalidParameterException struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s InvalidParameterException) String() string { +func (s InvalidPaginationTokenException) String() string { return awsutil.Prettify(s) } @@ -19936,23 +20492,23 @@ func (s InvalidParameterException) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s InvalidParameterException) GoString() string { +func (s InvalidPaginationTokenException) GoString() string { return s.String() } -func newErrorInvalidParameterException(v protocol.ResponseMetadata) error { - return &InvalidParameterException{ +func newErrorInvalidPaginationTokenException(v protocol.ResponseMetadata) error { + return &InvalidPaginationTokenException{ RespMetadata: v, } } // Code returns the exception type name. -func (s *InvalidParameterException) Code() string { - return "InvalidParameterException" +func (s *InvalidPaginationTokenException) Code() string { + return "InvalidPaginationTokenException" } // Message returns the exception's message. -func (s *InvalidParameterException) Message() string { +func (s *InvalidPaginationTokenException) Message() string { if s.Message_ != nil { return *s.Message_ } @@ -19960,26 +20516,27 @@ func (s *InvalidParameterException) Message() string { } // OrigErr always returns nil, satisfies awserr.Error interface. 
-func (s *InvalidParameterException) OrigErr() error { +func (s *InvalidPaginationTokenException) OrigErr() error { return nil } -func (s *InvalidParameterException) Error() string { +func (s *InvalidPaginationTokenException) Error() string { return fmt.Sprintf("%s: %s", s.Code(), s.Message()) } // Status code returns the HTTP status code for the request's response error. -func (s *InvalidParameterException) StatusCode() int { +func (s *InvalidPaginationTokenException) StatusCode() int { return s.RespMetadata.StatusCode } // RequestID returns the service's response RequestID for request. -func (s *InvalidParameterException) RequestID() string { +func (s *InvalidPaginationTokenException) RequestID() string { return s.RespMetadata.RequestID } -// The supplied revision id for the project policy is invalid. -type InvalidPolicyRevisionIdException struct { +// Input parameter violated a constraint. Validate your parameter before calling +// the API operation again. +type InvalidParameterException struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` @@ -19991,7 +20548,71 @@ type InvalidPolicyRevisionIdException struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s InvalidPolicyRevisionIdException) String() string { +func (s InvalidParameterException) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". 
+func (s InvalidParameterException) GoString() string { + return s.String() +} + +func newErrorInvalidParameterException(v protocol.ResponseMetadata) error { + return &InvalidParameterException{ + RespMetadata: v, + } +} + +// Code returns the exception type name. +func (s *InvalidParameterException) Code() string { + return "InvalidParameterException" +} + +// Message returns the exception's message. +func (s *InvalidParameterException) Message() string { + if s.Message_ != nil { + return *s.Message_ + } + return "" +} + +// OrigErr always returns nil, satisfies awserr.Error interface. +func (s *InvalidParameterException) OrigErr() error { + return nil +} + +func (s *InvalidParameterException) Error() string { + return fmt.Sprintf("%s: %s", s.Code(), s.Message()) +} + +// Status code returns the HTTP status code for the request's response error. +func (s *InvalidParameterException) StatusCode() int { + return s.RespMetadata.StatusCode +} + +// RequestID returns the service's response RequestID for request. +func (s *InvalidParameterException) RequestID() string { + return s.RespMetadata.RequestID +} + +// The supplied revision id for the project policy is invalid. +type InvalidPolicyRevisionIdException struct { + _ struct{} `type:"structure"` + RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` + + Message_ *string `locationName:"message" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s InvalidPolicyRevisionIdException) String() string { return awsutil.Prettify(s) } @@ -21169,6 +21790,103 @@ func (s *ListFacesOutput) SetNextToken(v string) *ListFacesOutput { return s } +type ListMediaAnalysisJobsInput struct { + _ struct{} `type:"structure"` + + // The maximum number of results to return per paginated call. 
The largest value + // user can specify is 100. If user specifies a value greater than 100, an InvalidParameterException + // error occurs. The default value is 100. + MaxResults *int64 `min:"1" type:"integer"` + + // Pagination token, if the previous response was incomplete. + NextToken *string `type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListMediaAnalysisJobsInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListMediaAnalysisJobsInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *ListMediaAnalysisJobsInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "ListMediaAnalysisJobsInput"} + if s.MaxResults != nil && *s.MaxResults < 1 { + invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetMaxResults sets the MaxResults field's value. +func (s *ListMediaAnalysisJobsInput) SetMaxResults(v int64) *ListMediaAnalysisJobsInput { + s.MaxResults = &v + return s +} + +// SetNextToken sets the NextToken field's value. +func (s *ListMediaAnalysisJobsInput) SetNextToken(v string) *ListMediaAnalysisJobsInput { + s.NextToken = &v + return s +} + +type ListMediaAnalysisJobsOutput struct { + _ struct{} `type:"structure"` + + // Contains a list of all media analysis jobs. 
+ // + // MediaAnalysisJobs is a required field + MediaAnalysisJobs []*MediaAnalysisJobDescription `type:"list" required:"true"` + + // Pagination token, if the previous response was incomplete. + NextToken *string `type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListMediaAnalysisJobsOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListMediaAnalysisJobsOutput) GoString() string { + return s.String() +} + +// SetMediaAnalysisJobs sets the MediaAnalysisJobs field's value. +func (s *ListMediaAnalysisJobsOutput) SetMediaAnalysisJobs(v []*MediaAnalysisJobDescription) *ListMediaAnalysisJobsOutput { + s.MediaAnalysisJobs = v + return s +} + +// SetNextToken sets the NextToken field's value. +func (s *ListMediaAnalysisJobsOutput) SetNextToken(v string) *ListMediaAnalysisJobsOutput { + s.NextToken = &v + return s +} + type ListProjectPoliciesInput struct { _ struct{} `type:"structure"` @@ -21585,13 +22303,514 @@ func (s *ListUsersOutput) SetUsers(v []*User) *ListUsersOutput { type LivenessOutputConfig struct { _ struct{} `type:"structure"` - // The path to an AWS Amazon S3 bucket used to store Face Liveness session results. - // - // S3Bucket is a required field - S3Bucket *string `min:"3" type:"string" required:"true"` + // The path to an AWS Amazon S3 bucket used to store Face Liveness session results. 
+ // + // S3Bucket is a required field + S3Bucket *string `min:"3" type:"string" required:"true"` + + // The prefix prepended to the output files for the Face Liveness session results. + S3KeyPrefix *string `type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s LivenessOutputConfig) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s LivenessOutputConfig) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *LivenessOutputConfig) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "LivenessOutputConfig"} + if s.S3Bucket == nil { + invalidParams.Add(request.NewErrParamRequired("S3Bucket")) + } + if s.S3Bucket != nil && len(*s.S3Bucket) < 3 { + invalidParams.Add(request.NewErrParamMinLen("S3Bucket", 3)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetS3Bucket sets the S3Bucket field's value. +func (s *LivenessOutputConfig) SetS3Bucket(v string) *LivenessOutputConfig { + s.S3Bucket = &v + return s +} + +// SetS3KeyPrefix sets the S3KeyPrefix field's value. +func (s *LivenessOutputConfig) SetS3KeyPrefix(v string) *LivenessOutputConfig { + s.S3KeyPrefix = &v + return s +} + +// The format of the project policy document that you supplied to PutProjectPolicy +// is incorrect. 
+type MalformedPolicyDocumentException struct { + _ struct{} `type:"structure"` + RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` + + Message_ *string `locationName:"message" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MalformedPolicyDocumentException) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MalformedPolicyDocumentException) GoString() string { + return s.String() +} + +func newErrorMalformedPolicyDocumentException(v protocol.ResponseMetadata) error { + return &MalformedPolicyDocumentException{ + RespMetadata: v, + } +} + +// Code returns the exception type name. +func (s *MalformedPolicyDocumentException) Code() string { + return "MalformedPolicyDocumentException" +} + +// Message returns the exception's message. +func (s *MalformedPolicyDocumentException) Message() string { + if s.Message_ != nil { + return *s.Message_ + } + return "" +} + +// OrigErr always returns nil, satisfies awserr.Error interface. +func (s *MalformedPolicyDocumentException) OrigErr() error { + return nil +} + +func (s *MalformedPolicyDocumentException) Error() string { + return fmt.Sprintf("%s: %s", s.Code(), s.Message()) +} + +// Status code returns the HTTP status code for the request's response error. +func (s *MalformedPolicyDocumentException) StatusCode() int { + return s.RespMetadata.StatusCode +} + +// RequestID returns the service's response RequestID for request. 
+func (s *MalformedPolicyDocumentException) RequestID() string { + return s.RespMetadata.RequestID +} + +// Contains metadata for a UserID matched with a given face. +type MatchedUser struct { + _ struct{} `type:"structure"` + + // A provided ID for the UserID. Unique within the collection. + UserId *string `min:"1" type:"string"` + + // The status of the user matched to a provided FaceID. + UserStatus *string `type:"string" enum:"UserStatus"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MatchedUser) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MatchedUser) GoString() string { + return s.String() +} + +// SetUserId sets the UserId field's value. +func (s *MatchedUser) SetUserId(v string) *MatchedUser { + s.UserId = &v + return s +} + +// SetUserStatus sets the UserStatus field's value. +func (s *MatchedUser) SetUserStatus(v string) *MatchedUser { + s.UserStatus = &v + return s +} + +// Configuration for Moderation Labels Detection. +type MediaAnalysisDetectModerationLabelsConfig struct { + _ struct{} `type:"structure"` + + // Specifies the minimum confidence level for the moderation labels to return. + // Amazon Rekognition doesn't return any labels with a confidence level lower + // than this specified value. + MinConfidence *float64 `type:"float"` + + // Specifies the custom moderation model to be used during the label detection + // job. If not provided the pre-trained model is used. 
+ ProjectVersion *string `min:"20" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisDetectModerationLabelsConfig) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisDetectModerationLabelsConfig) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *MediaAnalysisDetectModerationLabelsConfig) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "MediaAnalysisDetectModerationLabelsConfig"} + if s.ProjectVersion != nil && len(*s.ProjectVersion) < 20 { + invalidParams.Add(request.NewErrParamMinLen("ProjectVersion", 20)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetMinConfidence sets the MinConfidence field's value. +func (s *MediaAnalysisDetectModerationLabelsConfig) SetMinConfidence(v float64) *MediaAnalysisDetectModerationLabelsConfig { + s.MinConfidence = &v + return s +} + +// SetProjectVersion sets the ProjectVersion field's value. +func (s *MediaAnalysisDetectModerationLabelsConfig) SetProjectVersion(v string) *MediaAnalysisDetectModerationLabelsConfig { + s.ProjectVersion = &v + return s +} + +// Contains input information for a media analysis job. +type MediaAnalysisInput_ struct { + _ struct{} `type:"structure"` + + // Provides the S3 bucket name and object name. + // + // The region for the S3 bucket containing the S3 object must match the region + // you use for Amazon Rekognition operations. 
+ // + // For Amazon Rekognition to process an S3 object, the user must have permission + // to access the S3 object. For more information, see How Amazon Rekognition + // works with IAM in the Amazon Rekognition Developer Guide. + // + // S3Object is a required field + S3Object *S3Object `type:"structure" required:"true"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisInput_) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisInput_) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *MediaAnalysisInput_) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "MediaAnalysisInput_"} + if s.S3Object == nil { + invalidParams.Add(request.NewErrParamRequired("S3Object")) + } + if s.S3Object != nil { + if err := s.S3Object.Validate(); err != nil { + invalidParams.AddNested("S3Object", err.(request.ErrInvalidParams)) + } + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetS3Object sets the S3Object field's value. +func (s *MediaAnalysisInput_) SetS3Object(v *S3Object) *MediaAnalysisInput_ { + s.S3Object = v + return s +} + +// Description for a media analysis job. +type MediaAnalysisJobDescription struct { + _ struct{} `type:"structure"` + + // The Unix date and time when the job finished. + CompletionTimestamp *time.Time `type:"timestamp"` + + // The Unix date and time when the job was started. 
+ // + // CreationTimestamp is a required field + CreationTimestamp *time.Time `type:"timestamp" required:"true"` + + // Details about the error that resulted in failure of the job. + FailureDetails *MediaAnalysisJobFailureDetails `type:"structure"` + + // Reference to the input manifest that was provided in the job creation request. + // + // Input is a required field + Input *MediaAnalysisInput_ `type:"structure" required:"true"` + + // The identifier for a media analysis job. + // + // JobId is a required field + JobId *string `min:"1" type:"string" required:"true"` + + // The name of a media analysis job. + JobName *string `min:"1" type:"string"` + + // KMS Key that was provided in the creation request. + KmsKeyId *string `min:"1" type:"string"` + + // Provides statistics on input manifest and errors identified in the input + // manifest. + ManifestSummary *MediaAnalysisManifestSummary `type:"structure"` + + // Operation configurations that were provided during job creation. + // + // OperationsConfig is a required field + OperationsConfig *MediaAnalysisOperationsConfig `type:"structure" required:"true"` + + // Output configuration that was provided in the creation request. + // + // OutputConfig is a required field + OutputConfig *MediaAnalysisOutputConfig `type:"structure" required:"true"` + + // Output manifest that contains prediction results. + Results *MediaAnalysisResults `type:"structure"` + + // The status of the media analysis job being retrieved. + // + // Status is a required field + Status *string `type:"string" required:"true" enum:"MediaAnalysisJobStatus"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisJobDescription) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. 
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisJobDescription) GoString() string { + return s.String() +} + +// SetCompletionTimestamp sets the CompletionTimestamp field's value. +func (s *MediaAnalysisJobDescription) SetCompletionTimestamp(v time.Time) *MediaAnalysisJobDescription { + s.CompletionTimestamp = &v + return s +} + +// SetCreationTimestamp sets the CreationTimestamp field's value. +func (s *MediaAnalysisJobDescription) SetCreationTimestamp(v time.Time) *MediaAnalysisJobDescription { + s.CreationTimestamp = &v + return s +} + +// SetFailureDetails sets the FailureDetails field's value. +func (s *MediaAnalysisJobDescription) SetFailureDetails(v *MediaAnalysisJobFailureDetails) *MediaAnalysisJobDescription { + s.FailureDetails = v + return s +} + +// SetInput sets the Input field's value. +func (s *MediaAnalysisJobDescription) SetInput(v *MediaAnalysisInput_) *MediaAnalysisJobDescription { + s.Input = v + return s +} + +// SetJobId sets the JobId field's value. +func (s *MediaAnalysisJobDescription) SetJobId(v string) *MediaAnalysisJobDescription { + s.JobId = &v + return s +} + +// SetJobName sets the JobName field's value. +func (s *MediaAnalysisJobDescription) SetJobName(v string) *MediaAnalysisJobDescription { + s.JobName = &v + return s +} + +// SetKmsKeyId sets the KmsKeyId field's value. +func (s *MediaAnalysisJobDescription) SetKmsKeyId(v string) *MediaAnalysisJobDescription { + s.KmsKeyId = &v + return s +} + +// SetManifestSummary sets the ManifestSummary field's value. +func (s *MediaAnalysisJobDescription) SetManifestSummary(v *MediaAnalysisManifestSummary) *MediaAnalysisJobDescription { + s.ManifestSummary = v + return s +} + +// SetOperationsConfig sets the OperationsConfig field's value. 
+func (s *MediaAnalysisJobDescription) SetOperationsConfig(v *MediaAnalysisOperationsConfig) *MediaAnalysisJobDescription { + s.OperationsConfig = v + return s +} + +// SetOutputConfig sets the OutputConfig field's value. +func (s *MediaAnalysisJobDescription) SetOutputConfig(v *MediaAnalysisOutputConfig) *MediaAnalysisJobDescription { + s.OutputConfig = v + return s +} + +// SetResults sets the Results field's value. +func (s *MediaAnalysisJobDescription) SetResults(v *MediaAnalysisResults) *MediaAnalysisJobDescription { + s.Results = v + return s +} + +// SetStatus sets the Status field's value. +func (s *MediaAnalysisJobDescription) SetStatus(v string) *MediaAnalysisJobDescription { + s.Status = &v + return s +} + +// Details about the error that resulted in failure of the job. +type MediaAnalysisJobFailureDetails struct { + _ struct{} `type:"structure"` + + // Error code for the failed job. + Code *string `type:"string" enum:"MediaAnalysisJobFailureCode"` + + // Human readable error message. + Message *string `type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisJobFailureDetails) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisJobFailureDetails) GoString() string { + return s.String() +} + +// SetCode sets the Code field's value. +func (s *MediaAnalysisJobFailureDetails) SetCode(v string) *MediaAnalysisJobFailureDetails { + s.Code = &v + return s +} + +// SetMessage sets the Message field's value. 
+func (s *MediaAnalysisJobFailureDetails) SetMessage(v string) *MediaAnalysisJobFailureDetails { + s.Message = &v + return s +} + +// Summary that provides statistics on input manifest and errors identified +// in the input manifest. +type MediaAnalysisManifestSummary struct { + _ struct{} `type:"structure"` + + // Provides the S3 bucket name and object name. + // + // The region for the S3 bucket containing the S3 object must match the region + // you use for Amazon Rekognition operations. + // + // For Amazon Rekognition to process an S3 object, the user must have permission + // to access the S3 object. For more information, see How Amazon Rekognition + // works with IAM in the Amazon Rekognition Developer Guide. + S3Object *S3Object `type:"structure"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisManifestSummary) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s MediaAnalysisManifestSummary) GoString() string { + return s.String() +} + +// SetS3Object sets the S3Object field's value. +func (s *MediaAnalysisManifestSummary) SetS3Object(v *S3Object) *MediaAnalysisManifestSummary { + s.S3Object = v + return s +} + +// Configuration options for a media analysis job. Configuration is operation-specific. +type MediaAnalysisOperationsConfig struct { + _ struct{} `type:"structure"` - // The prefix prepended to the output files for the Face Liveness session results. - S3KeyPrefix *string `type:"string"` + // Contains configuration options for a DetectModerationLabels job. 
+ DetectModerationLabels *MediaAnalysisDetectModerationLabelsConfig `type:"structure"` } // String returns the string representation. @@ -21599,7 +22818,7 @@ type LivenessOutputConfig struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s LivenessOutputConfig) String() string { +func (s MediaAnalysisOperationsConfig) String() string { return awsutil.Prettify(s) } @@ -21608,18 +22827,17 @@ func (s LivenessOutputConfig) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s LivenessOutputConfig) GoString() string { +func (s MediaAnalysisOperationsConfig) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. -func (s *LivenessOutputConfig) Validate() error { - invalidParams := request.ErrInvalidParams{Context: "LivenessOutputConfig"} - if s.S3Bucket == nil { - invalidParams.Add(request.NewErrParamRequired("S3Bucket")) - } - if s.S3Bucket != nil && len(*s.S3Bucket) < 3 { - invalidParams.Add(request.NewErrParamMinLen("S3Bucket", 3)) +func (s *MediaAnalysisOperationsConfig) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "MediaAnalysisOperationsConfig"} + if s.DetectModerationLabels != nil { + if err := s.DetectModerationLabels.Validate(); err != nil { + invalidParams.AddNested("DetectModerationLabels", err.(request.ErrInvalidParams)) + } } if invalidParams.Len() > 0 { @@ -21628,25 +22846,25 @@ func (s *LivenessOutputConfig) Validate() error { return nil } -// SetS3Bucket sets the S3Bucket field's value. 
-func (s *LivenessOutputConfig) SetS3Bucket(v string) *LivenessOutputConfig { - s.S3Bucket = &v +// SetDetectModerationLabels sets the DetectModerationLabels field's value. +func (s *MediaAnalysisOperationsConfig) SetDetectModerationLabels(v *MediaAnalysisDetectModerationLabelsConfig) *MediaAnalysisOperationsConfig { + s.DetectModerationLabels = v return s } -// SetS3KeyPrefix sets the S3KeyPrefix field's value. -func (s *LivenessOutputConfig) SetS3KeyPrefix(v string) *LivenessOutputConfig { - s.S3KeyPrefix = &v - return s -} +// Output configuration provided in the job creation request. +type MediaAnalysisOutputConfig struct { + _ struct{} `type:"structure"` -// The format of the project policy document that you supplied to PutProjectPolicy -// is incorrect. -type MalformedPolicyDocumentException struct { - _ struct{} `type:"structure"` - RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` + // Specifies the Amazon S3 bucket to contain the output of the media analysis + // job. + // + // S3Bucket is a required field + S3Bucket *string `min:"3" type:"string" required:"true"` - Message_ *string `locationName:"message" type:"string"` + // Specifies the Amazon S3 key prefix that comes after the name of the bucket + // you have designated for storage. + S3KeyPrefix *string `type:"string"` } // String returns the string representation. @@ -21654,7 +22872,7 @@ type MalformedPolicyDocumentException struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s MalformedPolicyDocumentException) String() string { +func (s MediaAnalysisOutputConfig) String() string { return awsutil.Prettify(s) } @@ -21663,57 +22881,51 @@ func (s MalformedPolicyDocumentException) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. 
The member name will be present, but the // value will be replaced with "sensitive". -func (s MalformedPolicyDocumentException) GoString() string { +func (s MediaAnalysisOutputConfig) GoString() string { return s.String() } -func newErrorMalformedPolicyDocumentException(v protocol.ResponseMetadata) error { - return &MalformedPolicyDocumentException{ - RespMetadata: v, +// Validate inspects the fields of the type to determine if they are valid. +func (s *MediaAnalysisOutputConfig) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "MediaAnalysisOutputConfig"} + if s.S3Bucket == nil { + invalidParams.Add(request.NewErrParamRequired("S3Bucket")) } -} - -// Code returns the exception type name. -func (s *MalformedPolicyDocumentException) Code() string { - return "MalformedPolicyDocumentException" -} - -// Message returns the exception's message. -func (s *MalformedPolicyDocumentException) Message() string { - if s.Message_ != nil { - return *s.Message_ + if s.S3Bucket != nil && len(*s.S3Bucket) < 3 { + invalidParams.Add(request.NewErrParamMinLen("S3Bucket", 3)) } - return "" -} -// OrigErr always returns nil, satisfies awserr.Error interface. -func (s *MalformedPolicyDocumentException) OrigErr() error { + if invalidParams.Len() > 0 { + return invalidParams + } return nil } -func (s *MalformedPolicyDocumentException) Error() string { - return fmt.Sprintf("%s: %s", s.Code(), s.Message()) -} - -// Status code returns the HTTP status code for the request's response error. -func (s *MalformedPolicyDocumentException) StatusCode() int { - return s.RespMetadata.StatusCode +// SetS3Bucket sets the S3Bucket field's value. +func (s *MediaAnalysisOutputConfig) SetS3Bucket(v string) *MediaAnalysisOutputConfig { + s.S3Bucket = &v + return s } -// RequestID returns the service's response RequestID for request. -func (s *MalformedPolicyDocumentException) RequestID() string { - return s.RespMetadata.RequestID +// SetS3KeyPrefix sets the S3KeyPrefix field's value. 
+func (s *MediaAnalysisOutputConfig) SetS3KeyPrefix(v string) *MediaAnalysisOutputConfig { + s.S3KeyPrefix = &v + return s } -// Contains metadata for a UserID matched with a given face. -type MatchedUser struct { +// Contains the results for a media analysis job created with StartMediaAnalysisJob. +type MediaAnalysisResults struct { _ struct{} `type:"structure"` - // A provided ID for the UserID. Unique within the collection. - UserId *string `min:"1" type:"string"` - - // The status of the user matched to a provided FaceID. - UserStatus *string `type:"string" enum:"UserStatus"` + // Provides the S3 bucket name and object name. + // + // The region for the S3 bucket containing the S3 object must match the region + // you use for Amazon Rekognition operations. + // + // For Amazon Rekognition to process an S3 object, the user must have permission + // to access the S3 object. For more information, see How Amazon Rekognition + // works with IAM in the Amazon Rekognition Developer Guide. + S3Object *S3Object `type:"structure"` } // String returns the string representation. @@ -21721,7 +22933,7 @@ type MatchedUser struct { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s MatchedUser) String() string { +func (s MediaAnalysisResults) String() string { return awsutil.Prettify(s) } @@ -21730,19 +22942,13 @@ func (s MatchedUser) String() string { // API parameter values that are decorated as "sensitive" in the API will not // be included in the string output. The member name will be present, but the // value will be replaced with "sensitive". -func (s MatchedUser) GoString() string { +func (s MediaAnalysisResults) GoString() string { return s.String() } -// SetUserId sets the UserId field's value. 
-func (s *MatchedUser) SetUserId(v string) *MatchedUser { - s.UserId = &v - return s -} - -// SetUserStatus sets the UserStatus field's value. -func (s *MatchedUser) SetUserStatus(v string) *MatchedUser { - s.UserStatus = &v +// SetS3Object sets the S3Object field's value. +func (s *MediaAnalysisResults) SetS3Object(v *S3Object) *MediaAnalysisResults { + s.S3Object = v return s } @@ -25611,6 +26817,169 @@ func (s *StartLabelDetectionOutput) SetJobId(v string) *StartLabelDetectionOutpu return s } +type StartMediaAnalysisJobInput struct { + _ struct{} `type:"structure"` + + // Idempotency token used to prevent the accidental creation of duplicate versions. + // If you use the same token with multiple StartMediaAnalysisJobRequest requests, + // the same response is returned. Use ClientRequestToken to prevent the same + // request from being processed more than once. + ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` + + // Input data to be analyzed by the job. + // + // Input is a required field + Input *MediaAnalysisInput_ `type:"structure" required:"true"` + + // The name of the job. Does not have to be unique. + JobName *string `min:"1" type:"string"` + + // The identifier of customer managed AWS KMS key (name or ARN). The key is + // used to encrypt images copied into the service. The key is also used to encrypt + // results and manifest files written to the output Amazon S3 bucket. + KmsKeyId *string `min:"1" type:"string"` + + // Configuration options for the media analysis job to be created. + // + // OperationsConfig is a required field + OperationsConfig *MediaAnalysisOperationsConfig `type:"structure" required:"true"` + + // The Amazon S3 bucket location to store the results. + // + // OutputConfig is a required field + OutputConfig *MediaAnalysisOutputConfig `type:"structure" required:"true"` +} + +// String returns the string representation. 
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s StartMediaAnalysisJobInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s StartMediaAnalysisJobInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *StartMediaAnalysisJobInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "StartMediaAnalysisJobInput"} + if s.ClientRequestToken != nil && len(*s.ClientRequestToken) < 1 { + invalidParams.Add(request.NewErrParamMinLen("ClientRequestToken", 1)) + } + if s.Input == nil { + invalidParams.Add(request.NewErrParamRequired("Input")) + } + if s.JobName != nil && len(*s.JobName) < 1 { + invalidParams.Add(request.NewErrParamMinLen("JobName", 1)) + } + if s.KmsKeyId != nil && len(*s.KmsKeyId) < 1 { + invalidParams.Add(request.NewErrParamMinLen("KmsKeyId", 1)) + } + if s.OperationsConfig == nil { + invalidParams.Add(request.NewErrParamRequired("OperationsConfig")) + } + if s.OutputConfig == nil { + invalidParams.Add(request.NewErrParamRequired("OutputConfig")) + } + if s.Input != nil { + if err := s.Input.Validate(); err != nil { + invalidParams.AddNested("Input", err.(request.ErrInvalidParams)) + } + } + if s.OperationsConfig != nil { + if err := s.OperationsConfig.Validate(); err != nil { + invalidParams.AddNested("OperationsConfig", err.(request.ErrInvalidParams)) + } + } + if s.OutputConfig != nil { + if err := s.OutputConfig.Validate(); err != nil { + invalidParams.AddNested("OutputConfig", err.(request.ErrInvalidParams)) + } + } + + if 
invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetClientRequestToken sets the ClientRequestToken field's value. +func (s *StartMediaAnalysisJobInput) SetClientRequestToken(v string) *StartMediaAnalysisJobInput { + s.ClientRequestToken = &v + return s +} + +// SetInput sets the Input field's value. +func (s *StartMediaAnalysisJobInput) SetInput(v *MediaAnalysisInput_) *StartMediaAnalysisJobInput { + s.Input = v + return s +} + +// SetJobName sets the JobName field's value. +func (s *StartMediaAnalysisJobInput) SetJobName(v string) *StartMediaAnalysisJobInput { + s.JobName = &v + return s +} + +// SetKmsKeyId sets the KmsKeyId field's value. +func (s *StartMediaAnalysisJobInput) SetKmsKeyId(v string) *StartMediaAnalysisJobInput { + s.KmsKeyId = &v + return s +} + +// SetOperationsConfig sets the OperationsConfig field's value. +func (s *StartMediaAnalysisJobInput) SetOperationsConfig(v *MediaAnalysisOperationsConfig) *StartMediaAnalysisJobInput { + s.OperationsConfig = v + return s +} + +// SetOutputConfig sets the OutputConfig field's value. +func (s *StartMediaAnalysisJobInput) SetOutputConfig(v *MediaAnalysisOutputConfig) *StartMediaAnalysisJobInput { + s.OutputConfig = v + return s +} + +type StartMediaAnalysisJobOutput struct { + _ struct{} `type:"structure"` + + // Identifier for the created job. + // + // JobId is a required field + JobId *string `min:"1" type:"string" required:"true"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s StartMediaAnalysisJobOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. 
The member name will be present, but the +// value will be replaced with "sensitive". +func (s StartMediaAnalysisJobOutput) GoString() string { + return s.String() +} + +// SetJobId sets the JobId field's value. +func (s *StartMediaAnalysisJobOutput) SetJobId(v string) *StartMediaAnalysisJobOutput { + s.JobId = &v + return s +} + type StartPersonTrackingInput struct { _ struct{} `type:"structure"` @@ -29252,6 +30621,78 @@ func LivenessSessionStatus_Values() []string { } } +const ( + // MediaAnalysisJobFailureCodeInternalError is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeInternalError = "INTERNAL_ERROR" + + // MediaAnalysisJobFailureCodeInvalidS3Object is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeInvalidS3Object = "INVALID_S3_OBJECT" + + // MediaAnalysisJobFailureCodeInvalidManifest is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeInvalidManifest = "INVALID_MANIFEST" + + // MediaAnalysisJobFailureCodeInvalidOutputConfig is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeInvalidOutputConfig = "INVALID_OUTPUT_CONFIG" + + // MediaAnalysisJobFailureCodeInvalidKmsKey is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeInvalidKmsKey = "INVALID_KMS_KEY" + + // MediaAnalysisJobFailureCodeAccessDenied is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeAccessDenied = "ACCESS_DENIED" + + // MediaAnalysisJobFailureCodeResourceNotFound is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeResourceNotFound = "RESOURCE_NOT_FOUND" + + // MediaAnalysisJobFailureCodeResourceNotReady is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeResourceNotReady = "RESOURCE_NOT_READY" + + // MediaAnalysisJobFailureCodeThrottled is a MediaAnalysisJobFailureCode enum value + MediaAnalysisJobFailureCodeThrottled = "THROTTLED" +) + +// MediaAnalysisJobFailureCode_Values returns all elements of the 
MediaAnalysisJobFailureCode enum +func MediaAnalysisJobFailureCode_Values() []string { + return []string{ + MediaAnalysisJobFailureCodeInternalError, + MediaAnalysisJobFailureCodeInvalidS3Object, + MediaAnalysisJobFailureCodeInvalidManifest, + MediaAnalysisJobFailureCodeInvalidOutputConfig, + MediaAnalysisJobFailureCodeInvalidKmsKey, + MediaAnalysisJobFailureCodeAccessDenied, + MediaAnalysisJobFailureCodeResourceNotFound, + MediaAnalysisJobFailureCodeResourceNotReady, + MediaAnalysisJobFailureCodeThrottled, + } +} + +const ( + // MediaAnalysisJobStatusCreated is a MediaAnalysisJobStatus enum value + MediaAnalysisJobStatusCreated = "CREATED" + + // MediaAnalysisJobStatusQueued is a MediaAnalysisJobStatus enum value + MediaAnalysisJobStatusQueued = "QUEUED" + + // MediaAnalysisJobStatusInProgress is a MediaAnalysisJobStatus enum value + MediaAnalysisJobStatusInProgress = "IN_PROGRESS" + + // MediaAnalysisJobStatusSucceeded is a MediaAnalysisJobStatus enum value + MediaAnalysisJobStatusSucceeded = "SUCCEEDED" + + // MediaAnalysisJobStatusFailed is a MediaAnalysisJobStatus enum value + MediaAnalysisJobStatusFailed = "FAILED" +) + +// MediaAnalysisJobStatus_Values returns all elements of the MediaAnalysisJobStatus enum +func MediaAnalysisJobStatus_Values() []string { + return []string{ + MediaAnalysisJobStatusCreated, + MediaAnalysisJobStatusQueued, + MediaAnalysisJobStatusInProgress, + MediaAnalysisJobStatusSucceeded, + MediaAnalysisJobStatusFailed, + } +} + const ( // OrientationCorrectionRotate0 is a OrientationCorrection enum value OrientationCorrectionRotate0 = "ROTATE_0" diff --git a/service/rekognition/errors.go b/service/rekognition/errors.go index ab622c8330e..ad4bf56fb1a 100644 --- a/service/rekognition/errors.go +++ b/service/rekognition/errors.go @@ -57,6 +57,13 @@ const ( // The provided image format is not supported. 
ErrCodeInvalidImageFormatException = "InvalidImageFormatException" + // ErrCodeInvalidManifestException for service response error code + // "InvalidManifestException". + // + // Indicates that a provided manifest file is empty or larger than the allowed + // limit. + ErrCodeInvalidManifestException = "InvalidManifestException" + // ErrCodeInvalidPaginationTokenException for service response error code // "InvalidPaginationTokenException". // @@ -168,6 +175,7 @@ var exceptionFromCode = map[string]func(protocol.ResponseMetadata) error{ "ImageTooLargeException": newErrorImageTooLargeException, "InternalServerError": newErrorInternalServerError, "InvalidImageFormatException": newErrorInvalidImageFormatException, + "InvalidManifestException": newErrorInvalidManifestException, "InvalidPaginationTokenException": newErrorInvalidPaginationTokenException, "InvalidParameterException": newErrorInvalidParameterException, "InvalidPolicyRevisionIdException": newErrorInvalidPolicyRevisionIdException, diff --git a/service/rekognition/examples_test.go b/service/rekognition/examples_test.go index 2263b5d18d8..43992c33e8f 100644 --- a/service/rekognition/examples_test.go +++ b/service/rekognition/examples_test.go @@ -1072,6 +1072,44 @@ func ExampleRekognition_DistributeDatasetEntries_shared00() { fmt.Println(result) } +// GetMediaAnalysisJob +// Retrieves the results for a given media analysis job. 
+func ExampleRekognition_GetMediaAnalysisJob_shared00() { + svc := rekognition.New(session.New()) + input := &rekognition.GetMediaAnalysisJobInput{ + JobId: aws.String("861a0645d98ef88efb75477628c011c04942d9d5f58faf2703c393c8cf8c1537"), + } + + result, err := svc.GetMediaAnalysisJob(input) + if err != nil { + if aerr, ok := err.(awserr.Error); ok { + switch aerr.Code() { + case rekognition.ErrCodeAccessDeniedException: + fmt.Println(rekognition.ErrCodeAccessDeniedException, aerr.Error()) + case rekognition.ErrCodeResourceNotFoundException: + fmt.Println(rekognition.ErrCodeResourceNotFoundException, aerr.Error()) + case rekognition.ErrCodeInternalServerError: + fmt.Println(rekognition.ErrCodeInternalServerError, aerr.Error()) + case rekognition.ErrCodeInvalidParameterException: + fmt.Println(rekognition.ErrCodeInvalidParameterException, aerr.Error()) + case rekognition.ErrCodeProvisionedThroughputExceededException: + fmt.Println(rekognition.ErrCodeProvisionedThroughputExceededException, aerr.Error()) + case rekognition.ErrCodeThrottlingException: + fmt.Println(rekognition.ErrCodeThrottlingException, aerr.Error()) + default: + fmt.Println(aerr.Error()) + } + } else { + // Print the error, cast err to awserr.Error to get the Code and + // Message from an error. + fmt.Println(err.Error()) + } + return + } + + fmt.Println(result) +} + // To add a face to a collection // This operation detects faces in an image and adds them to the specified Rekognition // collection. @@ -1303,6 +1341,44 @@ func ExampleRekognition_ListFaces_shared00() { fmt.Println(result) } +// ListMediaAnalysisJobs +// Returns a list of media analysis jobs. 
+func ExampleRekognition_ListMediaAnalysisJobs_shared00() { + svc := rekognition.New(session.New()) + input := &rekognition.ListMediaAnalysisJobsInput{ + MaxResults: aws.Int64(10), + } + + result, err := svc.ListMediaAnalysisJobs(input) + if err != nil { + if aerr, ok := err.(awserr.Error); ok { + switch aerr.Code() { + case rekognition.ErrCodeAccessDeniedException: + fmt.Println(rekognition.ErrCodeAccessDeniedException, aerr.Error()) + case rekognition.ErrCodeInternalServerError: + fmt.Println(rekognition.ErrCodeInternalServerError, aerr.Error()) + case rekognition.ErrCodeInvalidParameterException: + fmt.Println(rekognition.ErrCodeInvalidParameterException, aerr.Error()) + case rekognition.ErrCodeInvalidPaginationTokenException: + fmt.Println(rekognition.ErrCodeInvalidPaginationTokenException, aerr.Error()) + case rekognition.ErrCodeProvisionedThroughputExceededException: + fmt.Println(rekognition.ErrCodeProvisionedThroughputExceededException, aerr.Error()) + case rekognition.ErrCodeThrottlingException: + fmt.Println(rekognition.ErrCodeThrottlingException, aerr.Error()) + default: + fmt.Println(aerr.Error()) + } + } else { + // Print the error, cast err to awserr.Error to get the Code and + // Message from an error. + fmt.Println(err.Error()) + } + return + } + + fmt.Println(result) +} + // ListProjectPolicies // This operation lists the project policies that are attached to an Amazon Rekognition // Custom Labels project. @@ -1627,6 +1703,70 @@ func ExampleRekognition_SearchUsersByImage_shared00() { fmt.Println(result) } +// StartMediaAnalysisJob +// Initiates a new media analysis job. 
+func ExampleRekognition_StartMediaAnalysisJob_shared00() { + svc := rekognition.New(session.New()) + input := &rekognition.StartMediaAnalysisJobInput{ + Input: &rekognition.MediaAnalysisInput_{ + S3Object: &rekognition.S3Object{ + Bucket: aws.String("input-bucket"), + Name: aws.String("input-manifest.json"), + }, + }, + JobName: aws.String("job-name"), + OperationsConfig: &rekognition.MediaAnalysisOperationsConfig{ + DetectModerationLabels: &rekognition.MediaAnalysisDetectModerationLabelsConfig{ + MinConfidence: aws.Float64(50.000000), + ProjectVersion: aws.String("arn:aws:rekognition:us-east-1:111122223333:project/my-project/version/1/1690556751958"), + }, + }, + OutputConfig: &rekognition.MediaAnalysisOutputConfig{ + S3Bucket: aws.String("output-bucket"), + S3KeyPrefix: aws.String("output-location"), + }, + } + + result, err := svc.StartMediaAnalysisJob(input) + if err != nil { + if aerr, ok := err.(awserr.Error); ok { + switch aerr.Code() { + case rekognition.ErrCodeInternalServerError: + fmt.Println(rekognition.ErrCodeInternalServerError, aerr.Error()) + case rekognition.ErrCodeAccessDeniedException: + fmt.Println(rekognition.ErrCodeAccessDeniedException, aerr.Error()) + case rekognition.ErrCodeInvalidParameterException: + fmt.Println(rekognition.ErrCodeInvalidParameterException, aerr.Error()) + case rekognition.ErrCodeInvalidManifestException: + fmt.Println(rekognition.ErrCodeInvalidManifestException, aerr.Error()) + case rekognition.ErrCodeInvalidS3ObjectException: + fmt.Println(rekognition.ErrCodeInvalidS3ObjectException, aerr.Error()) + case rekognition.ErrCodeResourceNotFoundException: + fmt.Println(rekognition.ErrCodeResourceNotFoundException, aerr.Error()) + case rekognition.ErrCodeResourceNotReadyException: + fmt.Println(rekognition.ErrCodeResourceNotReadyException, aerr.Error()) + case rekognition.ErrCodeProvisionedThroughputExceededException: + fmt.Println(rekognition.ErrCodeProvisionedThroughputExceededException, aerr.Error()) + case 
rekognition.ErrCodeLimitExceededException: + fmt.Println(rekognition.ErrCodeLimitExceededException, aerr.Error()) + case rekognition.ErrCodeThrottlingException: + fmt.Println(rekognition.ErrCodeThrottlingException, aerr.Error()) + case rekognition.ErrCodeIdempotentParameterMismatchException: + fmt.Println(rekognition.ErrCodeIdempotentParameterMismatchException, aerr.Error()) + default: + fmt.Println(aerr.Error()) + } + } else { + // Print the error, cast err to awserr.Error to get the Code and + // Message from an error. + fmt.Println(err.Error()) + } + return + } + + fmt.Println(result) +} + // To start an Amazon Rekognition Custom Labels model // Starts a version of an Amazon Rekognition Custom Labels model. func ExampleRekognition_StartProjectVersion_shared00() { diff --git a/service/rekognition/rekognitioniface/interface.go b/service/rekognition/rekognitioniface/interface.go index c81cd6ebb50..2dfe98d855f 100644 --- a/service/rekognition/rekognitioniface/interface.go +++ b/service/rekognition/rekognitioniface/interface.go @@ -233,6 +233,10 @@ type RekognitionAPI interface { GetLabelDetectionPages(*rekognition.GetLabelDetectionInput, func(*rekognition.GetLabelDetectionOutput, bool) bool) error GetLabelDetectionPagesWithContext(aws.Context, *rekognition.GetLabelDetectionInput, func(*rekognition.GetLabelDetectionOutput, bool) bool, ...request.Option) error + GetMediaAnalysisJob(*rekognition.GetMediaAnalysisJobInput) (*rekognition.GetMediaAnalysisJobOutput, error) + GetMediaAnalysisJobWithContext(aws.Context, *rekognition.GetMediaAnalysisJobInput, ...request.Option) (*rekognition.GetMediaAnalysisJobOutput, error) + GetMediaAnalysisJobRequest(*rekognition.GetMediaAnalysisJobInput) (*request.Request, *rekognition.GetMediaAnalysisJobOutput) + GetPersonTracking(*rekognition.GetPersonTrackingInput) (*rekognition.GetPersonTrackingOutput, error) GetPersonTrackingWithContext(aws.Context, *rekognition.GetPersonTrackingInput, ...request.Option) 
(*rekognition.GetPersonTrackingOutput, error) GetPersonTrackingRequest(*rekognition.GetPersonTrackingInput) (*request.Request, *rekognition.GetPersonTrackingOutput) @@ -286,6 +290,13 @@ type RekognitionAPI interface { ListFacesPages(*rekognition.ListFacesInput, func(*rekognition.ListFacesOutput, bool) bool) error ListFacesPagesWithContext(aws.Context, *rekognition.ListFacesInput, func(*rekognition.ListFacesOutput, bool) bool, ...request.Option) error + ListMediaAnalysisJobs(*rekognition.ListMediaAnalysisJobsInput) (*rekognition.ListMediaAnalysisJobsOutput, error) + ListMediaAnalysisJobsWithContext(aws.Context, *rekognition.ListMediaAnalysisJobsInput, ...request.Option) (*rekognition.ListMediaAnalysisJobsOutput, error) + ListMediaAnalysisJobsRequest(*rekognition.ListMediaAnalysisJobsInput) (*request.Request, *rekognition.ListMediaAnalysisJobsOutput) + + ListMediaAnalysisJobsPages(*rekognition.ListMediaAnalysisJobsInput, func(*rekognition.ListMediaAnalysisJobsOutput, bool) bool) error + ListMediaAnalysisJobsPagesWithContext(aws.Context, *rekognition.ListMediaAnalysisJobsInput, func(*rekognition.ListMediaAnalysisJobsOutput, bool) bool, ...request.Option) error + ListProjectPolicies(*rekognition.ListProjectPoliciesInput) (*rekognition.ListProjectPoliciesOutput, error) ListProjectPoliciesWithContext(aws.Context, *rekognition.ListProjectPoliciesInput, ...request.Option) (*rekognition.ListProjectPoliciesOutput, error) ListProjectPoliciesRequest(*rekognition.ListProjectPoliciesInput) (*request.Request, *rekognition.ListProjectPoliciesOutput) @@ -355,6 +366,10 @@ type RekognitionAPI interface { StartLabelDetectionWithContext(aws.Context, *rekognition.StartLabelDetectionInput, ...request.Option) (*rekognition.StartLabelDetectionOutput, error) StartLabelDetectionRequest(*rekognition.StartLabelDetectionInput) (*request.Request, *rekognition.StartLabelDetectionOutput) + StartMediaAnalysisJob(*rekognition.StartMediaAnalysisJobInput) (*rekognition.StartMediaAnalysisJobOutput, 
error) + StartMediaAnalysisJobWithContext(aws.Context, *rekognition.StartMediaAnalysisJobInput, ...request.Option) (*rekognition.StartMediaAnalysisJobOutput, error) + StartMediaAnalysisJobRequest(*rekognition.StartMediaAnalysisJobInput) (*request.Request, *rekognition.StartMediaAnalysisJobOutput) + StartPersonTracking(*rekognition.StartPersonTrackingInput) (*rekognition.StartPersonTrackingOutput, error) StartPersonTrackingWithContext(aws.Context, *rekognition.StartPersonTrackingInput, ...request.Option) (*rekognition.StartPersonTrackingOutput, error) StartPersonTrackingRequest(*rekognition.StartPersonTrackingInput) (*request.Request, *rekognition.StartPersonTrackingOutput)