From 234209222b1a0c1e1168a852a99df0a84ec9443f Mon Sep 17 00:00:00 2001 From: Jonas Trollvik Date: Fri, 24 Jan 2020 01:48:16 +0100 Subject: [PATCH] Implementation of s3 using new archival interface (#2991) * Initial commit of s3 using new archival interface * Changes after first review - Remove RunID from visibility query since there is no point in using this. Also the indexing is not set up for this to work - Add metrics to visibility and history stores - Update README with instructions on how data is stored and query syntax * Add some notes about how the query syntax works * Update README * Validate URI against s3. Cleanup Remove redundant declarations of ensureContextTimeout and add it to missing functions --- common/archiver/provider/provider.go | 64 +- common/archiver/s3store/README.md | 101 + common/archiver/s3store/historyArchiver.go | 346 + .../archiver/s3store/historyArchiver_test.go | 666 ++ common/archiver/s3store/mocks/S3API.go | 7214 +++++++++++++++++ common/archiver/s3store/queryParser.go | 226 + common/archiver/s3store/queryParser_mock.go | 72 + common/archiver/s3store/queryParser_test.go | 268 + common/archiver/s3store/util.go | 237 + common/archiver/s3store/visibilityArchiver.go | 260 + .../s3store/visibilityArchiver_test.go | 558 ++ common/service/config/config.go | 9 + go.mod | 1 + go.sum | 4 + 14 files changed, 10003 insertions(+), 23 deletions(-) create mode 100644 common/archiver/s3store/README.md create mode 100644 common/archiver/s3store/historyArchiver.go create mode 100644 common/archiver/s3store/historyArchiver_test.go create mode 100644 common/archiver/s3store/mocks/S3API.go create mode 100644 common/archiver/s3store/queryParser.go create mode 100644 common/archiver/s3store/queryParser_mock.go create mode 100644 common/archiver/s3store/queryParser_test.go create mode 100644 common/archiver/s3store/util.go create mode 100644 common/archiver/s3store/visibilityArchiver.go create mode 100644 common/archiver/s3store/visibilityArchiver_test.go diff --git a/common/archiver/provider/provider.go b/common/archiver/provider/provider.go index 06974536f82..5afba522785 100644 --- a/common/archiver/provider/provider.go +++ b/common/archiver/provider/provider.go @@ -26,6 +26,7 @@ import ( "github.com/uber/cadence/common/archiver" "github.com/uber/cadence/common/archiver/filestore" + "github.com/uber/cadence/common/archiver/s3store" "github.com/uber/cadence/common/service/config" ) @@ -126,26 +127,33 @@ func (p *archiverProvider) GetHistoryArchiver(scheme, serviceName string) (archi if !ok { return nil, ErrBootstrapContainerNotFound } + var historyArchiver archiver.HistoryArchiver + var err error switch scheme { case filestore.URIScheme: if p.historyArchiverConfigs.Filestore == nil { return nil, ErrArchiverConfigNotFound } - historyArchiver, err := filestore.NewHistoryArchiver(container, p.historyArchiverConfigs.Filestore) - if err != nil { - return nil, err - } - - p.Lock() - defer p.Unlock() - if existingHistoryArchiver, ok := p.historyArchivers[archiverKey]; ok { - return existingHistoryArchiver, nil + historyArchiver, err = filestore.NewHistoryArchiver(container, p.historyArchiverConfigs.Filestore) + case s3store.URIScheme: + if p.historyArchiverConfigs.S3store == nil { + return nil, ErrArchiverConfigNotFound } - p.historyArchivers[archiverKey] = historyArchiver - return historyArchiver, nil + historyArchiver, err = s3store.NewHistoryArchiver(container, p.historyArchiverConfigs.S3store) + default: + return nil, ErrUnknownScheme + } + if err != nil { + return nil, err } - 
return nil, ErrUnknownScheme
+	p.Lock()
+	defer p.Unlock()
+	if existingHistoryArchiver, ok := p.historyArchivers[archiverKey]; ok {
+		return existingHistoryArchiver, nil
+	}
+	p.historyArchivers[archiverKey] = historyArchiver
+	return historyArchiver, nil
 }
 
 func (p *archiverProvider) GetVisibilityArchiver(scheme, serviceName string) (archiver.VisibilityArchiver, error) {
@@ -162,25 +170,35 @@ func (p *archiverProvider) GetVisibilityArchiver(scheme, serviceName string) (ar
 		return nil, ErrBootstrapContainerNotFound
 	}
+	var visibilityArchiver archiver.VisibilityArchiver
+	var err error
+
 	switch scheme {
 	case filestore.URIScheme:
 		if p.visibilityArchiverConfigs.Filestore == nil {
 			return nil, ErrArchiverConfigNotFound
 		}
-		visibilityArchiver, err := filestore.NewVisibilityArchiver(container, p.visibilityArchiverConfigs.Filestore)
-		if err != nil {
-			return nil, err
+		visibilityArchiver, err = filestore.NewVisibilityArchiver(container, p.visibilityArchiverConfigs.Filestore)
+	case s3store.URIScheme:
+		if p.visibilityArchiverConfigs.S3store == nil {
+			return nil, ErrArchiverConfigNotFound
 		}
+		visibilityArchiver, err = s3store.NewVisibilityArchiver(container, p.visibilityArchiverConfigs.S3store)
+	default:
+		return nil, ErrUnknownScheme
+	}
+	if err != nil {
+		return nil, err
+	}
-	p.Lock()
-	defer p.Unlock()
-	if existingVisibilityArchiver, ok := p.visibilityArchivers[archiverKey]; ok {
-		return existingVisibilityArchiver, nil
-	}
-	p.visibilityArchivers[archiverKey] = visibilityArchiver
-	return visibilityArchiver, nil
+	p.Lock()
+	defer p.Unlock()
+	if existingVisibilityArchiver, ok := p.visibilityArchivers[archiverKey]; ok {
+		return existingVisibilityArchiver, nil
 	}
-	return nil, ErrUnknownScheme
+	p.visibilityArchivers[archiverKey] = visibilityArchiver
+	return visibilityArchiver, nil
+
 }
 
 func (p *archiverProvider) getArchiverKey(scheme, serviceName string) string {
diff --git a/common/archiver/s3store/README.md b/common/archiver/s3store/README.md
new file mode 100644
index 00000000000..fff81097a35
--- /dev/null
+++ b/common/archiver/s3store/README.md
@@ -0,0 +1,101 @@
+# Amazon S3 blobstore
+## Configuration
+See https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials for how to set up authentication against S3.
+
+Enable archival with the configuration below. `Region` and the bucket `URI` (under `domainDefaults`) are required.
+```
+archival:
+  history:
+    status: "enabled"
+    enableRead: true
+    provider:
+      s3store:
+        region: "us-east-1"
+  visibility:
+    status: "enabled"
+    enableRead: true
+    provider:
+      s3store:
+        region: "us-east-1"
+
+domainDefaults:
+  archival:
+    history:
+      status: "enabled"
+      URI: "s3://<bucket-name>"
+    visibility:
+      status: "enabled"
+      URI: "s3://<bucket-name>"
+```
+
+## Visibility query syntax
+You can query the visibility store with the `cadence workflow listarchived` command.
+
+The query syntax is based on SQL.
+
+Supported column names are
+- WorkflowID *String*
+- StartTime *Date*
+- CloseTime *Date*
+- SearchPrecision *String - Day, Hour, Minute, Second*
+
+WorkflowID and SearchPrecision are always required. Exactly one of StartTime or CloseTime is required; the two are mutually exclusive.
+
+All searches are performed against timestamps in the UTC timezone.
+
+SearchPrecision specifies how wide a time range to search. For example, `SearchPrecision = 'Day'`
+with `StartTime = '2020-01-21T00:00:00Z'` matches every record from `2020-01-21T00:00:00Z` through `2020-01-21T23:59:59Z`.
+
+### Limitations
+
+- The only operator supported is `=`, due to how records are stored in s3 (see the sketch below).
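+
+This restriction exists because lookups are implemented as prefix listings against S3 rather than as real queries. The sketch below is illustrative only (the helper name, key layout, and `startTimeout` segment are assumptions based on the storage description in this README, not the archiver's actual code); it shows how a timestamp truncated to the requested `SearchPrecision` becomes a list prefix, which can only express exact (`=`) matches.
+
+```
+package main
+
+import (
+	"fmt"
+	"time"
+)
+
+// truncateToPrecision keeps only the leading part of the timestamp so it can be
+// used as an S3 key prefix; coarser precisions therefore match more records.
+func truncateToPrecision(t time.Time, precision string) string {
+	switch precision {
+	case "Day":
+		return t.UTC().Format("2006-01-02")
+	case "Hour":
+		return t.UTC().Format("2006-01-02T15")
+	case "Minute":
+		return t.UTC().Format("2006-01-02T15:04")
+	default: // Second
+		return t.UTC().Format("2006-01-02T15:04:05")
+	}
+}
+
+func main() {
+	// StartTime = '2020-01-21T00:00:00Z' AND WorkflowID='workflow-id' AND SearchPrecision='Day'
+	start, _ := time.Parse(time.RFC3339, "2020-01-21T00:00:00Z")
+	// Every record whose start time falls on 2020-01-21 shares this prefix, so the
+	// query turns into a single list-by-prefix call; range operators would require
+	// listing and filtering far larger prefixes, which is why they are not offered.
+	prefix := fmt.Sprintf("domain-id/visibility/startTimeout/%s", truncateToPrecision(start, "Day"))
+	fmt.Println(prefix) // domain-id/visibility/startTimeout/2020-01-21
+}
+```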
+ +### Example + +*Searches for all records done in day 2020-01-21 with the specified workflow id* + +`./cadence --do samples-domain workflow listarchived -q "StartTime = '2020-01-21T00:00:00Z' AND WorkflowID='workflow-id' AND SearchPrecision='Day'"` +## Storage in S3 +Workflow runs are stored in s3 using the following structure +``` +s3:///// + history/ + visibility/ + startTimeout/2020-01-21T16:16:11Z/ + closeTimeout/2020-01-21T16:16:11Z/ +``` + +## Using localstack for local development +1. Install awscli from [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) +2. Install localstack from [here](https://github.com/localstack/localstack#installing) +3. Launch localstack with `SERVICES=s3 localstack start` +4. Create a bucket using `aws --endpoint-url=http://localhost:4572 s3 mb s3://cadence-development` +5. Configure archival and domainDefaults with the following configuration +``` +archival: + history: + status: "enabled" + enableRead: true + provider: + s3store: + region: "us-east-1" + endpoint: "http://127.0.0.1:4572" + s3ForcePathStyle: true + visibility: + status: "enabled" + enableRead: true + provider: + s3store: + region: "us-east-1" + endpoint: "http://127.0.0.1:4572" + s3ForcePathStyle: true + +domainDefaults: + archival: + history: + status: "enabled" + URI: "s3://cadence-development" + visibility: + status: "enabled" + URI: "s3://cadence-development" +``` diff --git a/common/archiver/s3store/historyArchiver.go b/common/archiver/s3store/historyArchiver.go new file mode 100644 index 00000000000..0e710c44c1d --- /dev/null +++ b/common/archiver/s3store/historyArchiver.go @@ -0,0 +1,346 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +// S3 History Archiver will archive workflow histories to amazon s3 + +package s3store + +import ( + "context" + "encoding/binary" + "errors" + "strconv" + "strings" + "time" + + "github.com/uber/cadence/common/metrics" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/request" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/service/s3/s3iface" + + "github.com/uber/cadence/.gen/go/shared" + "github.com/uber/cadence/common" + "github.com/uber/cadence/common/archiver" + "github.com/uber/cadence/common/backoff" + "github.com/uber/cadence/common/log/tag" + "github.com/uber/cadence/common/service/config" +) + +const ( + // URIScheme is the scheme for the s3 implementation + URIScheme = "s3" + errEncodeHistory = "failed to encode history batches" + errWriteKey = "failed to write history to s3" + defaultBlobstoreTimeout = 60 * time.Second + targetHistoryBlobSize = 2 * 1024 * 1024 // 2MB +) + +var ( + errNoBucketSpecified = errors.New("no bucket specified") + errBucketNotExists = errors.New("requested bucket does not exist") + errEmptyAwsRegion = errors.New("empty aws region") +) + +type ( + historyArchiver struct { + container *archiver.HistoryBootstrapContainer + s3cli s3iface.S3API + // only set in test code + historyIterator archiver.HistoryIterator + config *config.S3Archiver + } + + getHistoryToken struct { + CloseFailoverVersion int64 + NextBatchIdx int + } +) + +// NewHistoryArchiver creates a new archiver.HistoryArchiver based on s3 +func NewHistoryArchiver( + container *archiver.HistoryBootstrapContainer, + config *config.S3Archiver, +) (archiver.HistoryArchiver, error) { + return newHistoryArchiver(container, config, nil) +} + +func newHistoryArchiver( + container *archiver.HistoryBootstrapContainer, + config *config.S3Archiver, + historyIterator archiver.HistoryIterator, +) (*historyArchiver, error) { + if len(config.Region) == 0 { + return nil, errEmptyAwsRegion + } + s3Config := &aws.Config{ + Endpoint: config.Endpoint, + Region: aws.String(config.Region), + S3ForcePathStyle: aws.Bool(config.S3ForcePathStyle), + MaxRetries: aws.Int(0), + } + sess, err := session.NewSession(s3Config) + if err != nil { + return nil, err + } + + return &historyArchiver{ + container: container, + s3cli: s3.New(sess), + historyIterator: historyIterator, + }, nil +} +func (h *historyArchiver) Archive( + ctx context.Context, + URI archiver.URI, + request *archiver.ArchiveHistoryRequest, + opts ...archiver.ArchiveOption, +) (err error) { + scope := h.container.MetricsClient.Scope(metrics.HistoryArchiverScope, metrics.DomainTag(request.DomainName)) + featureCatalog := archiver.GetFeatureCatalog(opts...) 
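+	// The deferred function below records end-to-end latency and classifies any
+	// failure: transient persistence errors and retryable S3 errors only bump the
+	// transient-error counter, while everything else is counted as non-retryable
+	// and, if the caller supplied a NonRetriableError option, is converted into
+	// that error to signal the caller not to retry.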
+ sw := scope.StartTimer(metrics.CadenceLatency) + defer func() { + sw.Stop() + if err != nil { + if common.IsPersistenceTransientError(err) || isRetryableError(err) { + scope.IncCounter(metrics.HistoryArchiverArchiveTransientErrorCount) + } else { + scope.IncCounter(metrics.HistoryArchiverArchiveNonRetryableErrorCount) + if featureCatalog.NonRetriableError != nil { + err = featureCatalog.NonRetriableError() + } + } + } + }() + + logger := archiver.TagLoggerWithArchiveHistoryRequestAndURI(h.container.Logger, request, URI.String()) + + if err := softValidateURI(URI); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(archiver.ErrReasonInvalidURI), tag.Error(err)) + return err + } + + if err := archiver.ValidateHistoryArchiveRequest(request); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(archiver.ErrReasonInvalidArchiveRequest), tag.Error(err)) + return err + } + + historyIterator := h.historyIterator + if historyIterator == nil { // will only be set by testing code + historyIterator = archiver.NewHistoryIterator(request, h.container.HistoryV2Manager, targetHistoryBlobSize) + } + + historyBatches := []*shared.History{} + for historyIterator.HasNext() { + historyBlob, err := getNextHistoryBlob(ctx, historyIterator) + if err != nil { + logger := logger.WithTags(tag.ArchivalArchiveFailReason(archiver.ErrReasonReadHistory), tag.Error(err)) + if common.IsPersistenceTransientError(err) { + logger.Error(archiver.ArchiveTransientErrorMsg) + } else { + logger.Error(archiver.ArchiveNonRetriableErrorMsg) + } + return err + } + + if historyMutated(request, historyBlob.Body, *historyBlob.Header.IsLast) { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(archiver.ErrReasonHistoryMutated)) + return archiver.ErrHistoryMutated + } + + historyBatches = append(historyBatches, historyBlob.Body...) 
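+		// Accumulate every batch; once the iterator is exhausted the full history
+		// is encoded and uploaded below as a single S3 object.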
+ } + + encodedHistoryBatches, err := encode(historyBatches) + if err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(errEncodeHistory), tag.Error(err)) + return err + } + + key := constructHistoryKey(URI.Path(), request.DomainID, request.WorkflowID, request.RunID, request.CloseFailoverVersion) + + if err := upload(ctx, h.s3cli, URI, key, encodedHistoryBatches); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(errWriteKey), tag.Error(err)) + return err + } + totalUploadSize := int64(binary.Size(encodedHistoryBatches)) + scope.AddCounter(metrics.HistoryArchiverTotalUploadSize, totalUploadSize) + scope.AddCounter(metrics.HistoryArchiverHistorySize, totalUploadSize) + scope.IncCounter(metrics.HistoryArchiverArchiveSuccessCount) + + return nil +} + +func (h *historyArchiver) Get( + ctx context.Context, + URI archiver.URI, + request *archiver.GetHistoryRequest, +) (*archiver.GetHistoryResponse, error) { + if err := softValidateURI(URI); err != nil { + return nil, &shared.BadRequestError{Message: archiver.ErrInvalidURI.Error()} + } + + if err := archiver.ValidateGetRequest(request); err != nil { + return nil, &shared.BadRequestError{Message: archiver.ErrInvalidGetHistoryRequest.Error()} + } + + var err error + var token *getHistoryToken + if request.NextPageToken != nil { + token, err = deserializeGetHistoryToken(request.NextPageToken) + if err != nil { + return nil, &shared.BadRequestError{Message: archiver.ErrNextPageTokenCorrupted.Error()} + } + } else if request.CloseFailoverVersion != nil { + token = &getHistoryToken{ + CloseFailoverVersion: *request.CloseFailoverVersion, + NextBatchIdx: 0, + } + } else { + highestVersion, err := h.getHighestVersion(ctx, URI, request) + if err != nil { + return nil, &shared.BadRequestError{Message: err.Error()} + } + token = &getHistoryToken{ + CloseFailoverVersion: *highestVersion, + NextBatchIdx: 0, + } + } + + key := constructHistoryKey(URI.Path(), request.DomainID, request.WorkflowID, request.RunID, token.CloseFailoverVersion) + encodedHistoryBatches, err := download(ctx, h.s3cli, URI, key) + if err != nil { + return nil, err + } + historyBatches, err := decodeHistoryBatches(encodedHistoryBatches) + if err != nil { + return nil, &shared.InternalServiceError{Message: err.Error()} + } + historyBatches = historyBatches[token.NextBatchIdx:] + + response := &archiver.GetHistoryResponse{} + numOfEvents := 0 + numOfBatches := 0 + for _, batch := range historyBatches { + response.HistoryBatches = append(response.HistoryBatches, batch) + numOfBatches++ + numOfEvents += len(batch.Events) + if numOfEvents >= request.PageSize { + break + } + } + + if numOfBatches < len(historyBatches) { + token.NextBatchIdx += numOfBatches + nextToken, err := serializeToken(token) + if err != nil { + return nil, &shared.InternalServiceError{Message: err.Error()} + } + response.NextPageToken = nextToken + } + + return response, nil +} + +func (h *historyArchiver) ValidateURI(URI archiver.URI) error { + err := softValidateURI(URI) + if err != nil { + return err + } + return bucketExists(context.TODO(), h.s3cli, URI) +} + +func getNextHistoryBlob(ctx context.Context, historyIterator archiver.HistoryIterator) (*archiver.HistoryBlob, error) { + historyBlob, err := historyIterator.Next() + op := func() error { + historyBlob, err = historyIterator.Next() + return err + } + for err != nil { + if !common.IsPersistenceTransientError(err) { + return nil, err + } + if contextExpired(ctx) { + return nil, 
archiver.ErrContextTimeout + } + err = backoff.Retry(op, common.CreatePersistanceRetryPolicy(), common.IsPersistenceTransientError) + } + return historyBlob, nil +} + +func (h *historyArchiver) getHighestVersion(ctx context.Context, URI archiver.URI, request *archiver.GetHistoryRequest) (*int64, error) { + ctx, cancel := ensureContextTimeout(ctx) + defer cancel() + var prefix = constructHistoryKeyPrefix(URI.Path(), request.DomainID, request.WorkflowID, request.RunID) + "/" + results, err := h.s3cli.ListObjectsV2WithContext(ctx, &s3.ListObjectsV2Input{ + Bucket: aws.String(URI.Hostname()), + Prefix: aws.String(prefix), + }) + if err != nil { + if aerr, ok := err.(awserr.Error); ok && aerr.Code() == s3.ErrCodeNoSuchBucket { + return nil, &shared.BadRequestError{Message: errBucketNotExists.Error()} + } + return nil, err + } + var highestVersion *int64 + + for _, v := range results.Contents { + var version int64 + version, err = strconv.ParseInt(strings.Replace(*v.Key, prefix, "", 1), 10, 64) + if err != nil { + continue + } + if highestVersion == nil || version > *highestVersion { + highestVersion = &version + } + } + if highestVersion == nil { + return nil, archiver.ErrHistoryNotExist + } + return highestVersion, nil +} + +func isRetryableError(err error) bool { + if err == nil { + return false + } + if aerr, ok := err.(awserr.Error); ok { + return isStatusCodeRetryable(aerr) || request.IsErrorRetryable(aerr) || request.IsErrorThrottle(aerr) + } + return false +} + +func isStatusCodeRetryable(err error) bool { + if aerr, ok := err.(awserr.Error); ok { + if rerr, ok := err.(awserr.RequestFailure); ok { + if rerr.StatusCode() == 429 { + return true + } + if rerr.StatusCode() >= 500 && rerr.StatusCode() != 501 { + return true + } + } + return isStatusCodeRetryable(aerr.OrigErr()) + } + return false +} diff --git a/common/archiver/s3store/historyArchiver_test.go b/common/archiver/s3store/historyArchiver_test.go new file mode 100644 index 00000000000..9eceb2287a9 --- /dev/null +++ b/common/archiver/s3store/historyArchiver_test.go @@ -0,0 +1,666 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +package s3store + +import ( + "bytes" + "context" + "errors" + "fmt" + "io/ioutil" + "sort" + "strconv" + "strings" + "testing" + "time" + + "github.com/uber-go/tally" + + "github.com/uber/cadence/common/metrics" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/request" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/stretchr/testify/mock" + + "github.com/uber/cadence/common/archiver/s3store/mocks" + + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + "go.uber.org/zap" + + "github.com/uber/cadence/.gen/go/shared" + "github.com/uber/cadence/common" + "github.com/uber/cadence/common/archiver" + "github.com/uber/cadence/common/log" + "github.com/uber/cadence/common/log/loggerimpl" +) + +const ( + testDomainID = "test-domain-id" + testDomainName = "test-domain-name" + testWorkflowID = "test-workflow-id" + testRunID = "test-run-id" + testNextEventID = 1800 + testCloseFailoverVersion = 100 + testPageSize = 100 + testBucket = "test-bucket" + testBucketURI = "s3://test-bucket" +) + +var ( + testBranchToken = []byte{1, 2, 3} +) + +type historyArchiverSuite struct { + *require.Assertions + suite.Suite + s3cli *mocks.S3API + container *archiver.HistoryBootstrapContainer + logger log.Logger + testArchivalURI archiver.URI + historyBatchesV1 []*shared.History + historyBatchesV100 []*shared.History +} + +func TestHistoryArchiverSuite(t *testing.T) { + suite.Run(t, new(historyArchiverSuite)) +} + +func (s *historyArchiverSuite) SetupSuite() { + var err error + s.s3cli = &mocks.S3API{} + setupFsEmulation(s.s3cli) + s.setupHistoryDirectory() + s.testArchivalURI, err = archiver.NewURI(testBucketURI) + + s.Require().NoError(err) +} + +func (s *historyArchiverSuite) TearDownSuite() { +} + +func (s *historyArchiverSuite) SetupTest() { + scope := tally.NewTestScope("test", nil) + s.Assertions = require.New(s.T()) + zapLogger := zap.NewNop() + s.container = &archiver.HistoryBootstrapContainer{ + Logger: loggerimpl.NewLogger(zapLogger), + MetricsClient: metrics.NewClient(scope, metrics.HistoryArchiverScope), + } +} + +func setupFsEmulation(s3cli *mocks.S3API) { + fs := make(map[string][]byte) + + putObjectFn := func(_ aws.Context, input *s3.PutObjectInput, _ ...request.Option) *s3.PutObjectOutput { + buf := new(bytes.Buffer) + buf.ReadFrom(input.Body) + fs[*input.Bucket+*input.Key] = buf.Bytes() + return &s3.PutObjectOutput{} + } + getObjectFn := func(_ aws.Context, input *s3.GetObjectInput, _ ...request.Option) *s3.GetObjectOutput { + return &s3.GetObjectOutput{ + Body: ioutil.NopCloser(bytes.NewReader(fs[*input.Bucket+*input.Key])), + } + } + s3cli.On("ListObjectsV2WithContext", mock.Anything, mock.Anything). 
+ Return(func(_ context.Context, input *s3.ListObjectsV2Input, opts ...request.Option) *s3.ListObjectsV2Output { + objects := make([]*s3.Object, 0) + for k := range fs { + if strings.HasPrefix(k, *input.Bucket+*input.Prefix) { + objects = append(objects, &s3.Object{ + Key: aws.String(k[len(*input.Bucket):]), + }) + } + } + sort.SliceStable(objects, func(i, j int) bool { + return *objects[i].Key < *objects[j].Key + }) + maxKeys := 1000 + if input.MaxKeys != nil { + maxKeys = int(*input.MaxKeys) + } + start := 0 + if input.ContinuationToken != nil { + start, _ = strconv.Atoi(*input.ContinuationToken) + } + + isTruncated := false + var nextContinuationToken *string + if len(objects) > start+maxKeys { + isTruncated = true + nextContinuationToken = common.StringPtr(fmt.Sprintf("%d", start+maxKeys)) + objects = objects[start : start+maxKeys] + } else { + objects = objects[start:] + } + + return &s3.ListObjectsV2Output{ + Contents: objects, + IsTruncated: &isTruncated, + NextContinuationToken: nextContinuationToken, + } + }, nil) + s3cli.On("PutObjectWithContext", mock.Anything, mock.Anything).Return(putObjectFn, nil) + + s3cli.On("GetObjectWithContext", mock.Anything, mock.MatchedBy(func(input *s3.GetObjectInput) bool { + _, ok := fs[*input.Bucket+*input.Key] + return !ok + })).Return(nil, awserr.New(s3.ErrCodeNoSuchKey, "", nil)) + s3cli.On("GetObjectWithContext", mock.Anything, mock.Anything).Return(getObjectFn, nil) +} + +func (s *historyArchiverSuite) TestValidateURI() { + testCases := []struct { + URI string + expectedErr error + }{ + { + URI: "wrongscheme:///a/b/c", + expectedErr: archiver.ErrURISchemeMismatch, + }, + { + URI: "s3://", + expectedErr: errNoBucketSpecified, + }, + { + URI: "s3://bucket/a/b/c", + expectedErr: errBucketNotExists, + }, + { + URI: testBucketURI, + expectedErr: nil, + }, + } + + s.s3cli.On("HeadBucketWithContext", mock.Anything, mock.MatchedBy(func(input *s3.HeadBucketInput) bool { + return *input.Bucket != s.testArchivalURI.Hostname() + })).Return(nil, awserr.New("NotFound", "", nil)) + s.s3cli.On("HeadBucketWithContext", mock.Anything, mock.Anything).Return(&s3.HeadBucketOutput{}, nil) + + historyArchiver := s.newTestHistoryArchiver(nil) + for _, tc := range testCases { + URI, err := archiver.NewURI(tc.URI) + s.NoError(err) + s.Equal(tc.expectedErr, historyArchiver.ValidateURI(URI)) + } +} + +func (s *historyArchiverSuite) TestArchive_Fail_InvalidURI() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + URI, err := archiver.NewURI("wrongscheme://") + s.NoError(err) + err = historyArchiver.Archive(context.Background(), URI, request) + s.Error(err) +} + +func (s *historyArchiverSuite) TestArchive_Fail_InvalidRequest() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: "", // an invalid request + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + err := historyArchiver.Archive(context.Background(), s.testArchivalURI, request) + s.Error(err) +} + +func (s *historyArchiverSuite) TestArchive_Fail_ErrorOnReadHistory() { + mockCtrl := gomock.NewController(s.T()) + defer mockCtrl.Finish() + 
historyIterator := archiver.NewMockHistoryIterator(mockCtrl) + gomock.InOrder( + historyIterator.EXPECT().HasNext().Return(true), + historyIterator.EXPECT().Next().Return(nil, errors.New("some random error")), + ) + + historyArchiver := s.newTestHistoryArchiver(historyIterator) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + err := historyArchiver.Archive(context.Background(), s.testArchivalURI, request) + s.Error(err) +} + +func (s *historyArchiverSuite) TestArchive_Fail_TimeoutWhenReadingHistory() { + mockCtrl := gomock.NewController(s.T()) + defer mockCtrl.Finish() + historyIterator := archiver.NewMockHistoryIterator(mockCtrl) + gomock.InOrder( + historyIterator.EXPECT().HasNext().Return(true), + historyIterator.EXPECT().Next().Return(nil, &shared.ServiceBusyError{}), + ) + + historyArchiver := s.newTestHistoryArchiver(historyIterator) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + err := historyArchiver.Archive(getCanceledContext(), s.testArchivalURI, request) + s.Error(err) +} + +func (s *historyArchiverSuite) TestArchive_Fail_HistoryMutated() { + mockCtrl := gomock.NewController(s.T()) + defer mockCtrl.Finish() + historyIterator := archiver.NewMockHistoryIterator(mockCtrl) + historyBatches := []*shared.History{ + &shared.History{ + Events: []*shared.HistoryEvent{ + &shared.HistoryEvent{ + EventId: common.Int64Ptr(common.FirstEventID + 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion + 1), + }, + }, + }, + } + historyBlob := &archiver.HistoryBlob{ + Header: &archiver.HistoryBlobHeader{ + IsLast: common.BoolPtr(true), + }, + Body: historyBatches, + } + gomock.InOrder( + historyIterator.EXPECT().HasNext().Return(true), + historyIterator.EXPECT().Next().Return(historyBlob, nil), + ) + + historyArchiver := s.newTestHistoryArchiver(historyIterator) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + err := historyArchiver.Archive(context.Background(), s.testArchivalURI, request) + s.Error(err) +} + +func (s *historyArchiverSuite) TestArchive_Fail_NonRetriableErrorOption() { + mockCtrl := gomock.NewController(s.T()) + defer mockCtrl.Finish() + historyIterator := archiver.NewMockHistoryIterator(mockCtrl) + gomock.InOrder( + historyIterator.EXPECT().HasNext().Return(true), + historyIterator.EXPECT().Next().Return(nil, errors.New("some random error")), + ) + + historyArchiver := s.newTestHistoryArchiver(historyIterator) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + nonRetryableErr := errors.New("some non-retryable error") + err := historyArchiver.Archive(context.Background(), s.testArchivalURI, request, archiver.GetNonRetriableErrorOption(nonRetryableErr)) 
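+	// The iterator error is not transient, so Archive should replace it with the
+	// error supplied via GetNonRetriableErrorOption.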
+ s.Equal(nonRetryableErr, err) +} + +func (s *historyArchiverSuite) TestArchive_Success() { + mockCtrl := gomock.NewController(s.T()) + defer mockCtrl.Finish() + historyIterator := archiver.NewMockHistoryIterator(mockCtrl) + historyBatches := []*shared.History{ + &shared.History{ + Events: []*shared.HistoryEvent{ + &shared.HistoryEvent{ + EventId: common.Int64Ptr(common.FirstEventID + 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion), + }, + &shared.HistoryEvent{ + EventId: common.Int64Ptr(common.FirstEventID + 2), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion), + }, + }, + }, + &shared.History{ + Events: []*shared.HistoryEvent{ + &shared.HistoryEvent{ + EventId: common.Int64Ptr(testNextEventID - 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion), + }, + }, + }, + } + historyBlob := &archiver.HistoryBlob{ + Header: &archiver.HistoryBlobHeader{ + IsLast: common.BoolPtr(true), + }, + Body: historyBatches, + } + gomock.InOrder( + historyIterator.EXPECT().HasNext().Return(true), + historyIterator.EXPECT().Next().Return(historyBlob, nil), + historyIterator.EXPECT().HasNext().Return(false), + ) + + historyArchiver := s.newTestHistoryArchiver(historyIterator) + request := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + URI, err := archiver.NewURI(testBucketURI + "/TestArchive_Success") + s.NoError(err) + err = historyArchiver.Archive(context.Background(), URI, request) + s.NoError(err) + + expectedkey := constructHistoryKey("", testDomainID, testWorkflowID, testRunID, testCloseFailoverVersion) + s.assertKeyExists(expectedkey) +} + +func (s *historyArchiverSuite) TestGet_Fail_InvalidURI() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: 100, + } + URI, err := archiver.NewURI("wrongscheme://") + s.NoError(err) + response, err := historyArchiver.Get(context.Background(), URI, request) + s.Nil(response) + s.Error(err) +} + +func (s *historyArchiverSuite) TestGet_Fail_InvalidRequest() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: 0, // pageSize should be greater than 0 + } + response, err := historyArchiver.Get(context.Background(), s.testArchivalURI, request) + s.Nil(response) + s.Error(err) + s.IsType(&shared.BadRequestError{}, err) +} + +func (s *historyArchiverSuite) TestGet_Fail_InvalidToken() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: testPageSize, + NextPageToken: []byte{'r', 'a', 'n', 'd', 'o', 'm'}, + } + URI, err := archiver.NewURI(testBucketURI) + s.NoError(err) + response, err := historyArchiver.Get(context.Background(), URI, request) + s.Nil(response) + s.Error(err) + s.IsType(&shared.BadRequestError{}, err) +} + +func (s *historyArchiverSuite) TestGet_Fail_KeyNotExist() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: 
testWorkflowID, + RunID: testRunID, + PageSize: testPageSize, + CloseFailoverVersion: common.Int64Ptr(testCloseFailoverVersion), + } + URI, err := archiver.NewURI("s3://test-bucket/non-existent") + s.NoError(err) + response, err := historyArchiver.Get(context.Background(), URI, request) + s.Nil(response) + s.Error(err) + s.IsType(&shared.BadRequestError{}, err) +} + +func (s *historyArchiverSuite) TestGet_Success_PickHighestVersion() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: testPageSize, + } + URI, err := archiver.NewURI(testBucketURI) + s.NoError(err) + response, err := historyArchiver.Get(context.Background(), URI, request) + s.NoError(err) + s.Nil(response.NextPageToken) + s.Equal(s.historyBatchesV100, response.HistoryBatches) +} + +func (s *historyArchiverSuite) TestGet_Success_UseProvidedVersion() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: testPageSize, + CloseFailoverVersion: common.Int64Ptr(1), + } + URI, err := archiver.NewURI(testBucketURI) + s.NoError(err) + response, err := historyArchiver.Get(context.Background(), URI, request) + s.NoError(err) + s.Nil(response.NextPageToken) + s.Equal(s.historyBatchesV1, response.HistoryBatches) +} + +func (s *historyArchiverSuite) TestGet_Success_SmallPageSize() { + historyArchiver := s.newTestHistoryArchiver(nil) + request := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: 1, + CloseFailoverVersion: common.Int64Ptr(100), + } + combinedHistory := []*shared.History{} + + URI, err := archiver.NewURI(testBucketURI) + s.NoError(err) + response, err := historyArchiver.Get(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.NotNil(response.NextPageToken) + s.NotNil(response.HistoryBatches) + s.Len(response.HistoryBatches, 1) + combinedHistory = append(combinedHistory, response.HistoryBatches...) + + request.NextPageToken = response.NextPageToken + response, err = historyArchiver.Get(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.Nil(response.NextPageToken) + s.NotNil(response.HistoryBatches) + s.Len(response.HistoryBatches, 1) + combinedHistory = append(combinedHistory, response.HistoryBatches...) 
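+	// Two pages of size one should reassemble exactly the original v100 batches.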
+ + s.Equal(s.historyBatchesV100, combinedHistory) +} + +func (s *historyArchiverSuite) TestArchiveAndGet() { + mockCtrl := gomock.NewController(s.T()) + defer mockCtrl.Finish() + historyIterator := archiver.NewMockHistoryIterator(mockCtrl) + historyBlob := &archiver.HistoryBlob{ + Header: &archiver.HistoryBlobHeader{ + IsLast: common.BoolPtr(true), + }, + Body: s.historyBatchesV100, + } + gomock.InOrder( + historyIterator.EXPECT().HasNext().Return(true), + historyIterator.EXPECT().Next().Return(historyBlob, nil), + historyIterator.EXPECT().HasNext().Return(false), + ) + + historyArchiver := s.newTestHistoryArchiver(historyIterator) + archiveRequest := &archiver.ArchiveHistoryRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + BranchToken: testBranchToken, + NextEventID: testNextEventID, + CloseFailoverVersion: testCloseFailoverVersion, + } + URI, err := archiver.NewURI(testBucketURI + "/TestArchiveAndGet") + s.NoError(err) + err = historyArchiver.Archive(context.Background(), URI, archiveRequest) + s.NoError(err) + + expectedkey := constructHistoryKey("", testDomainID, testWorkflowID, testRunID, testCloseFailoverVersion) + s.assertKeyExists(expectedkey) + + getRequest := &archiver.GetHistoryRequest{ + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + PageSize: testPageSize, + } + response, err := historyArchiver.Get(context.Background(), URI, getRequest) + s.NoError(err) + s.NotNil(response) + s.Nil(response.NextPageToken) + s.Equal(s.historyBatchesV100, response.HistoryBatches) +} + +func (s *historyArchiverSuite) newTestHistoryArchiver(historyIterator archiver.HistoryIterator) *historyArchiver { + //config := &config.S3Archiver{} + //archiver, err := newHistoryArchiver(s.container, config, historyIterator) + archiver := &historyArchiver{ + container: s.container, + s3cli: s.s3cli, + historyIterator: historyIterator, + } + return archiver +} + +func (s *historyArchiverSuite) setupHistoryDirectory() { + s.historyBatchesV1 = []*shared.History{ + &shared.History{ + Events: []*shared.HistoryEvent{ + &shared.HistoryEvent{ + EventId: common.Int64Ptr(testNextEventID - 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(1), + }, + }, + }, + } + + s.historyBatchesV100 = []*shared.History{ + &shared.History{ + Events: []*shared.HistoryEvent{ + &shared.HistoryEvent{ + EventId: common.Int64Ptr(common.FirstEventID + 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion), + }, + &shared.HistoryEvent{ + EventId: common.Int64Ptr(common.FirstEventID + 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion), + }, + }, + }, + &shared.History{ + Events: []*shared.HistoryEvent{ + &shared.HistoryEvent{ + EventId: common.Int64Ptr(testNextEventID - 1), + Timestamp: common.Int64Ptr(time.Now().UnixNano()), + Version: common.Int64Ptr(testCloseFailoverVersion), + }, + }, + }, + } + + s.writeHistoryBatchesForGetTest(s.historyBatchesV1, int64(1)) + s.writeHistoryBatchesForGetTest(s.historyBatchesV100, testCloseFailoverVersion) +} + +func (s *historyArchiverSuite) writeHistoryBatchesForGetTest(historyBatches []*shared.History, version int64) { + data, err := encode(historyBatches) + s.Require().NoError(err) + key := constructHistoryKey("", testDomainID, testWorkflowID, testRunID, version) + _, err = s.s3cli.PutObjectWithContext(context.Background(), &s3.PutObjectInput{ + Bucket: 
aws.String(testBucket), + Key: aws.String(key), + Body: bytes.NewReader(data), + }) + s.Require().NoError(err) +} + +func (s *historyArchiverSuite) assertKeyExists(key string) { + _, err := s.s3cli.GetObjectWithContext(context.Background(), &s3.GetObjectInput{ + Bucket: aws.String(testBucket), + Key: aws.String(key), + }) + s.NoError(err) +} + +func getCanceledContext() context.Context { + ctx, cancel := context.WithCancel(context.Background()) + cancel() + return ctx +} diff --git a/common/archiver/s3store/mocks/S3API.go b/common/archiver/s3store/mocks/S3API.go new file mode 100644 index 00000000000..4f561b4a7ee --- /dev/null +++ b/common/archiver/s3store/mocks/S3API.go @@ -0,0 +1,7214 @@ +// The MIT License (MIT) +// +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// Code generated by mockery v1.0.0. DO NOT EDIT. 
+ +package mocks + +import context "context" +import mock "github.com/stretchr/testify/mock" +import request "github.com/aws/aws-sdk-go/aws/request" +import s3 "github.com/aws/aws-sdk-go/service/s3" + +// S3API is an autogenerated mock type for the S3API type +type S3API struct { + mock.Mock +} + +// AbortMultipartUpload provides a mock function with given fields: _a0 +func (_m *S3API) AbortMultipartUpload(_a0 *s3.AbortMultipartUploadInput) (*s3.AbortMultipartUploadOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.AbortMultipartUploadOutput + if rf, ok := ret.Get(0).(func(*s3.AbortMultipartUploadInput) *s3.AbortMultipartUploadOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.AbortMultipartUploadOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.AbortMultipartUploadInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// AbortMultipartUploadRequest provides a mock function with given fields: _a0 +func (_m *S3API) AbortMultipartUploadRequest(_a0 *s3.AbortMultipartUploadInput) (*request.Request, *s3.AbortMultipartUploadOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.AbortMultipartUploadInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.AbortMultipartUploadOutput + if rf, ok := ret.Get(1).(func(*s3.AbortMultipartUploadInput) *s3.AbortMultipartUploadOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.AbortMultipartUploadOutput) + } + } + + return r0, r1 +} + +// AbortMultipartUploadWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) AbortMultipartUploadWithContext(_a0 context.Context, _a1 *s3.AbortMultipartUploadInput, _a2 ...request.Option) (*s3.AbortMultipartUploadOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.AbortMultipartUploadOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.AbortMultipartUploadInput, ...request.Option) *s3.AbortMultipartUploadOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.AbortMultipartUploadOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.AbortMultipartUploadInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CompleteMultipartUpload provides a mock function with given fields: _a0 +func (_m *S3API) CompleteMultipartUpload(_a0 *s3.CompleteMultipartUploadInput) (*s3.CompleteMultipartUploadOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.CompleteMultipartUploadOutput + if rf, ok := ret.Get(0).(func(*s3.CompleteMultipartUploadInput) *s3.CompleteMultipartUploadOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CompleteMultipartUploadOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.CompleteMultipartUploadInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CompleteMultipartUploadRequest provides a mock function with given fields: _a0 +func (_m *S3API) CompleteMultipartUploadRequest(_a0 *s3.CompleteMultipartUploadInput) (*request.Request, *s3.CompleteMultipartUploadOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.CompleteMultipartUploadInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.CompleteMultipartUploadOutput + if rf, ok := ret.Get(1).(func(*s3.CompleteMultipartUploadInput) *s3.CompleteMultipartUploadOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.CompleteMultipartUploadOutput) + } + } + + return r0, r1 +} + +// CompleteMultipartUploadWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) CompleteMultipartUploadWithContext(_a0 context.Context, _a1 *s3.CompleteMultipartUploadInput, _a2 ...request.Option) (*s3.CompleteMultipartUploadOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.CompleteMultipartUploadOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.CompleteMultipartUploadInput, ...request.Option) *s3.CompleteMultipartUploadOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CompleteMultipartUploadOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.CompleteMultipartUploadInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CopyObject provides a mock function with given fields: _a0 +func (_m *S3API) CopyObject(_a0 *s3.CopyObjectInput) (*s3.CopyObjectOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.CopyObjectOutput + if rf, ok := ret.Get(0).(func(*s3.CopyObjectInput) *s3.CopyObjectOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CopyObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.CopyObjectInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CopyObjectRequest provides a mock function with given fields: _a0 +func (_m *S3API) CopyObjectRequest(_a0 *s3.CopyObjectInput) (*request.Request, *s3.CopyObjectOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.CopyObjectInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.CopyObjectOutput + if rf, ok := ret.Get(1).(func(*s3.CopyObjectInput) *s3.CopyObjectOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.CopyObjectOutput) + } + } + + return r0, r1 +} + +// CopyObjectWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) CopyObjectWithContext(_a0 context.Context, _a1 *s3.CopyObjectInput, _a2 ...request.Option) (*s3.CopyObjectOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.CopyObjectOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.CopyObjectInput, ...request.Option) *s3.CopyObjectOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CopyObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.CopyObjectInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateBucket provides a mock function with given fields: _a0 +func (_m *S3API) CreateBucket(_a0 *s3.CreateBucketInput) (*s3.CreateBucketOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.CreateBucketOutput + if rf, ok := ret.Get(0).(func(*s3.CreateBucketInput) *s3.CreateBucketOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CreateBucketOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.CreateBucketInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateBucketRequest provides a mock function with given fields: _a0 +func (_m *S3API) CreateBucketRequest(_a0 *s3.CreateBucketInput) (*request.Request, *s3.CreateBucketOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.CreateBucketInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.CreateBucketOutput + if rf, ok := ret.Get(1).(func(*s3.CreateBucketInput) *s3.CreateBucketOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.CreateBucketOutput) + } + } + + return r0, r1 +} + +// CreateBucketWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) CreateBucketWithContext(_a0 context.Context, _a1 *s3.CreateBucketInput, _a2 ...request.Option) (*s3.CreateBucketOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.CreateBucketOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.CreateBucketInput, ...request.Option) *s3.CreateBucketOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CreateBucketOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.CreateBucketInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateMultipartUpload provides a mock function with given fields: _a0 +func (_m *S3API) CreateMultipartUpload(_a0 *s3.CreateMultipartUploadInput) (*s3.CreateMultipartUploadOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.CreateMultipartUploadOutput + if rf, ok := ret.Get(0).(func(*s3.CreateMultipartUploadInput) *s3.CreateMultipartUploadOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CreateMultipartUploadOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.CreateMultipartUploadInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateMultipartUploadRequest provides a mock function with given fields: _a0 +func (_m *S3API) CreateMultipartUploadRequest(_a0 *s3.CreateMultipartUploadInput) (*request.Request, *s3.CreateMultipartUploadOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.CreateMultipartUploadInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.CreateMultipartUploadOutput + if rf, ok := ret.Get(1).(func(*s3.CreateMultipartUploadInput) *s3.CreateMultipartUploadOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.CreateMultipartUploadOutput) + } + } + + return r0, r1 +} + +// CreateMultipartUploadWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) CreateMultipartUploadWithContext(_a0 context.Context, _a1 *s3.CreateMultipartUploadInput, _a2 ...request.Option) (*s3.CreateMultipartUploadOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.CreateMultipartUploadOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.CreateMultipartUploadInput, ...request.Option) *s3.CreateMultipartUploadOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.CreateMultipartUploadOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.CreateMultipartUploadInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucket provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucket(_a0 *s3.DeleteBucketInput) (*s3.DeleteBucketOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketInput) *s3.DeleteBucketOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketAnalyticsConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketAnalyticsConfiguration(_a0 *s3.DeleteBucketAnalyticsConfigurationInput) (*s3.DeleteBucketAnalyticsConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketAnalyticsConfigurationInput) *s3.DeleteBucketAnalyticsConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketAnalyticsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketAnalyticsConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketAnalyticsConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketAnalyticsConfigurationRequest(_a0 *s3.DeleteBucketAnalyticsConfigurationInput) (*request.Request, *s3.DeleteBucketAnalyticsConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketAnalyticsConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketAnalyticsConfigurationInput) *s3.DeleteBucketAnalyticsConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketAnalyticsConfigurationOutput) + } + } + + return r0, r1 +} + +// DeleteBucketAnalyticsConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketAnalyticsConfigurationWithContext(_a0 context.Context, _a1 *s3.DeleteBucketAnalyticsConfigurationInput, _a2 ...request.Option) (*s3.DeleteBucketAnalyticsConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketAnalyticsConfigurationInput, ...request.Option) *s3.DeleteBucketAnalyticsConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketAnalyticsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketAnalyticsConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketCors provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketCors(_a0 *s3.DeleteBucketCorsInput) (*s3.DeleteBucketCorsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketCorsOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketCorsInput) *s3.DeleteBucketCorsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketCorsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketCorsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketCorsRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketCorsRequest(_a0 *s3.DeleteBucketCorsInput) (*request.Request, *s3.DeleteBucketCorsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketCorsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketCorsOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketCorsInput) *s3.DeleteBucketCorsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketCorsOutput) + } + } + + return r0, r1 +} + +// DeleteBucketCorsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketCorsWithContext(_a0 context.Context, _a1 *s3.DeleteBucketCorsInput, _a2 ...request.Option) (*s3.DeleteBucketCorsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketCorsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketCorsInput, ...request.Option) *s3.DeleteBucketCorsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketCorsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketCorsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketEncryption provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketEncryption(_a0 *s3.DeleteBucketEncryptionInput) (*s3.DeleteBucketEncryptionOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketEncryptionOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketEncryptionInput) *s3.DeleteBucketEncryptionOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketEncryptionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketEncryptionInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketEncryptionRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketEncryptionRequest(_a0 *s3.DeleteBucketEncryptionInput) (*request.Request, *s3.DeleteBucketEncryptionOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketEncryptionInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketEncryptionOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketEncryptionInput) *s3.DeleteBucketEncryptionOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketEncryptionOutput) + } + } + + return r0, r1 +} + +// DeleteBucketEncryptionWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketEncryptionWithContext(_a0 context.Context, _a1 *s3.DeleteBucketEncryptionInput, _a2 ...request.Option) (*s3.DeleteBucketEncryptionOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketEncryptionOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketEncryptionInput, ...request.Option) *s3.DeleteBucketEncryptionOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketEncryptionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketEncryptionInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketInventoryConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketInventoryConfiguration(_a0 *s3.DeleteBucketInventoryConfigurationInput) (*s3.DeleteBucketInventoryConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketInventoryConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketInventoryConfigurationInput) *s3.DeleteBucketInventoryConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketInventoryConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketInventoryConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketInventoryConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketInventoryConfigurationRequest(_a0 *s3.DeleteBucketInventoryConfigurationInput) (*request.Request, *s3.DeleteBucketInventoryConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketInventoryConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketInventoryConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketInventoryConfigurationInput) *s3.DeleteBucketInventoryConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketInventoryConfigurationOutput) + } + } + + return r0, r1 +} + +// DeleteBucketInventoryConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketInventoryConfigurationWithContext(_a0 context.Context, _a1 *s3.DeleteBucketInventoryConfigurationInput, _a2 ...request.Option) (*s3.DeleteBucketInventoryConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketInventoryConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketInventoryConfigurationInput, ...request.Option) *s3.DeleteBucketInventoryConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketInventoryConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketInventoryConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketLifecycle provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketLifecycle(_a0 *s3.DeleteBucketLifecycleInput) (*s3.DeleteBucketLifecycleOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketLifecycleOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketLifecycleInput) *s3.DeleteBucketLifecycleOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketLifecycleOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketLifecycleInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketLifecycleRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketLifecycleRequest(_a0 *s3.DeleteBucketLifecycleInput) (*request.Request, *s3.DeleteBucketLifecycleOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketLifecycleInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketLifecycleOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketLifecycleInput) *s3.DeleteBucketLifecycleOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketLifecycleOutput) + } + } + + return r0, r1 +} + +// DeleteBucketLifecycleWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketLifecycleWithContext(_a0 context.Context, _a1 *s3.DeleteBucketLifecycleInput, _a2 ...request.Option) (*s3.DeleteBucketLifecycleOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketLifecycleOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketLifecycleInput, ...request.Option) *s3.DeleteBucketLifecycleOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketLifecycleOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketLifecycleInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketMetricsConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketMetricsConfiguration(_a0 *s3.DeleteBucketMetricsConfigurationInput) (*s3.DeleteBucketMetricsConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketMetricsConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketMetricsConfigurationInput) *s3.DeleteBucketMetricsConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketMetricsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketMetricsConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketMetricsConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketMetricsConfigurationRequest(_a0 *s3.DeleteBucketMetricsConfigurationInput) (*request.Request, *s3.DeleteBucketMetricsConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketMetricsConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketMetricsConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketMetricsConfigurationInput) *s3.DeleteBucketMetricsConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketMetricsConfigurationOutput) + } + } + + return r0, r1 +} + +// DeleteBucketMetricsConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketMetricsConfigurationWithContext(_a0 context.Context, _a1 *s3.DeleteBucketMetricsConfigurationInput, _a2 ...request.Option) (*s3.DeleteBucketMetricsConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketMetricsConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketMetricsConfigurationInput, ...request.Option) *s3.DeleteBucketMetricsConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketMetricsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketMetricsConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketPolicy provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketPolicy(_a0 *s3.DeleteBucketPolicyInput) (*s3.DeleteBucketPolicyOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketPolicyOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketPolicyInput) *s3.DeleteBucketPolicyOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketPolicyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketPolicyInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketPolicyRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketPolicyRequest(_a0 *s3.DeleteBucketPolicyInput) (*request.Request, *s3.DeleteBucketPolicyOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketPolicyInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketPolicyOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketPolicyInput) *s3.DeleteBucketPolicyOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketPolicyOutput) + } + } + + return r0, r1 +} + +// DeleteBucketPolicyWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketPolicyWithContext(_a0 context.Context, _a1 *s3.DeleteBucketPolicyInput, _a2 ...request.Option) (*s3.DeleteBucketPolicyOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketPolicyOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketPolicyInput, ...request.Option) *s3.DeleteBucketPolicyOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketPolicyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketPolicyInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketReplication provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketReplication(_a0 *s3.DeleteBucketReplicationInput) (*s3.DeleteBucketReplicationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketReplicationOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketReplicationInput) *s3.DeleteBucketReplicationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketReplicationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketReplicationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketReplicationRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketReplicationRequest(_a0 *s3.DeleteBucketReplicationInput) (*request.Request, *s3.DeleteBucketReplicationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketReplicationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketReplicationOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketReplicationInput) *s3.DeleteBucketReplicationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketReplicationOutput) + } + } + + return r0, r1 +} + +// DeleteBucketReplicationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketReplicationWithContext(_a0 context.Context, _a1 *s3.DeleteBucketReplicationInput, _a2 ...request.Option) (*s3.DeleteBucketReplicationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketReplicationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketReplicationInput, ...request.Option) *s3.DeleteBucketReplicationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketReplicationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketReplicationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketRequest(_a0 *s3.DeleteBucketInput) (*request.Request, *s3.DeleteBucketOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketInput) *s3.DeleteBucketOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketOutput) + } + } + + return r0, r1 +} + +// DeleteBucketTagging provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketTagging(_a0 *s3.DeleteBucketTaggingInput) (*s3.DeleteBucketTaggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketTaggingOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketTaggingInput) *s3.DeleteBucketTaggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketTaggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketTaggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketTaggingRequest(_a0 *s3.DeleteBucketTaggingInput) (*request.Request, *s3.DeleteBucketTaggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketTaggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketTaggingOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketTaggingInput) *s3.DeleteBucketTaggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketTaggingOutput) + } + } + + return r0, r1 +} + +// DeleteBucketTaggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketTaggingWithContext(_a0 context.Context, _a1 *s3.DeleteBucketTaggingInput, _a2 ...request.Option) (*s3.DeleteBucketTaggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketTaggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketTaggingInput, ...request.Option) *s3.DeleteBucketTaggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketTaggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketWebsite provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketWebsite(_a0 *s3.DeleteBucketWebsiteInput) (*s3.DeleteBucketWebsiteOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteBucketWebsiteOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketWebsiteInput) *s3.DeleteBucketWebsiteOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketWebsiteOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketWebsiteInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketWebsiteRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteBucketWebsiteRequest(_a0 *s3.DeleteBucketWebsiteInput) (*request.Request, *s3.DeleteBucketWebsiteOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteBucketWebsiteInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteBucketWebsiteOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteBucketWebsiteInput) *s3.DeleteBucketWebsiteOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteBucketWebsiteOutput) + } + } + + return r0, r1 +} + +// DeleteBucketWebsiteWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketWebsiteWithContext(_a0 context.Context, _a1 *s3.DeleteBucketWebsiteInput, _a2 ...request.Option) (*s3.DeleteBucketWebsiteOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketWebsiteOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketWebsiteInput, ...request.Option) *s3.DeleteBucketWebsiteOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketWebsiteOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketWebsiteInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteBucketWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteBucketWithContext(_a0 context.Context, _a1 *s3.DeleteBucketInput, _a2 ...request.Option) (*s3.DeleteBucketOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteBucketOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteBucketInput, ...request.Option) *s3.DeleteBucketOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteBucketOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteBucketInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteObject provides a mock function with given fields: _a0 +func (_m *S3API) DeleteObject(_a0 *s3.DeleteObjectInput) (*s3.DeleteObjectOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteObjectOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteObjectInput) *s3.DeleteObjectOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteObjectInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteObjectRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteObjectRequest(_a0 *s3.DeleteObjectInput) (*request.Request, *s3.DeleteObjectOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteObjectInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteObjectOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteObjectInput) *s3.DeleteObjectOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteObjectOutput) + } + } + + return r0, r1 +} + +// DeleteObjectTagging provides a mock function with given fields: _a0 +func (_m *S3API) DeleteObjectTagging(_a0 *s3.DeleteObjectTaggingInput) (*s3.DeleteObjectTaggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteObjectTaggingOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteObjectTaggingInput) *s3.DeleteObjectTaggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteObjectTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteObjectTaggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteObjectTaggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteObjectTaggingRequest(_a0 *s3.DeleteObjectTaggingInput) (*request.Request, *s3.DeleteObjectTaggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteObjectTaggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteObjectTaggingOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteObjectTaggingInput) *s3.DeleteObjectTaggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteObjectTaggingOutput) + } + } + + return r0, r1 +} + +// DeleteObjectTaggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteObjectTaggingWithContext(_a0 context.Context, _a1 *s3.DeleteObjectTaggingInput, _a2 ...request.Option) (*s3.DeleteObjectTaggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteObjectTaggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteObjectTaggingInput, ...request.Option) *s3.DeleteObjectTaggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteObjectTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteObjectTaggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteObjectWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteObjectWithContext(_a0 context.Context, _a1 *s3.DeleteObjectInput, _a2 ...request.Option) (*s3.DeleteObjectOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteObjectOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteObjectInput, ...request.Option) *s3.DeleteObjectOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteObjectInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteObjects provides a mock function with given fields: _a0 +func (_m *S3API) DeleteObjects(_a0 *s3.DeleteObjectsInput) (*s3.DeleteObjectsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeleteObjectsOutput + if rf, ok := ret.Get(0).(func(*s3.DeleteObjectsInput) *s3.DeleteObjectsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteObjectsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeleteObjectsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteObjectsRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeleteObjectsRequest(_a0 *s3.DeleteObjectsInput) (*request.Request, *s3.DeleteObjectsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeleteObjectsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeleteObjectsOutput + if rf, ok := ret.Get(1).(func(*s3.DeleteObjectsInput) *s3.DeleteObjectsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeleteObjectsOutput) + } + } + + return r0, r1 +} + +// DeleteObjectsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeleteObjectsWithContext(_a0 context.Context, _a1 *s3.DeleteObjectsInput, _a2 ...request.Option) (*s3.DeleteObjectsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeleteObjectsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeleteObjectsInput, ...request.Option) *s3.DeleteObjectsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeleteObjectsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeleteObjectsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeletePublicAccessBlock provides a mock function with given fields: _a0 +func (_m *S3API) DeletePublicAccessBlock(_a0 *s3.DeletePublicAccessBlockInput) (*s3.DeletePublicAccessBlockOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.DeletePublicAccessBlockOutput + if rf, ok := ret.Get(0).(func(*s3.DeletePublicAccessBlockInput) *s3.DeletePublicAccessBlockOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeletePublicAccessBlockOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.DeletePublicAccessBlockInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeletePublicAccessBlockRequest provides a mock function with given fields: _a0 +func (_m *S3API) DeletePublicAccessBlockRequest(_a0 *s3.DeletePublicAccessBlockInput) (*request.Request, *s3.DeletePublicAccessBlockOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.DeletePublicAccessBlockInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.DeletePublicAccessBlockOutput + if rf, ok := ret.Get(1).(func(*s3.DeletePublicAccessBlockInput) *s3.DeletePublicAccessBlockOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.DeletePublicAccessBlockOutput) + } + } + + return r0, r1 +} + +// DeletePublicAccessBlockWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) DeletePublicAccessBlockWithContext(_a0 context.Context, _a1 *s3.DeletePublicAccessBlockInput, _a2 ...request.Option) (*s3.DeletePublicAccessBlockOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.DeletePublicAccessBlockOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.DeletePublicAccessBlockInput, ...request.Option) *s3.DeletePublicAccessBlockOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.DeletePublicAccessBlockOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.DeletePublicAccessBlockInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketAccelerateConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketAccelerateConfiguration(_a0 *s3.GetBucketAccelerateConfigurationInput) (*s3.GetBucketAccelerateConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketAccelerateConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketAccelerateConfigurationInput) *s3.GetBucketAccelerateConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketAccelerateConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketAccelerateConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketAccelerateConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketAccelerateConfigurationRequest(_a0 *s3.GetBucketAccelerateConfigurationInput) (*request.Request, *s3.GetBucketAccelerateConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketAccelerateConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketAccelerateConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketAccelerateConfigurationInput) *s3.GetBucketAccelerateConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketAccelerateConfigurationOutput) + } + } + + return r0, r1 +} + +// GetBucketAccelerateConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketAccelerateConfigurationWithContext(_a0 context.Context, _a1 *s3.GetBucketAccelerateConfigurationInput, _a2 ...request.Option) (*s3.GetBucketAccelerateConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketAccelerateConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketAccelerateConfigurationInput, ...request.Option) *s3.GetBucketAccelerateConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketAccelerateConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketAccelerateConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketAcl provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketAcl(_a0 *s3.GetBucketAclInput) (*s3.GetBucketAclOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketAclOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketAclInput) *s3.GetBucketAclOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketAclInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketAclRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketAclRequest(_a0 *s3.GetBucketAclInput) (*request.Request, *s3.GetBucketAclOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketAclInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketAclOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketAclInput) *s3.GetBucketAclOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketAclOutput) + } + } + + return r0, r1 +} + +// GetBucketAclWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketAclWithContext(_a0 context.Context, _a1 *s3.GetBucketAclInput, _a2 ...request.Option) (*s3.GetBucketAclOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketAclOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketAclInput, ...request.Option) *s3.GetBucketAclOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketAclInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketAnalyticsConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketAnalyticsConfiguration(_a0 *s3.GetBucketAnalyticsConfigurationInput) (*s3.GetBucketAnalyticsConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketAnalyticsConfigurationInput) *s3.GetBucketAnalyticsConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketAnalyticsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketAnalyticsConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketAnalyticsConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketAnalyticsConfigurationRequest(_a0 *s3.GetBucketAnalyticsConfigurationInput) (*request.Request, *s3.GetBucketAnalyticsConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketAnalyticsConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketAnalyticsConfigurationInput) *s3.GetBucketAnalyticsConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketAnalyticsConfigurationOutput) + } + } + + return r0, r1 +} + +// GetBucketAnalyticsConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketAnalyticsConfigurationWithContext(_a0 context.Context, _a1 *s3.GetBucketAnalyticsConfigurationInput, _a2 ...request.Option) (*s3.GetBucketAnalyticsConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketAnalyticsConfigurationInput, ...request.Option) *s3.GetBucketAnalyticsConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketAnalyticsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketAnalyticsConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketCors provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketCors(_a0 *s3.GetBucketCorsInput) (*s3.GetBucketCorsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketCorsOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketCorsInput) *s3.GetBucketCorsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketCorsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketCorsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketCorsRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketCorsRequest(_a0 *s3.GetBucketCorsInput) (*request.Request, *s3.GetBucketCorsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketCorsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketCorsOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketCorsInput) *s3.GetBucketCorsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketCorsOutput) + } + } + + return r0, r1 +} + +// GetBucketCorsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketCorsWithContext(_a0 context.Context, _a1 *s3.GetBucketCorsInput, _a2 ...request.Option) (*s3.GetBucketCorsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketCorsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketCorsInput, ...request.Option) *s3.GetBucketCorsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketCorsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketCorsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketEncryption provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketEncryption(_a0 *s3.GetBucketEncryptionInput) (*s3.GetBucketEncryptionOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketEncryptionOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketEncryptionInput) *s3.GetBucketEncryptionOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketEncryptionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketEncryptionInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketEncryptionRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketEncryptionRequest(_a0 *s3.GetBucketEncryptionInput) (*request.Request, *s3.GetBucketEncryptionOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketEncryptionInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketEncryptionOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketEncryptionInput) *s3.GetBucketEncryptionOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketEncryptionOutput) + } + } + + return r0, r1 +} + +// GetBucketEncryptionWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketEncryptionWithContext(_a0 context.Context, _a1 *s3.GetBucketEncryptionInput, _a2 ...request.Option) (*s3.GetBucketEncryptionOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketEncryptionOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketEncryptionInput, ...request.Option) *s3.GetBucketEncryptionOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketEncryptionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketEncryptionInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketInventoryConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketInventoryConfiguration(_a0 *s3.GetBucketInventoryConfigurationInput) (*s3.GetBucketInventoryConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketInventoryConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketInventoryConfigurationInput) *s3.GetBucketInventoryConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketInventoryConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketInventoryConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketInventoryConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketInventoryConfigurationRequest(_a0 *s3.GetBucketInventoryConfigurationInput) (*request.Request, *s3.GetBucketInventoryConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketInventoryConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketInventoryConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketInventoryConfigurationInput) *s3.GetBucketInventoryConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketInventoryConfigurationOutput) + } + } + + return r0, r1 +} + +// GetBucketInventoryConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketInventoryConfigurationWithContext(_a0 context.Context, _a1 *s3.GetBucketInventoryConfigurationInput, _a2 ...request.Option) (*s3.GetBucketInventoryConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketInventoryConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketInventoryConfigurationInput, ...request.Option) *s3.GetBucketInventoryConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketInventoryConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketInventoryConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLifecycle provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLifecycle(_a0 *s3.GetBucketLifecycleInput) (*s3.GetBucketLifecycleOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketLifecycleOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketLifecycleInput) *s3.GetBucketLifecycleOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLifecycleOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketLifecycleInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLifecycleConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLifecycleConfiguration(_a0 *s3.GetBucketLifecycleConfigurationInput) (*s3.GetBucketLifecycleConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketLifecycleConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketLifecycleConfigurationInput) *s3.GetBucketLifecycleConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLifecycleConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketLifecycleConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLifecycleConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLifecycleConfigurationRequest(_a0 *s3.GetBucketLifecycleConfigurationInput) (*request.Request, *s3.GetBucketLifecycleConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketLifecycleConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketLifecycleConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketLifecycleConfigurationInput) *s3.GetBucketLifecycleConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketLifecycleConfigurationOutput) + } + } + + return r0, r1 +} + +// GetBucketLifecycleConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketLifecycleConfigurationWithContext(_a0 context.Context, _a1 *s3.GetBucketLifecycleConfigurationInput, _a2 ...request.Option) (*s3.GetBucketLifecycleConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketLifecycleConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketLifecycleConfigurationInput, ...request.Option) *s3.GetBucketLifecycleConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLifecycleConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketLifecycleConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLifecycleRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLifecycleRequest(_a0 *s3.GetBucketLifecycleInput) (*request.Request, *s3.GetBucketLifecycleOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketLifecycleInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketLifecycleOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketLifecycleInput) *s3.GetBucketLifecycleOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketLifecycleOutput) + } + } + + return r0, r1 +} + +// GetBucketLifecycleWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketLifecycleWithContext(_a0 context.Context, _a1 *s3.GetBucketLifecycleInput, _a2 ...request.Option) (*s3.GetBucketLifecycleOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketLifecycleOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketLifecycleInput, ...request.Option) *s3.GetBucketLifecycleOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLifecycleOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketLifecycleInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLocation provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLocation(_a0 *s3.GetBucketLocationInput) (*s3.GetBucketLocationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketLocationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketLocationInput) *s3.GetBucketLocationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLocationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketLocationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLocationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLocationRequest(_a0 *s3.GetBucketLocationInput) (*request.Request, *s3.GetBucketLocationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketLocationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketLocationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketLocationInput) *s3.GetBucketLocationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketLocationOutput) + } + } + + return r0, r1 +} + +// GetBucketLocationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketLocationWithContext(_a0 context.Context, _a1 *s3.GetBucketLocationInput, _a2 ...request.Option) (*s3.GetBucketLocationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *s3.GetBucketLocationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketLocationInput, ...request.Option) *s3.GetBucketLocationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLocationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketLocationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLogging provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLogging(_a0 *s3.GetBucketLoggingInput) (*s3.GetBucketLoggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketLoggingOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketLoggingInput) *s3.GetBucketLoggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLoggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketLoggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketLoggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketLoggingRequest(_a0 *s3.GetBucketLoggingInput) (*request.Request, *s3.GetBucketLoggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketLoggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketLoggingOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketLoggingInput) *s3.GetBucketLoggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketLoggingOutput) + } + } + + return r0, r1 +} + +// GetBucketLoggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketLoggingWithContext(_a0 context.Context, _a1 *s3.GetBucketLoggingInput, _a2 ...request.Option) (*s3.GetBucketLoggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketLoggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketLoggingInput, ...request.Option) *s3.GetBucketLoggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketLoggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketLoggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketMetricsConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketMetricsConfiguration(_a0 *s3.GetBucketMetricsConfigurationInput) (*s3.GetBucketMetricsConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketMetricsConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketMetricsConfigurationInput) *s3.GetBucketMetricsConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketMetricsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketMetricsConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketMetricsConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketMetricsConfigurationRequest(_a0 *s3.GetBucketMetricsConfigurationInput) (*request.Request, *s3.GetBucketMetricsConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketMetricsConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketMetricsConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketMetricsConfigurationInput) *s3.GetBucketMetricsConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketMetricsConfigurationOutput) + } + } + + return r0, r1 +} + +// GetBucketMetricsConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketMetricsConfigurationWithContext(_a0 context.Context, _a1 *s3.GetBucketMetricsConfigurationInput, _a2 ...request.Option) (*s3.GetBucketMetricsConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketMetricsConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketMetricsConfigurationInput, ...request.Option) *s3.GetBucketMetricsConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketMetricsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketMetricsConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketNotification provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketNotification(_a0 *s3.GetBucketNotificationConfigurationRequest) (*s3.NotificationConfigurationDeprecated, error) { + ret := _m.Called(_a0) + + var r0 *s3.NotificationConfigurationDeprecated + if rf, ok := ret.Get(0).(func(*s3.GetBucketNotificationConfigurationRequest) *s3.NotificationConfigurationDeprecated); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.NotificationConfigurationDeprecated) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketNotificationConfigurationRequest) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketNotificationConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketNotificationConfiguration(_a0 *s3.GetBucketNotificationConfigurationRequest) (*s3.NotificationConfiguration, error) { + ret := _m.Called(_a0) + + var r0 *s3.NotificationConfiguration + if rf, ok := ret.Get(0).(func(*s3.GetBucketNotificationConfigurationRequest) *s3.NotificationConfiguration); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.NotificationConfiguration) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketNotificationConfigurationRequest) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketNotificationConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketNotificationConfigurationRequest(_a0 *s3.GetBucketNotificationConfigurationRequest) (*request.Request, *s3.NotificationConfiguration) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketNotificationConfigurationRequest) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.NotificationConfiguration + if rf, ok := ret.Get(1).(func(*s3.GetBucketNotificationConfigurationRequest) *s3.NotificationConfiguration); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.NotificationConfiguration) + } + } + + return r0, r1 +} + +// GetBucketNotificationConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketNotificationConfigurationWithContext(_a0 context.Context, _a1 *s3.GetBucketNotificationConfigurationRequest, _a2 ...request.Option) (*s3.NotificationConfiguration, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.NotificationConfiguration + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketNotificationConfigurationRequest, ...request.Option) *s3.NotificationConfiguration); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.NotificationConfiguration) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketNotificationConfigurationRequest, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketNotificationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketNotificationRequest(_a0 *s3.GetBucketNotificationConfigurationRequest) (*request.Request, *s3.NotificationConfigurationDeprecated) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketNotificationConfigurationRequest) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.NotificationConfigurationDeprecated + if rf, ok := ret.Get(1).(func(*s3.GetBucketNotificationConfigurationRequest) *s3.NotificationConfigurationDeprecated); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.NotificationConfigurationDeprecated) + } + } + + return r0, r1 +} + +// GetBucketNotificationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketNotificationWithContext(_a0 context.Context, _a1 *s3.GetBucketNotificationConfigurationRequest, _a2 ...request.Option) (*s3.NotificationConfigurationDeprecated, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.NotificationConfigurationDeprecated + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketNotificationConfigurationRequest, ...request.Option) *s3.NotificationConfigurationDeprecated); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.NotificationConfigurationDeprecated) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketNotificationConfigurationRequest, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketPolicy provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketPolicy(_a0 *s3.GetBucketPolicyInput) (*s3.GetBucketPolicyOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketPolicyOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketPolicyInput) *s3.GetBucketPolicyOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketPolicyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketPolicyInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketPolicyRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketPolicyRequest(_a0 *s3.GetBucketPolicyInput) (*request.Request, *s3.GetBucketPolicyOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketPolicyInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketPolicyOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketPolicyInput) *s3.GetBucketPolicyOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketPolicyOutput) + } + } + + return r0, r1 +} + +// GetBucketPolicyStatus provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketPolicyStatus(_a0 *s3.GetBucketPolicyStatusInput) (*s3.GetBucketPolicyStatusOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketPolicyStatusOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketPolicyStatusInput) *s3.GetBucketPolicyStatusOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketPolicyStatusOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketPolicyStatusInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketPolicyStatusRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketPolicyStatusRequest(_a0 *s3.GetBucketPolicyStatusInput) (*request.Request, *s3.GetBucketPolicyStatusOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketPolicyStatusInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketPolicyStatusOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketPolicyStatusInput) *s3.GetBucketPolicyStatusOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketPolicyStatusOutput) + } + } + + return r0, r1 +} + +// GetBucketPolicyStatusWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketPolicyStatusWithContext(_a0 context.Context, _a1 *s3.GetBucketPolicyStatusInput, _a2 ...request.Option) (*s3.GetBucketPolicyStatusOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketPolicyStatusOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketPolicyStatusInput, ...request.Option) *s3.GetBucketPolicyStatusOutput); ok { + r0 = rf(_a0, _a1, _a2...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketPolicyStatusOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketPolicyStatusInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketPolicyWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketPolicyWithContext(_a0 context.Context, _a1 *s3.GetBucketPolicyInput, _a2 ...request.Option) (*s3.GetBucketPolicyOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketPolicyOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketPolicyInput, ...request.Option) *s3.GetBucketPolicyOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketPolicyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketPolicyInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketReplication provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketReplication(_a0 *s3.GetBucketReplicationInput) (*s3.GetBucketReplicationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketReplicationOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketReplicationInput) *s3.GetBucketReplicationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketReplicationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketReplicationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketReplicationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketReplicationRequest(_a0 *s3.GetBucketReplicationInput) (*request.Request, *s3.GetBucketReplicationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketReplicationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketReplicationOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketReplicationInput) *s3.GetBucketReplicationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketReplicationOutput) + } + } + + return r0, r1 +} + +// GetBucketReplicationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketReplicationWithContext(_a0 context.Context, _a1 *s3.GetBucketReplicationInput, _a2 ...request.Option) (*s3.GetBucketReplicationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketReplicationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketReplicationInput, ...request.Option) *s3.GetBucketReplicationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketReplicationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketReplicationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketRequestPayment provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketRequestPayment(_a0 *s3.GetBucketRequestPaymentInput) (*s3.GetBucketRequestPaymentOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketRequestPaymentOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketRequestPaymentInput) *s3.GetBucketRequestPaymentOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketRequestPaymentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketRequestPaymentInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketRequestPaymentRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketRequestPaymentRequest(_a0 *s3.GetBucketRequestPaymentInput) (*request.Request, *s3.GetBucketRequestPaymentOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketRequestPaymentInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketRequestPaymentOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketRequestPaymentInput) *s3.GetBucketRequestPaymentOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketRequestPaymentOutput) + } + } + + return r0, r1 +} + +// GetBucketRequestPaymentWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketRequestPaymentWithContext(_a0 context.Context, _a1 *s3.GetBucketRequestPaymentInput, _a2 ...request.Option) (*s3.GetBucketRequestPaymentOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketRequestPaymentOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketRequestPaymentInput, ...request.Option) *s3.GetBucketRequestPaymentOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketRequestPaymentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketRequestPaymentInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketTagging provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketTagging(_a0 *s3.GetBucketTaggingInput) (*s3.GetBucketTaggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketTaggingOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketTaggingInput) *s3.GetBucketTaggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketTaggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketTaggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketTaggingRequest(_a0 *s3.GetBucketTaggingInput) (*request.Request, *s3.GetBucketTaggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketTaggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketTaggingOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketTaggingInput) *s3.GetBucketTaggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketTaggingOutput) + } + } + + return r0, r1 +} + +// GetBucketTaggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketTaggingWithContext(_a0 context.Context, _a1 *s3.GetBucketTaggingInput, _a2 ...request.Option) (*s3.GetBucketTaggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketTaggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketTaggingInput, ...request.Option) *s3.GetBucketTaggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketTaggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketVersioning provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketVersioning(_a0 *s3.GetBucketVersioningInput) (*s3.GetBucketVersioningOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketVersioningOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketVersioningInput) *s3.GetBucketVersioningOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketVersioningOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketVersioningInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketVersioningRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketVersioningRequest(_a0 *s3.GetBucketVersioningInput) (*request.Request, *s3.GetBucketVersioningOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketVersioningInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketVersioningOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketVersioningInput) *s3.GetBucketVersioningOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketVersioningOutput) + } + } + + return r0, r1 +} + +// GetBucketVersioningWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketVersioningWithContext(_a0 context.Context, _a1 *s3.GetBucketVersioningInput, _a2 ...request.Option) (*s3.GetBucketVersioningOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketVersioningOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketVersioningInput, ...request.Option) *s3.GetBucketVersioningOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketVersioningOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketVersioningInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketWebsite provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketWebsite(_a0 *s3.GetBucketWebsiteInput) (*s3.GetBucketWebsiteOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetBucketWebsiteOutput + if rf, ok := ret.Get(0).(func(*s3.GetBucketWebsiteInput) *s3.GetBucketWebsiteOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketWebsiteOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetBucketWebsiteInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBucketWebsiteRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetBucketWebsiteRequest(_a0 *s3.GetBucketWebsiteInput) (*request.Request, *s3.GetBucketWebsiteOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetBucketWebsiteInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetBucketWebsiteOutput + if rf, ok := ret.Get(1).(func(*s3.GetBucketWebsiteInput) *s3.GetBucketWebsiteOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetBucketWebsiteOutput) + } + } + + return r0, r1 +} + +// GetBucketWebsiteWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetBucketWebsiteWithContext(_a0 context.Context, _a1 *s3.GetBucketWebsiteInput, _a2 ...request.Option) (*s3.GetBucketWebsiteOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetBucketWebsiteOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetBucketWebsiteInput, ...request.Option) *s3.GetBucketWebsiteOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetBucketWebsiteOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetBucketWebsiteInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObject provides a mock function with given fields: _a0 +func (_m *S3API) GetObject(_a0 *s3.GetObjectInput) (*s3.GetObjectOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectInput) *s3.GetObjectOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectAcl provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectAcl(_a0 *s3.GetObjectAclInput) (*s3.GetObjectAclOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectAclOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectAclInput) *s3.GetObjectAclOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectAclInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectAclRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectAclRequest(_a0 *s3.GetObjectAclInput) (*request.Request, *s3.GetObjectAclOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectAclInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectAclOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectAclInput) *s3.GetObjectAclOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectAclOutput) + } + } + + return r0, r1 +} + +// GetObjectAclWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectAclWithContext(_a0 context.Context, _a1 *s3.GetObjectAclInput, _a2 ...request.Option) (*s3.GetObjectAclOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectAclOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectAclInput, ...request.Option) *s3.GetObjectAclOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectAclInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectLegalHold provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectLegalHold(_a0 *s3.GetObjectLegalHoldInput) (*s3.GetObjectLegalHoldOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectLegalHoldOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectLegalHoldInput) *s3.GetObjectLegalHoldOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectLegalHoldOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectLegalHoldInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectLegalHoldRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectLegalHoldRequest(_a0 *s3.GetObjectLegalHoldInput) (*request.Request, *s3.GetObjectLegalHoldOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectLegalHoldInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectLegalHoldOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectLegalHoldInput) *s3.GetObjectLegalHoldOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectLegalHoldOutput) + } + } + + return r0, r1 +} + +// GetObjectLegalHoldWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectLegalHoldWithContext(_a0 context.Context, _a1 *s3.GetObjectLegalHoldInput, _a2 ...request.Option) (*s3.GetObjectLegalHoldOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectLegalHoldOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectLegalHoldInput, ...request.Option) *s3.GetObjectLegalHoldOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectLegalHoldOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectLegalHoldInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectLockConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectLockConfiguration(_a0 *s3.GetObjectLockConfigurationInput) (*s3.GetObjectLockConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectLockConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectLockConfigurationInput) *s3.GetObjectLockConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectLockConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectLockConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectLockConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectLockConfigurationRequest(_a0 *s3.GetObjectLockConfigurationInput) (*request.Request, *s3.GetObjectLockConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectLockConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectLockConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectLockConfigurationInput) *s3.GetObjectLockConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectLockConfigurationOutput) + } + } + + return r0, r1 +} + +// GetObjectLockConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectLockConfigurationWithContext(_a0 context.Context, _a1 *s3.GetObjectLockConfigurationInput, _a2 ...request.Option) (*s3.GetObjectLockConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectLockConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectLockConfigurationInput, ...request.Option) *s3.GetObjectLockConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectLockConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectLockConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectRequest(_a0 *s3.GetObjectInput) (*request.Request, *s3.GetObjectOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectInput) *s3.GetObjectOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectOutput) + } + } + + return r0, r1 +} + +// GetObjectRetention provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectRetention(_a0 *s3.GetObjectRetentionInput) (*s3.GetObjectRetentionOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectRetentionOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectRetentionInput) *s3.GetObjectRetentionOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectRetentionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectRetentionInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectRetentionRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectRetentionRequest(_a0 *s3.GetObjectRetentionInput) (*request.Request, *s3.GetObjectRetentionOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectRetentionInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectRetentionOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectRetentionInput) *s3.GetObjectRetentionOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectRetentionOutput) + } + } + + return r0, r1 +} + +// GetObjectRetentionWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectRetentionWithContext(_a0 context.Context, _a1 *s3.GetObjectRetentionInput, _a2 ...request.Option) (*s3.GetObjectRetentionOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectRetentionOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectRetentionInput, ...request.Option) *s3.GetObjectRetentionOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectRetentionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectRetentionInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectTagging provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectTagging(_a0 *s3.GetObjectTaggingInput) (*s3.GetObjectTaggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectTaggingOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectTaggingInput) *s3.GetObjectTaggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectTaggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectTaggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectTaggingRequest(_a0 *s3.GetObjectTaggingInput) (*request.Request, *s3.GetObjectTaggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectTaggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectTaggingOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectTaggingInput) *s3.GetObjectTaggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectTaggingOutput) + } + } + + return r0, r1 +} + +// GetObjectTaggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectTaggingWithContext(_a0 context.Context, _a1 *s3.GetObjectTaggingInput, _a2 ...request.Option) (*s3.GetObjectTaggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectTaggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectTaggingInput, ...request.Option) *s3.GetObjectTaggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectTaggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectTorrent provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectTorrent(_a0 *s3.GetObjectTorrentInput) (*s3.GetObjectTorrentOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetObjectTorrentOutput + if rf, ok := ret.Get(0).(func(*s3.GetObjectTorrentInput) *s3.GetObjectTorrentOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectTorrentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetObjectTorrentInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectTorrentRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetObjectTorrentRequest(_a0 *s3.GetObjectTorrentInput) (*request.Request, *s3.GetObjectTorrentOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetObjectTorrentInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetObjectTorrentOutput + if rf, ok := ret.Get(1).(func(*s3.GetObjectTorrentInput) *s3.GetObjectTorrentOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetObjectTorrentOutput) + } + } + + return r0, r1 +} + +// GetObjectTorrentWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectTorrentWithContext(_a0 context.Context, _a1 *s3.GetObjectTorrentInput, _a2 ...request.Option) (*s3.GetObjectTorrentOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectTorrentOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectTorrentInput, ...request.Option) *s3.GetObjectTorrentOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectTorrentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectTorrentInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetObjectWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetObjectWithContext(_a0 context.Context, _a1 *s3.GetObjectInput, _a2 ...request.Option) (*s3.GetObjectOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetObjectOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectInput, ...request.Option) *s3.GetObjectOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPublicAccessBlock provides a mock function with given fields: _a0 +func (_m *S3API) GetPublicAccessBlock(_a0 *s3.GetPublicAccessBlockInput) (*s3.GetPublicAccessBlockOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.GetPublicAccessBlockOutput + if rf, ok := ret.Get(0).(func(*s3.GetPublicAccessBlockInput) *s3.GetPublicAccessBlockOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetPublicAccessBlockOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.GetPublicAccessBlockInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPublicAccessBlockRequest provides a mock function with given fields: _a0 +func (_m *S3API) GetPublicAccessBlockRequest(_a0 *s3.GetPublicAccessBlockInput) (*request.Request, *s3.GetPublicAccessBlockOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.GetPublicAccessBlockInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.GetPublicAccessBlockOutput + if rf, ok := ret.Get(1).(func(*s3.GetPublicAccessBlockInput) *s3.GetPublicAccessBlockOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.GetPublicAccessBlockOutput) + } + } + + return r0, r1 +} + +// GetPublicAccessBlockWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) GetPublicAccessBlockWithContext(_a0 context.Context, _a1 *s3.GetPublicAccessBlockInput, _a2 ...request.Option) (*s3.GetPublicAccessBlockOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.GetPublicAccessBlockOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.GetPublicAccessBlockInput, ...request.Option) *s3.GetPublicAccessBlockOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.GetPublicAccessBlockOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.GetPublicAccessBlockInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// HeadBucket provides a mock function with given fields: _a0 +func (_m *S3API) HeadBucket(_a0 *s3.HeadBucketInput) (*s3.HeadBucketOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.HeadBucketOutput + if rf, ok := ret.Get(0).(func(*s3.HeadBucketInput) *s3.HeadBucketOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.HeadBucketOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.HeadBucketInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// HeadBucketRequest provides a mock function with given fields: _a0 +func (_m *S3API) HeadBucketRequest(_a0 *s3.HeadBucketInput) (*request.Request, *s3.HeadBucketOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.HeadBucketInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.HeadBucketOutput + if rf, ok := ret.Get(1).(func(*s3.HeadBucketInput) *s3.HeadBucketOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.HeadBucketOutput) + } + } + + return r0, r1 +} + +// HeadBucketWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) HeadBucketWithContext(_a0 context.Context, _a1 *s3.HeadBucketInput, _a2 ...request.Option) (*s3.HeadBucketOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.HeadBucketOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.HeadBucketInput, ...request.Option) *s3.HeadBucketOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.HeadBucketOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.HeadBucketInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// HeadObject provides a mock function with given fields: _a0 +func (_m *S3API) HeadObject(_a0 *s3.HeadObjectInput) (*s3.HeadObjectOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.HeadObjectOutput + if rf, ok := ret.Get(0).(func(*s3.HeadObjectInput) *s3.HeadObjectOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.HeadObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.HeadObjectInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// HeadObjectRequest provides a mock function with given fields: _a0 +func (_m *S3API) HeadObjectRequest(_a0 *s3.HeadObjectInput) (*request.Request, *s3.HeadObjectOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.HeadObjectInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.HeadObjectOutput + if rf, ok := ret.Get(1).(func(*s3.HeadObjectInput) *s3.HeadObjectOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.HeadObjectOutput) + } + } + + return r0, r1 +} + +// HeadObjectWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) HeadObjectWithContext(_a0 context.Context, _a1 *s3.HeadObjectInput, _a2 ...request.Option) (*s3.HeadObjectOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.HeadObjectOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.HeadObjectInput, ...request.Option) *s3.HeadObjectOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.HeadObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.HeadObjectInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketAnalyticsConfigurations provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketAnalyticsConfigurations(_a0 *s3.ListBucketAnalyticsConfigurationsInput) (*s3.ListBucketAnalyticsConfigurationsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListBucketAnalyticsConfigurationsOutput + if rf, ok := ret.Get(0).(func(*s3.ListBucketAnalyticsConfigurationsInput) *s3.ListBucketAnalyticsConfigurationsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketAnalyticsConfigurationsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListBucketAnalyticsConfigurationsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketAnalyticsConfigurationsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketAnalyticsConfigurationsRequest(_a0 *s3.ListBucketAnalyticsConfigurationsInput) (*request.Request, *s3.ListBucketAnalyticsConfigurationsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListBucketAnalyticsConfigurationsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListBucketAnalyticsConfigurationsOutput + if rf, ok := ret.Get(1).(func(*s3.ListBucketAnalyticsConfigurationsInput) *s3.ListBucketAnalyticsConfigurationsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListBucketAnalyticsConfigurationsOutput) + } + } + + return r0, r1 +} + +// ListBucketAnalyticsConfigurationsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListBucketAnalyticsConfigurationsWithContext(_a0 context.Context, _a1 *s3.ListBucketAnalyticsConfigurationsInput, _a2 ...request.Option) (*s3.ListBucketAnalyticsConfigurationsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListBucketAnalyticsConfigurationsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListBucketAnalyticsConfigurationsInput, ...request.Option) *s3.ListBucketAnalyticsConfigurationsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketAnalyticsConfigurationsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListBucketAnalyticsConfigurationsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketInventoryConfigurations provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketInventoryConfigurations(_a0 *s3.ListBucketInventoryConfigurationsInput) (*s3.ListBucketInventoryConfigurationsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListBucketInventoryConfigurationsOutput + if rf, ok := ret.Get(0).(func(*s3.ListBucketInventoryConfigurationsInput) *s3.ListBucketInventoryConfigurationsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketInventoryConfigurationsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListBucketInventoryConfigurationsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketInventoryConfigurationsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketInventoryConfigurationsRequest(_a0 *s3.ListBucketInventoryConfigurationsInput) (*request.Request, *s3.ListBucketInventoryConfigurationsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListBucketInventoryConfigurationsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListBucketInventoryConfigurationsOutput + if rf, ok := ret.Get(1).(func(*s3.ListBucketInventoryConfigurationsInput) *s3.ListBucketInventoryConfigurationsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListBucketInventoryConfigurationsOutput) + } + } + + return r0, r1 +} + +// ListBucketInventoryConfigurationsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListBucketInventoryConfigurationsWithContext(_a0 context.Context, _a1 *s3.ListBucketInventoryConfigurationsInput, _a2 ...request.Option) (*s3.ListBucketInventoryConfigurationsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListBucketInventoryConfigurationsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListBucketInventoryConfigurationsInput, ...request.Option) *s3.ListBucketInventoryConfigurationsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketInventoryConfigurationsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListBucketInventoryConfigurationsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketMetricsConfigurations provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketMetricsConfigurations(_a0 *s3.ListBucketMetricsConfigurationsInput) (*s3.ListBucketMetricsConfigurationsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListBucketMetricsConfigurationsOutput + if rf, ok := ret.Get(0).(func(*s3.ListBucketMetricsConfigurationsInput) *s3.ListBucketMetricsConfigurationsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketMetricsConfigurationsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListBucketMetricsConfigurationsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketMetricsConfigurationsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketMetricsConfigurationsRequest(_a0 *s3.ListBucketMetricsConfigurationsInput) (*request.Request, *s3.ListBucketMetricsConfigurationsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListBucketMetricsConfigurationsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListBucketMetricsConfigurationsOutput + if rf, ok := ret.Get(1).(func(*s3.ListBucketMetricsConfigurationsInput) *s3.ListBucketMetricsConfigurationsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListBucketMetricsConfigurationsOutput) + } + } + + return r0, r1 +} + +// ListBucketMetricsConfigurationsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListBucketMetricsConfigurationsWithContext(_a0 context.Context, _a1 *s3.ListBucketMetricsConfigurationsInput, _a2 ...request.Option) (*s3.ListBucketMetricsConfigurationsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListBucketMetricsConfigurationsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListBucketMetricsConfigurationsInput, ...request.Option) *s3.ListBucketMetricsConfigurationsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketMetricsConfigurationsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListBucketMetricsConfigurationsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBuckets provides a mock function with given fields: _a0 +func (_m *S3API) ListBuckets(_a0 *s3.ListBucketsInput) (*s3.ListBucketsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListBucketsOutput + if rf, ok := ret.Get(0).(func(*s3.ListBucketsInput) *s3.ListBucketsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListBucketsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListBucketsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListBucketsRequest(_a0 *s3.ListBucketsInput) (*request.Request, *s3.ListBucketsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListBucketsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListBucketsOutput + if rf, ok := ret.Get(1).(func(*s3.ListBucketsInput) *s3.ListBucketsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListBucketsOutput) + } + } + + return r0, r1 +} + +// ListBucketsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListBucketsWithContext(_a0 context.Context, _a1 *s3.ListBucketsInput, _a2 ...request.Option) (*s3.ListBucketsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListBucketsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListBucketsInput, ...request.Option) *s3.ListBucketsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListBucketsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListBucketsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListMultipartUploads provides a mock function with given fields: _a0 +func (_m *S3API) ListMultipartUploads(_a0 *s3.ListMultipartUploadsInput) (*s3.ListMultipartUploadsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListMultipartUploadsOutput + if rf, ok := ret.Get(0).(func(*s3.ListMultipartUploadsInput) *s3.ListMultipartUploadsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListMultipartUploadsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListMultipartUploadsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListMultipartUploadsPages provides a mock function with given fields: _a0, _a1 +func (_m *S3API) ListMultipartUploadsPages(_a0 *s3.ListMultipartUploadsInput, _a1 func(*s3.ListMultipartUploadsOutput, bool) bool) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.ListMultipartUploadsInput, func(*s3.ListMultipartUploadsOutput, bool) bool) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListMultipartUploadsPagesWithContext provides a mock function with given fields: _a0, _a1, _a2, _a3 +func (_m *S3API) ListMultipartUploadsPagesWithContext(_a0 context.Context, _a1 *s3.ListMultipartUploadsInput, _a2 func(*s3.ListMultipartUploadsOutput, bool) bool, _a3 ...request.Option) error { + _va := make([]interface{}, len(_a3)) + for _i := range _a3 { + _va[_i] = _a3[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1, _a2) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListMultipartUploadsInput, func(*s3.ListMultipartUploadsOutput, bool) bool, ...request.Option) error); ok { + r0 = rf(_a0, _a1, _a2, _a3...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListMultipartUploadsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListMultipartUploadsRequest(_a0 *s3.ListMultipartUploadsInput) (*request.Request, *s3.ListMultipartUploadsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListMultipartUploadsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListMultipartUploadsOutput + if rf, ok := ret.Get(1).(func(*s3.ListMultipartUploadsInput) *s3.ListMultipartUploadsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListMultipartUploadsOutput) + } + } + + return r0, r1 +} + +// ListMultipartUploadsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListMultipartUploadsWithContext(_a0 context.Context, _a1 *s3.ListMultipartUploadsInput, _a2 ...request.Option) (*s3.ListMultipartUploadsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListMultipartUploadsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListMultipartUploadsInput, ...request.Option) *s3.ListMultipartUploadsOutput); ok { + r0 = rf(_a0, _a1, _a2...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListMultipartUploadsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListMultipartUploadsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListObjectVersions provides a mock function with given fields: _a0 +func (_m *S3API) ListObjectVersions(_a0 *s3.ListObjectVersionsInput) (*s3.ListObjectVersionsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListObjectVersionsOutput + if rf, ok := ret.Get(0).(func(*s3.ListObjectVersionsInput) *s3.ListObjectVersionsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListObjectVersionsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListObjectVersionsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListObjectVersionsPages provides a mock function with given fields: _a0, _a1 +func (_m *S3API) ListObjectVersionsPages(_a0 *s3.ListObjectVersionsInput, _a1 func(*s3.ListObjectVersionsOutput, bool) bool) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.ListObjectVersionsInput, func(*s3.ListObjectVersionsOutput, bool) bool) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListObjectVersionsPagesWithContext provides a mock function with given fields: _a0, _a1, _a2, _a3 +func (_m *S3API) ListObjectVersionsPagesWithContext(_a0 context.Context, _a1 *s3.ListObjectVersionsInput, _a2 func(*s3.ListObjectVersionsOutput, bool) bool, _a3 ...request.Option) error { + _va := make([]interface{}, len(_a3)) + for _i := range _a3 { + _va[_i] = _a3[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1, _a2) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListObjectVersionsInput, func(*s3.ListObjectVersionsOutput, bool) bool, ...request.Option) error); ok { + r0 = rf(_a0, _a1, _a2, _a3...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListObjectVersionsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListObjectVersionsRequest(_a0 *s3.ListObjectVersionsInput) (*request.Request, *s3.ListObjectVersionsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListObjectVersionsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListObjectVersionsOutput + if rf, ok := ret.Get(1).(func(*s3.ListObjectVersionsInput) *s3.ListObjectVersionsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListObjectVersionsOutput) + } + } + + return r0, r1 +} + +// ListObjectVersionsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListObjectVersionsWithContext(_a0 context.Context, _a1 *s3.ListObjectVersionsInput, _a2 ...request.Option) (*s3.ListObjectVersionsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListObjectVersionsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListObjectVersionsInput, ...request.Option) *s3.ListObjectVersionsOutput); ok { + r0 = rf(_a0, _a1, _a2...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListObjectVersionsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListObjectVersionsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListObjects provides a mock function with given fields: _a0 +func (_m *S3API) ListObjects(_a0 *s3.ListObjectsInput) (*s3.ListObjectsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListObjectsOutput + if rf, ok := ret.Get(0).(func(*s3.ListObjectsInput) *s3.ListObjectsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListObjectsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListObjectsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListObjectsPages provides a mock function with given fields: _a0, _a1 +func (_m *S3API) ListObjectsPages(_a0 *s3.ListObjectsInput, _a1 func(*s3.ListObjectsOutput, bool) bool) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.ListObjectsInput, func(*s3.ListObjectsOutput, bool) bool) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListObjectsPagesWithContext provides a mock function with given fields: _a0, _a1, _a2, _a3 +func (_m *S3API) ListObjectsPagesWithContext(_a0 context.Context, _a1 *s3.ListObjectsInput, _a2 func(*s3.ListObjectsOutput, bool) bool, _a3 ...request.Option) error { + _va := make([]interface{}, len(_a3)) + for _i := range _a3 { + _va[_i] = _a3[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1, _a2) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListObjectsInput, func(*s3.ListObjectsOutput, bool) bool, ...request.Option) error); ok { + r0 = rf(_a0, _a1, _a2, _a3...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListObjectsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListObjectsRequest(_a0 *s3.ListObjectsInput) (*request.Request, *s3.ListObjectsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListObjectsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListObjectsOutput + if rf, ok := ret.Get(1).(func(*s3.ListObjectsInput) *s3.ListObjectsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListObjectsOutput) + } + } + + return r0, r1 +} + +// ListObjectsV2 provides a mock function with given fields: _a0 +func (_m *S3API) ListObjectsV2(_a0 *s3.ListObjectsV2Input) (*s3.ListObjectsV2Output, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListObjectsV2Output + if rf, ok := ret.Get(0).(func(*s3.ListObjectsV2Input) *s3.ListObjectsV2Output); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListObjectsV2Output) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListObjectsV2Input) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListObjectsV2Pages provides a mock function with given fields: _a0, _a1 +func (_m *S3API) ListObjectsV2Pages(_a0 *s3.ListObjectsV2Input, _a1 func(*s3.ListObjectsV2Output, bool) bool) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.ListObjectsV2Input, func(*s3.ListObjectsV2Output, bool) bool) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListObjectsV2PagesWithContext provides a mock function with given fields: _a0, _a1, _a2, _a3 +func (_m *S3API) ListObjectsV2PagesWithContext(_a0 context.Context, _a1 *s3.ListObjectsV2Input, _a2 func(*s3.ListObjectsV2Output, bool) bool, _a3 ...request.Option) error { + _va := make([]interface{}, len(_a3)) + for _i := range _a3 { + _va[_i] = _a3[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1, _a2) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListObjectsV2Input, func(*s3.ListObjectsV2Output, bool) bool, ...request.Option) error); ok { + r0 = rf(_a0, _a1, _a2, _a3...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListObjectsV2Request provides a mock function with given fields: _a0 +func (_m *S3API) ListObjectsV2Request(_a0 *s3.ListObjectsV2Input) (*request.Request, *s3.ListObjectsV2Output) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListObjectsV2Input) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListObjectsV2Output + if rf, ok := ret.Get(1).(func(*s3.ListObjectsV2Input) *s3.ListObjectsV2Output); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListObjectsV2Output) + } + } + + return r0, r1 +} + +// ListObjectsV2WithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListObjectsV2WithContext(_a0 context.Context, _a1 *s3.ListObjectsV2Input, _a2 ...request.Option) (*s3.ListObjectsV2Output, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *s3.ListObjectsV2Output + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListObjectsV2Input, ...request.Option) *s3.ListObjectsV2Output); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListObjectsV2Output) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListObjectsV2Input, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListObjectsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListObjectsWithContext(_a0 context.Context, _a1 *s3.ListObjectsInput, _a2 ...request.Option) (*s3.ListObjectsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListObjectsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListObjectsInput, ...request.Option) *s3.ListObjectsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListObjectsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListObjectsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListParts provides a mock function with given fields: _a0 +func (_m *S3API) ListParts(_a0 *s3.ListPartsInput) (*s3.ListPartsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.ListPartsOutput + if rf, ok := ret.Get(0).(func(*s3.ListPartsInput) *s3.ListPartsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListPartsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.ListPartsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListPartsPages provides a mock function with given fields: _a0, _a1 +func (_m *S3API) ListPartsPages(_a0 *s3.ListPartsInput, _a1 func(*s3.ListPartsOutput, bool) bool) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.ListPartsInput, func(*s3.ListPartsOutput, bool) bool) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListPartsPagesWithContext provides a mock function with given fields: _a0, _a1, _a2, _a3 +func (_m *S3API) ListPartsPagesWithContext(_a0 context.Context, _a1 *s3.ListPartsInput, _a2 func(*s3.ListPartsOutput, bool) bool, _a3 ...request.Option) error { + _va := make([]interface{}, len(_a3)) + for _i := range _a3 { + _va[_i] = _a3[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1, _a2) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListPartsInput, func(*s3.ListPartsOutput, bool) bool, ...request.Option) error); ok { + r0 = rf(_a0, _a1, _a2, _a3...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ListPartsRequest provides a mock function with given fields: _a0 +func (_m *S3API) ListPartsRequest(_a0 *s3.ListPartsInput) (*request.Request, *s3.ListPartsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.ListPartsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.ListPartsOutput + if rf, ok := ret.Get(1).(func(*s3.ListPartsInput) *s3.ListPartsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.ListPartsOutput) + } + } + + return r0, r1 +} + +// ListPartsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) ListPartsWithContext(_a0 context.Context, _a1 *s3.ListPartsInput, _a2 ...request.Option) (*s3.ListPartsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.ListPartsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.ListPartsInput, ...request.Option) *s3.ListPartsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.ListPartsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.ListPartsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketAccelerateConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketAccelerateConfiguration(_a0 *s3.PutBucketAccelerateConfigurationInput) (*s3.PutBucketAccelerateConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketAccelerateConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketAccelerateConfigurationInput) *s3.PutBucketAccelerateConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketAccelerateConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketAccelerateConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketAccelerateConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketAccelerateConfigurationRequest(_a0 *s3.PutBucketAccelerateConfigurationInput) (*request.Request, *s3.PutBucketAccelerateConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketAccelerateConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketAccelerateConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketAccelerateConfigurationInput) *s3.PutBucketAccelerateConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketAccelerateConfigurationOutput) + } + } + + return r0, r1 +} + +// PutBucketAccelerateConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketAccelerateConfigurationWithContext(_a0 context.Context, _a1 *s3.PutBucketAccelerateConfigurationInput, _a2 ...request.Option) (*s3.PutBucketAccelerateConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca 
[]interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketAccelerateConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketAccelerateConfigurationInput, ...request.Option) *s3.PutBucketAccelerateConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketAccelerateConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketAccelerateConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketAcl provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketAcl(_a0 *s3.PutBucketAclInput) (*s3.PutBucketAclOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketAclOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketAclInput) *s3.PutBucketAclOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketAclInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketAclRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketAclRequest(_a0 *s3.PutBucketAclInput) (*request.Request, *s3.PutBucketAclOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketAclInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketAclOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketAclInput) *s3.PutBucketAclOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketAclOutput) + } + } + + return r0, r1 +} + +// PutBucketAclWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketAclWithContext(_a0 context.Context, _a1 *s3.PutBucketAclInput, _a2 ...request.Option) (*s3.PutBucketAclOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketAclOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketAclInput, ...request.Option) *s3.PutBucketAclOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketAclInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketAnalyticsConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketAnalyticsConfiguration(_a0 *s3.PutBucketAnalyticsConfigurationInput) (*s3.PutBucketAnalyticsConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketAnalyticsConfigurationInput) *s3.PutBucketAnalyticsConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketAnalyticsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketAnalyticsConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketAnalyticsConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketAnalyticsConfigurationRequest(_a0 *s3.PutBucketAnalyticsConfigurationInput) (*request.Request, *s3.PutBucketAnalyticsConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketAnalyticsConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketAnalyticsConfigurationInput) *s3.PutBucketAnalyticsConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketAnalyticsConfigurationOutput) + } + } + + return r0, r1 +} + +// PutBucketAnalyticsConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketAnalyticsConfigurationWithContext(_a0 context.Context, _a1 *s3.PutBucketAnalyticsConfigurationInput, _a2 ...request.Option) (*s3.PutBucketAnalyticsConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketAnalyticsConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketAnalyticsConfigurationInput, ...request.Option) *s3.PutBucketAnalyticsConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketAnalyticsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketAnalyticsConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketCors provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketCors(_a0 *s3.PutBucketCorsInput) (*s3.PutBucketCorsOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketCorsOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketCorsInput) *s3.PutBucketCorsOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketCorsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketCorsInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketCorsRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketCorsRequest(_a0 *s3.PutBucketCorsInput) (*request.Request, *s3.PutBucketCorsOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketCorsInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketCorsOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketCorsInput) *s3.PutBucketCorsOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketCorsOutput) + } + } + + return r0, r1 +} + +// PutBucketCorsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketCorsWithContext(_a0 context.Context, _a1 *s3.PutBucketCorsInput, _a2 ...request.Option) (*s3.PutBucketCorsOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketCorsOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketCorsInput, ...request.Option) *s3.PutBucketCorsOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketCorsOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketCorsInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketEncryption provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketEncryption(_a0 *s3.PutBucketEncryptionInput) (*s3.PutBucketEncryptionOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketEncryptionOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketEncryptionInput) *s3.PutBucketEncryptionOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketEncryptionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketEncryptionInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketEncryptionRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketEncryptionRequest(_a0 *s3.PutBucketEncryptionInput) (*request.Request, *s3.PutBucketEncryptionOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketEncryptionInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketEncryptionOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketEncryptionInput) *s3.PutBucketEncryptionOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketEncryptionOutput) + } + } + + return r0, r1 +} + +// PutBucketEncryptionWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketEncryptionWithContext(_a0 context.Context, _a1 *s3.PutBucketEncryptionInput, _a2 ...request.Option) (*s3.PutBucketEncryptionOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketEncryptionOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketEncryptionInput, ...request.Option) *s3.PutBucketEncryptionOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketEncryptionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketEncryptionInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketInventoryConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketInventoryConfiguration(_a0 *s3.PutBucketInventoryConfigurationInput) (*s3.PutBucketInventoryConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketInventoryConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketInventoryConfigurationInput) *s3.PutBucketInventoryConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketInventoryConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketInventoryConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketInventoryConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketInventoryConfigurationRequest(_a0 *s3.PutBucketInventoryConfigurationInput) (*request.Request, *s3.PutBucketInventoryConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketInventoryConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketInventoryConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketInventoryConfigurationInput) *s3.PutBucketInventoryConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketInventoryConfigurationOutput) + } + } + + return r0, r1 +} + +// PutBucketInventoryConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketInventoryConfigurationWithContext(_a0 context.Context, _a1 *s3.PutBucketInventoryConfigurationInput, _a2 ...request.Option) (*s3.PutBucketInventoryConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketInventoryConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketInventoryConfigurationInput, ...request.Option) *s3.PutBucketInventoryConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketInventoryConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketInventoryConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketLifecycle provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketLifecycle(_a0 *s3.PutBucketLifecycleInput) (*s3.PutBucketLifecycleOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketLifecycleOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketLifecycleInput) *s3.PutBucketLifecycleOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketLifecycleOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketLifecycleInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketLifecycleConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketLifecycleConfiguration(_a0 *s3.PutBucketLifecycleConfigurationInput) (*s3.PutBucketLifecycleConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketLifecycleConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketLifecycleConfigurationInput) *s3.PutBucketLifecycleConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketLifecycleConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketLifecycleConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketLifecycleConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketLifecycleConfigurationRequest(_a0 *s3.PutBucketLifecycleConfigurationInput) (*request.Request, *s3.PutBucketLifecycleConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketLifecycleConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketLifecycleConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketLifecycleConfigurationInput) *s3.PutBucketLifecycleConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketLifecycleConfigurationOutput) + } + } + + return r0, r1 +} + +// PutBucketLifecycleConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketLifecycleConfigurationWithContext(_a0 context.Context, _a1 *s3.PutBucketLifecycleConfigurationInput, _a2 ...request.Option) (*s3.PutBucketLifecycleConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketLifecycleConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketLifecycleConfigurationInput, ...request.Option) *s3.PutBucketLifecycleConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketLifecycleConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketLifecycleConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketLifecycleRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketLifecycleRequest(_a0 *s3.PutBucketLifecycleInput) (*request.Request, *s3.PutBucketLifecycleOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketLifecycleInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketLifecycleOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketLifecycleInput) *s3.PutBucketLifecycleOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketLifecycleOutput) + } + } + + return r0, r1 +} + +// PutBucketLifecycleWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketLifecycleWithContext(_a0 context.Context, _a1 *s3.PutBucketLifecycleInput, _a2 ...request.Option) (*s3.PutBucketLifecycleOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketLifecycleOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketLifecycleInput, ...request.Option) *s3.PutBucketLifecycleOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketLifecycleOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketLifecycleInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketLogging provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketLogging(_a0 *s3.PutBucketLoggingInput) (*s3.PutBucketLoggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketLoggingOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketLoggingInput) *s3.PutBucketLoggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketLoggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketLoggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketLoggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketLoggingRequest(_a0 *s3.PutBucketLoggingInput) (*request.Request, *s3.PutBucketLoggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketLoggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketLoggingOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketLoggingInput) *s3.PutBucketLoggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketLoggingOutput) + } + } + + return r0, r1 +} + +// PutBucketLoggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketLoggingWithContext(_a0 context.Context, _a1 *s3.PutBucketLoggingInput, _a2 ...request.Option) (*s3.PutBucketLoggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *s3.PutBucketLoggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketLoggingInput, ...request.Option) *s3.PutBucketLoggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketLoggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketLoggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketMetricsConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketMetricsConfiguration(_a0 *s3.PutBucketMetricsConfigurationInput) (*s3.PutBucketMetricsConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketMetricsConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketMetricsConfigurationInput) *s3.PutBucketMetricsConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketMetricsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketMetricsConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketMetricsConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketMetricsConfigurationRequest(_a0 *s3.PutBucketMetricsConfigurationInput) (*request.Request, *s3.PutBucketMetricsConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketMetricsConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketMetricsConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketMetricsConfigurationInput) *s3.PutBucketMetricsConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketMetricsConfigurationOutput) + } + } + + return r0, r1 +} + +// PutBucketMetricsConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketMetricsConfigurationWithContext(_a0 context.Context, _a1 *s3.PutBucketMetricsConfigurationInput, _a2 ...request.Option) (*s3.PutBucketMetricsConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketMetricsConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketMetricsConfigurationInput, ...request.Option) *s3.PutBucketMetricsConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketMetricsConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketMetricsConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketNotification provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketNotification(_a0 *s3.PutBucketNotificationInput) (*s3.PutBucketNotificationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketNotificationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketNotificationInput) *s3.PutBucketNotificationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketNotificationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketNotificationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketNotificationConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketNotificationConfiguration(_a0 *s3.PutBucketNotificationConfigurationInput) (*s3.PutBucketNotificationConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketNotificationConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketNotificationConfigurationInput) *s3.PutBucketNotificationConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketNotificationConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketNotificationConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketNotificationConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketNotificationConfigurationRequest(_a0 *s3.PutBucketNotificationConfigurationInput) (*request.Request, *s3.PutBucketNotificationConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketNotificationConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketNotificationConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketNotificationConfigurationInput) *s3.PutBucketNotificationConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketNotificationConfigurationOutput) + } + } + + return r0, r1 +} + +// PutBucketNotificationConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketNotificationConfigurationWithContext(_a0 context.Context, _a1 *s3.PutBucketNotificationConfigurationInput, _a2 ...request.Option) (*s3.PutBucketNotificationConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketNotificationConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketNotificationConfigurationInput, ...request.Option) *s3.PutBucketNotificationConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketNotificationConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketNotificationConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketNotificationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketNotificationRequest(_a0 *s3.PutBucketNotificationInput) (*request.Request, *s3.PutBucketNotificationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketNotificationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketNotificationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketNotificationInput) *s3.PutBucketNotificationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketNotificationOutput) + } + } + + return r0, r1 +} + +// PutBucketNotificationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketNotificationWithContext(_a0 context.Context, _a1 *s3.PutBucketNotificationInput, _a2 ...request.Option) (*s3.PutBucketNotificationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketNotificationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketNotificationInput, ...request.Option) *s3.PutBucketNotificationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketNotificationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketNotificationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketPolicy provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketPolicy(_a0 *s3.PutBucketPolicyInput) (*s3.PutBucketPolicyOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketPolicyOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketPolicyInput) *s3.PutBucketPolicyOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketPolicyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketPolicyInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketPolicyRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketPolicyRequest(_a0 *s3.PutBucketPolicyInput) (*request.Request, *s3.PutBucketPolicyOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketPolicyInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketPolicyOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketPolicyInput) *s3.PutBucketPolicyOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketPolicyOutput) + } + } + + return r0, r1 +} + +// PutBucketPolicyWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketPolicyWithContext(_a0 context.Context, _a1 *s3.PutBucketPolicyInput, _a2 ...request.Option) (*s3.PutBucketPolicyOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *s3.PutBucketPolicyOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketPolicyInput, ...request.Option) *s3.PutBucketPolicyOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketPolicyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketPolicyInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketReplication provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketReplication(_a0 *s3.PutBucketReplicationInput) (*s3.PutBucketReplicationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketReplicationOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketReplicationInput) *s3.PutBucketReplicationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketReplicationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketReplicationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketReplicationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketReplicationRequest(_a0 *s3.PutBucketReplicationInput) (*request.Request, *s3.PutBucketReplicationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketReplicationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketReplicationOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketReplicationInput) *s3.PutBucketReplicationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketReplicationOutput) + } + } + + return r0, r1 +} + +// PutBucketReplicationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketReplicationWithContext(_a0 context.Context, _a1 *s3.PutBucketReplicationInput, _a2 ...request.Option) (*s3.PutBucketReplicationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketReplicationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketReplicationInput, ...request.Option) *s3.PutBucketReplicationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketReplicationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketReplicationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketRequestPayment provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketRequestPayment(_a0 *s3.PutBucketRequestPaymentInput) (*s3.PutBucketRequestPaymentOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketRequestPaymentOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketRequestPaymentInput) *s3.PutBucketRequestPaymentOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketRequestPaymentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketRequestPaymentInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketRequestPaymentRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketRequestPaymentRequest(_a0 *s3.PutBucketRequestPaymentInput) (*request.Request, *s3.PutBucketRequestPaymentOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketRequestPaymentInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketRequestPaymentOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketRequestPaymentInput) *s3.PutBucketRequestPaymentOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketRequestPaymentOutput) + } + } + + return r0, r1 +} + +// PutBucketRequestPaymentWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketRequestPaymentWithContext(_a0 context.Context, _a1 *s3.PutBucketRequestPaymentInput, _a2 ...request.Option) (*s3.PutBucketRequestPaymentOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketRequestPaymentOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketRequestPaymentInput, ...request.Option) *s3.PutBucketRequestPaymentOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketRequestPaymentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketRequestPaymentInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketTagging provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketTagging(_a0 *s3.PutBucketTaggingInput) (*s3.PutBucketTaggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketTaggingOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketTaggingInput) *s3.PutBucketTaggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketTaggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketTaggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketTaggingRequest(_a0 *s3.PutBucketTaggingInput) (*request.Request, *s3.PutBucketTaggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketTaggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketTaggingOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketTaggingInput) *s3.PutBucketTaggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketTaggingOutput) + } + } + + return r0, r1 +} + +// PutBucketTaggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketTaggingWithContext(_a0 context.Context, _a1 *s3.PutBucketTaggingInput, _a2 ...request.Option) (*s3.PutBucketTaggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketTaggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketTaggingInput, ...request.Option) *s3.PutBucketTaggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketTaggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketVersioning provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketVersioning(_a0 *s3.PutBucketVersioningInput) (*s3.PutBucketVersioningOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketVersioningOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketVersioningInput) *s3.PutBucketVersioningOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketVersioningOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketVersioningInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketVersioningRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketVersioningRequest(_a0 *s3.PutBucketVersioningInput) (*request.Request, *s3.PutBucketVersioningOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketVersioningInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketVersioningOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketVersioningInput) *s3.PutBucketVersioningOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketVersioningOutput) + } + } + + return r0, r1 +} + +// PutBucketVersioningWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketVersioningWithContext(_a0 context.Context, _a1 *s3.PutBucketVersioningInput, _a2 ...request.Option) (*s3.PutBucketVersioningOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketVersioningOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketVersioningInput, ...request.Option) *s3.PutBucketVersioningOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketVersioningOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketVersioningInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketWebsite provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketWebsite(_a0 *s3.PutBucketWebsiteInput) (*s3.PutBucketWebsiteOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutBucketWebsiteOutput + if rf, ok := ret.Get(0).(func(*s3.PutBucketWebsiteInput) *s3.PutBucketWebsiteOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketWebsiteOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutBucketWebsiteInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutBucketWebsiteRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutBucketWebsiteRequest(_a0 *s3.PutBucketWebsiteInput) (*request.Request, *s3.PutBucketWebsiteOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutBucketWebsiteInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutBucketWebsiteOutput + if rf, ok := ret.Get(1).(func(*s3.PutBucketWebsiteInput) *s3.PutBucketWebsiteOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutBucketWebsiteOutput) + } + } + + return r0, r1 +} + +// PutBucketWebsiteWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutBucketWebsiteWithContext(_a0 context.Context, _a1 *s3.PutBucketWebsiteInput, _a2 ...request.Option) (*s3.PutBucketWebsiteOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutBucketWebsiteOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutBucketWebsiteInput, ...request.Option) *s3.PutBucketWebsiteOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutBucketWebsiteOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutBucketWebsiteInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObject provides a mock function with given fields: _a0 +func (_m *S3API) PutObject(_a0 *s3.PutObjectInput) (*s3.PutObjectOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutObjectOutput + if rf, ok := ret.Get(0).(func(*s3.PutObjectInput) *s3.PutObjectOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutObjectInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectAcl provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectAcl(_a0 *s3.PutObjectAclInput) (*s3.PutObjectAclOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutObjectAclOutput + if rf, ok := ret.Get(0).(func(*s3.PutObjectAclInput) *s3.PutObjectAclOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutObjectAclInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectAclRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectAclRequest(_a0 *s3.PutObjectAclInput) (*request.Request, *s3.PutObjectAclOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutObjectAclInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutObjectAclOutput + if rf, ok := ret.Get(1).(func(*s3.PutObjectAclInput) *s3.PutObjectAclOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutObjectAclOutput) + } + } + + return r0, r1 +} + +// PutObjectAclWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutObjectAclWithContext(_a0 context.Context, _a1 *s3.PutObjectAclInput, _a2 ...request.Option) (*s3.PutObjectAclOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutObjectAclOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutObjectAclInput, ...request.Option) *s3.PutObjectAclOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectAclOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutObjectAclInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectLegalHold provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectLegalHold(_a0 *s3.PutObjectLegalHoldInput) (*s3.PutObjectLegalHoldOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutObjectLegalHoldOutput + if rf, ok := ret.Get(0).(func(*s3.PutObjectLegalHoldInput) *s3.PutObjectLegalHoldOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectLegalHoldOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutObjectLegalHoldInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectLegalHoldRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectLegalHoldRequest(_a0 *s3.PutObjectLegalHoldInput) (*request.Request, *s3.PutObjectLegalHoldOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutObjectLegalHoldInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutObjectLegalHoldOutput + if rf, ok := ret.Get(1).(func(*s3.PutObjectLegalHoldInput) *s3.PutObjectLegalHoldOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutObjectLegalHoldOutput) + } + } + + return r0, r1 +} + +// PutObjectLegalHoldWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutObjectLegalHoldWithContext(_a0 context.Context, _a1 *s3.PutObjectLegalHoldInput, _a2 ...request.Option) (*s3.PutObjectLegalHoldOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutObjectLegalHoldOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutObjectLegalHoldInput, ...request.Option) *s3.PutObjectLegalHoldOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectLegalHoldOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutObjectLegalHoldInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectLockConfiguration provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectLockConfiguration(_a0 *s3.PutObjectLockConfigurationInput) (*s3.PutObjectLockConfigurationOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutObjectLockConfigurationOutput + if rf, ok := ret.Get(0).(func(*s3.PutObjectLockConfigurationInput) *s3.PutObjectLockConfigurationOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectLockConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutObjectLockConfigurationInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectLockConfigurationRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectLockConfigurationRequest(_a0 *s3.PutObjectLockConfigurationInput) (*request.Request, *s3.PutObjectLockConfigurationOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutObjectLockConfigurationInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutObjectLockConfigurationOutput + if rf, ok := ret.Get(1).(func(*s3.PutObjectLockConfigurationInput) *s3.PutObjectLockConfigurationOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutObjectLockConfigurationOutput) + } + } + + return r0, r1 +} + +// PutObjectLockConfigurationWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutObjectLockConfigurationWithContext(_a0 context.Context, _a1 *s3.PutObjectLockConfigurationInput, _a2 ...request.Option) (*s3.PutObjectLockConfigurationOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutObjectLockConfigurationOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutObjectLockConfigurationInput, ...request.Option) *s3.PutObjectLockConfigurationOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectLockConfigurationOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutObjectLockConfigurationInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectRequest(_a0 *s3.PutObjectInput) (*request.Request, *s3.PutObjectOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutObjectInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutObjectOutput + if rf, ok := ret.Get(1).(func(*s3.PutObjectInput) *s3.PutObjectOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutObjectOutput) + } + } + + return r0, r1 +} + +// PutObjectRetention provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectRetention(_a0 *s3.PutObjectRetentionInput) (*s3.PutObjectRetentionOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutObjectRetentionOutput + if rf, ok := ret.Get(0).(func(*s3.PutObjectRetentionInput) *s3.PutObjectRetentionOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectRetentionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutObjectRetentionInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectRetentionRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectRetentionRequest(_a0 *s3.PutObjectRetentionInput) (*request.Request, *s3.PutObjectRetentionOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutObjectRetentionInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutObjectRetentionOutput + if rf, ok := ret.Get(1).(func(*s3.PutObjectRetentionInput) *s3.PutObjectRetentionOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutObjectRetentionOutput) + } + } + + return r0, r1 +} + +// PutObjectRetentionWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutObjectRetentionWithContext(_a0 context.Context, _a1 *s3.PutObjectRetentionInput, _a2 ...request.Option) (*s3.PutObjectRetentionOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutObjectRetentionOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutObjectRetentionInput, ...request.Option) *s3.PutObjectRetentionOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectRetentionOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutObjectRetentionInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectTagging provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectTagging(_a0 *s3.PutObjectTaggingInput) (*s3.PutObjectTaggingOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutObjectTaggingOutput + if rf, ok := ret.Get(0).(func(*s3.PutObjectTaggingInput) *s3.PutObjectTaggingOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutObjectTaggingInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectTaggingRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutObjectTaggingRequest(_a0 *s3.PutObjectTaggingInput) (*request.Request, *s3.PutObjectTaggingOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutObjectTaggingInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutObjectTaggingOutput + if rf, ok := ret.Get(1).(func(*s3.PutObjectTaggingInput) *s3.PutObjectTaggingOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutObjectTaggingOutput) + } + } + + return r0, r1 +} + +// PutObjectTaggingWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutObjectTaggingWithContext(_a0 context.Context, _a1 *s3.PutObjectTaggingInput, _a2 ...request.Option) (*s3.PutObjectTaggingOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutObjectTaggingOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutObjectTaggingInput, ...request.Option) *s3.PutObjectTaggingOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectTaggingOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutObjectTaggingInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutObjectWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutObjectWithContext(_a0 context.Context, _a1 *s3.PutObjectInput, _a2 ...request.Option) (*s3.PutObjectOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutObjectOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutObjectInput, ...request.Option) *s3.PutObjectOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutObjectInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutPublicAccessBlock provides a mock function with given fields: _a0 +func (_m *S3API) PutPublicAccessBlock(_a0 *s3.PutPublicAccessBlockInput) (*s3.PutPublicAccessBlockOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.PutPublicAccessBlockOutput + if rf, ok := ret.Get(0).(func(*s3.PutPublicAccessBlockInput) *s3.PutPublicAccessBlockOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutPublicAccessBlockOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.PutPublicAccessBlockInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PutPublicAccessBlockRequest provides a mock function with given fields: _a0 +func (_m *S3API) PutPublicAccessBlockRequest(_a0 *s3.PutPublicAccessBlockInput) (*request.Request, *s3.PutPublicAccessBlockOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.PutPublicAccessBlockInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.PutPublicAccessBlockOutput + if rf, ok := ret.Get(1).(func(*s3.PutPublicAccessBlockInput) *s3.PutPublicAccessBlockOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.PutPublicAccessBlockOutput) + } + } + + return r0, r1 +} + +// PutPublicAccessBlockWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) PutPublicAccessBlockWithContext(_a0 context.Context, _a1 *s3.PutPublicAccessBlockInput, _a2 ...request.Option) (*s3.PutPublicAccessBlockOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.PutPublicAccessBlockOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.PutPublicAccessBlockInput, ...request.Option) *s3.PutPublicAccessBlockOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.PutPublicAccessBlockOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.PutPublicAccessBlockInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RestoreObject provides a mock function with given fields: _a0 +func (_m *S3API) RestoreObject(_a0 *s3.RestoreObjectInput) (*s3.RestoreObjectOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.RestoreObjectOutput + if rf, ok := ret.Get(0).(func(*s3.RestoreObjectInput) *s3.RestoreObjectOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.RestoreObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.RestoreObjectInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RestoreObjectRequest provides a mock function with given fields: _a0 +func (_m *S3API) RestoreObjectRequest(_a0 *s3.RestoreObjectInput) (*request.Request, *s3.RestoreObjectOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.RestoreObjectInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.RestoreObjectOutput + if rf, ok := ret.Get(1).(func(*s3.RestoreObjectInput) *s3.RestoreObjectOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.RestoreObjectOutput) + } + } + + return r0, r1 +} + +// RestoreObjectWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) RestoreObjectWithContext(_a0 context.Context, _a1 *s3.RestoreObjectInput, _a2 ...request.Option) (*s3.RestoreObjectOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.RestoreObjectOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.RestoreObjectInput, ...request.Option) *s3.RestoreObjectOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.RestoreObjectOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.RestoreObjectInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SelectObjectContent provides a mock function with given fields: _a0 +func (_m *S3API) SelectObjectContent(_a0 *s3.SelectObjectContentInput) (*s3.SelectObjectContentOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.SelectObjectContentOutput + if rf, ok := ret.Get(0).(func(*s3.SelectObjectContentInput) *s3.SelectObjectContentOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.SelectObjectContentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.SelectObjectContentInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SelectObjectContentRequest provides a mock function with given fields: _a0 +func (_m *S3API) SelectObjectContentRequest(_a0 *s3.SelectObjectContentInput) (*request.Request, *s3.SelectObjectContentOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.SelectObjectContentInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.SelectObjectContentOutput + if rf, ok := ret.Get(1).(func(*s3.SelectObjectContentInput) *s3.SelectObjectContentOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.SelectObjectContentOutput) + } + } + + return r0, r1 +} + +// SelectObjectContentWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) SelectObjectContentWithContext(_a0 context.Context, _a1 *s3.SelectObjectContentInput, _a2 ...request.Option) (*s3.SelectObjectContentOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.SelectObjectContentOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.SelectObjectContentInput, ...request.Option) *s3.SelectObjectContentOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.SelectObjectContentOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.SelectObjectContentInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UploadPart provides a mock function with given fields: _a0 +func (_m *S3API) UploadPart(_a0 *s3.UploadPartInput) (*s3.UploadPartOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.UploadPartOutput + if rf, ok := ret.Get(0).(func(*s3.UploadPartInput) *s3.UploadPartOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.UploadPartOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.UploadPartInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UploadPartCopy provides a mock function with given fields: _a0 +func (_m *S3API) UploadPartCopy(_a0 *s3.UploadPartCopyInput) (*s3.UploadPartCopyOutput, error) { + ret := _m.Called(_a0) + + var r0 *s3.UploadPartCopyOutput + if rf, ok := ret.Get(0).(func(*s3.UploadPartCopyInput) *s3.UploadPartCopyOutput); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.UploadPartCopyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*s3.UploadPartCopyInput) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UploadPartCopyRequest provides a mock function with given fields: _a0 +func (_m *S3API) UploadPartCopyRequest(_a0 *s3.UploadPartCopyInput) (*request.Request, *s3.UploadPartCopyOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.UploadPartCopyInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.UploadPartCopyOutput + if rf, ok := ret.Get(1).(func(*s3.UploadPartCopyInput) *s3.UploadPartCopyOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.UploadPartCopyOutput) + } + } + + return r0, r1 +} + +// UploadPartCopyWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) UploadPartCopyWithContext(_a0 context.Context, _a1 *s3.UploadPartCopyInput, _a2 ...request.Option) (*s3.UploadPartCopyOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.UploadPartCopyOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.UploadPartCopyInput, ...request.Option) *s3.UploadPartCopyOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.UploadPartCopyOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.UploadPartCopyInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UploadPartRequest provides a mock function with given fields: _a0 +func (_m *S3API) UploadPartRequest(_a0 *s3.UploadPartInput) (*request.Request, *s3.UploadPartOutput) { + ret := _m.Called(_a0) + + var r0 *request.Request + if rf, ok := ret.Get(0).(func(*s3.UploadPartInput) *request.Request); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*request.Request) + } + } + + var r1 *s3.UploadPartOutput + if rf, ok := ret.Get(1).(func(*s3.UploadPartInput) *s3.UploadPartOutput); ok { + r1 = rf(_a0) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*s3.UploadPartOutput) + } + } + + return r0, r1 +} + +// UploadPartWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) UploadPartWithContext(_a0 context.Context, _a1 *s3.UploadPartInput, _a2 ...request.Option) (*s3.UploadPartOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *s3.UploadPartOutput + if rf, ok := ret.Get(0).(func(context.Context, *s3.UploadPartInput, ...request.Option) *s3.UploadPartOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*s3.UploadPartOutput) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *s3.UploadPartInput, ...request.Option) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// WaitUntilBucketExists provides a mock function with given fields: _a0 +func (_m *S3API) WaitUntilBucketExists(_a0 *s3.HeadBucketInput) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.HeadBucketInput) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilBucketExistsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) WaitUntilBucketExistsWithContext(_a0 context.Context, _a1 *s3.HeadBucketInput, _a2 ...request.WaiterOption) error { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.HeadBucketInput, ...request.WaiterOption) error); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilBucketNotExists provides a mock function with given fields: _a0 +func (_m *S3API) WaitUntilBucketNotExists(_a0 *s3.HeadBucketInput) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.HeadBucketInput) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilBucketNotExistsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) WaitUntilBucketNotExistsWithContext(_a0 context.Context, _a1 *s3.HeadBucketInput, _a2 ...request.WaiterOption) error { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.HeadBucketInput, ...request.WaiterOption) error); ok { + r0 = rf(_a0, _a1, _a2...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilObjectExists provides a mock function with given fields: _a0 +func (_m *S3API) WaitUntilObjectExists(_a0 *s3.HeadObjectInput) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.HeadObjectInput) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilObjectExistsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) WaitUntilObjectExistsWithContext(_a0 context.Context, _a1 *s3.HeadObjectInput, _a2 ...request.WaiterOption) error { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.HeadObjectInput, ...request.WaiterOption) error); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilObjectNotExists provides a mock function with given fields: _a0 +func (_m *S3API) WaitUntilObjectNotExists(_a0 *s3.HeadObjectInput) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(*s3.HeadObjectInput) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WaitUntilObjectNotExistsWithContext provides a mock function with given fields: _a0, _a1, _a2 +func (_m *S3API) WaitUntilObjectNotExistsWithContext(_a0 context.Context, _a1 *s3.HeadObjectInput, _a2 ...request.WaiterOption) error { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *s3.HeadObjectInput, ...request.WaiterOption) error); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/common/archiver/s3store/queryParser.go b/common/archiver/s3store/queryParser.go new file mode 100644 index 00000000000..fcbcd311234 --- /dev/null +++ b/common/archiver/s3store/queryParser.go @@ -0,0 +1,226 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
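+
+// Illustrative usage of the parser defined below (a sketch only: the query
+// shape follows the cases in queryParser_test.go, and the workflow ID and
+// timestamp shown here are hypothetical examples, not real values):
+//
+//	parser := NewQueryParser()
+//	parsed, err := parser.Parse(
+//		"WorkflowID = 'some-workflow-id' AND CloseTime = '2020-01-21T00:00:00Z' AND SearchPrecision = 'Hour'")
+//	// On success, parsed.workflowID, parsed.closeTime (as UnixNano) and
+//	// parsed.searchPrecision are populated. Only '=' comparisons combined
+//	// with AND (and parentheses) are understood.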
+
+//go:generate mockgen -copyright_file ../../../LICENSE -package $GOPACKAGE -source queryParser.go -destination queryParser_mock.go -mock_names Interface=MockQueryParser
+
+package s3store
+
+import (
+	"errors"
+	"fmt"
+	"strconv"
+	"time"
+
+	"github.com/xwb1989/sqlparser"
+
+	"github.com/uber/cadence/common"
+)
+
+type (
+	// QueryParser parses a limited SQL where clause into a struct
+	QueryParser interface {
+		Parse(query string) (*parsedQuery, error)
+	}
+
+	queryParser struct{}
+
+	parsedQuery struct {
+		workflowID      *string
+		startTime       *int64
+		closeTime       *int64
+		searchPrecision *string
+		emptyResult     bool
+	}
+)
+
+// All allowed fields for filtering
+const (
+	WorkflowID      = "WorkflowID"
+	StartTime       = "StartTime"
+	CloseTime       = "CloseTime"
+	SearchPrecision = "SearchPrecision"
+)
+
+// Precision specific values
+const (
+	PrecisionDay    = "Day"
+	PrecisionHour   = "Hour"
+	PrecisionMinute = "Minute"
+	PrecisionSecond = "Second"
+)
+const (
+	queryTemplate         = "select * from dummy where %s"
+	defaultDateTimeFormat = time.RFC3339
+)
+
+// NewQueryParser creates a new query parser for s3store
+func NewQueryParser() QueryParser {
+	return &queryParser{}
+}
+
+func (p *queryParser) Parse(query string) (*parsedQuery, error) {
+	stmt, err := sqlparser.Parse(fmt.Sprintf(queryTemplate, query))
+	if err != nil {
+		return nil, err
+	}
+	whereExpr := stmt.(*sqlparser.Select).Where.Expr
+	parsedQuery := &parsedQuery{}
+	if err := p.convertWhereExpr(whereExpr, parsedQuery); err != nil {
+		return nil, err
+	}
+	if parsedQuery.workflowID == nil {
+		return nil, errors.New("workflowID is required in query")
+	}
+	if parsedQuery.closeTime != nil && parsedQuery.startTime != nil {
+		return nil, errors.New("only one of StartTime or CloseTime can be specified in a query")
+	}
+	if (parsedQuery.closeTime != nil || parsedQuery.startTime != nil) && parsedQuery.searchPrecision == nil {
+		return nil, errors.New("SearchPrecision is required when searching for a StartTime or CloseTime")
+	}
+
+	if parsedQuery.closeTime == nil && parsedQuery.startTime == nil && parsedQuery.searchPrecision != nil {
+		return nil, errors.New("SearchPrecision requires a StartTime or CloseTime")
+	}
+	return parsedQuery, nil
+}
+
+func (p *queryParser) convertWhereExpr(expr sqlparser.Expr, parsedQuery *parsedQuery) error {
+	if expr == nil {
+		return errors.New("where expression is nil")
+	}
+
+	switch expr.(type) {
+	case *sqlparser.ComparisonExpr:
+		return p.convertComparisonExpr(expr.(*sqlparser.ComparisonExpr), parsedQuery)
+	case *sqlparser.AndExpr:
+		return p.convertAndExpr(expr.(*sqlparser.AndExpr), parsedQuery)
+	case *sqlparser.ParenExpr:
+		return p.convertParenExpr(expr.(*sqlparser.ParenExpr), parsedQuery)
+	default:
+		return errors.New("only comparison and \"and\" expressions are supported")
+	}
+}
+
+func (p *queryParser) convertParenExpr(parenExpr *sqlparser.ParenExpr, parsedQuery *parsedQuery) error {
+	return p.convertWhereExpr(parenExpr.Expr, parsedQuery)
+}
+
+func (p *queryParser) convertAndExpr(andExpr *sqlparser.AndExpr, parsedQuery *parsedQuery) error {
+	if err := p.convertWhereExpr(andExpr.Left, parsedQuery); err != nil {
+		return err
+	}
+	return p.convertWhereExpr(andExpr.Right, parsedQuery)
+}
+
+func (p *queryParser) convertComparisonExpr(compExpr *sqlparser.ComparisonExpr, parsedQuery *parsedQuery) error {
+	colName, ok := compExpr.Left.(*sqlparser.ColName)
+	if !ok {
+		return fmt.Errorf("invalid filter name: %s", sqlparser.String(compExpr.Left))
+	}
+	colNameStr := sqlparser.String(colName)
+	op := compExpr.Operator
+	valExpr, ok := compExpr.Right.(*sqlparser.SQLVal)
+	if !ok {
+		return fmt.Errorf("invalid value: %s", sqlparser.String(compExpr.Right))
+	}
+	valStr := sqlparser.String(valExpr)
+
+	switch colNameStr {
+	case WorkflowID:
+		val, err := extractStringValue(valStr)
+		if err != nil {
+			return err
+		}
+		if op != "=" {
+			return fmt.Errorf("only operation = is supported for %s", WorkflowID)
+		}
+		if parsedQuery.workflowID != nil && *parsedQuery.workflowID != val {
+			parsedQuery.emptyResult = true
+			return nil
+		}
+		parsedQuery.workflowID = common.StringPtr(val)
+	case CloseTime:
+		timestamp, err := convertToTimestamp(valStr)
+		if err != nil {
+			return err
+		}
+		if op != "=" {
+			return fmt.Errorf("only operation = is supported for %s", CloseTime)
+		}
+		parsedQuery.closeTime = &timestamp
+	case StartTime:
+		timestamp, err := convertToTimestamp(valStr)
+		if err != nil {
+			return err
+		}
+		if op != "=" {
+			return fmt.Errorf("only operation = is supported for %s", StartTime)
+		}
+		parsedQuery.startTime = &timestamp
+	case SearchPrecision:
+		val, err := extractStringValue(valStr)
+		if err != nil {
+			return err
+		}
+		if op != "=" {
+			return fmt.Errorf("only operation = is supported for %s", SearchPrecision)
+		}
+		if parsedQuery.searchPrecision != nil && *parsedQuery.searchPrecision != val {
+			return fmt.Errorf("only one expression is allowed for %s", SearchPrecision)
+		}
+		switch val {
+		case PrecisionDay:
+		case PrecisionHour:
+		case PrecisionMinute:
+		case PrecisionSecond:
+		default:
+			return fmt.Errorf("invalid value for %s: %s", SearchPrecision, val)
+		}
+		parsedQuery.searchPrecision = common.StringPtr(val)
+
+	default:
+		return fmt.Errorf("unknown filter name: %s", colNameStr)
+	}
+
+	return nil
+}
+
+func convertToTimestamp(timeStr string) (int64, error) {
+	timestamp, err := strconv.ParseInt(timeStr, 10, 64)
+	if err == nil {
+		return timestamp, nil
+	}
+	timestampStr, err := extractStringValue(timeStr)
+	if err != nil {
+		return 0, err
+	}
+	parsedTime, err := time.Parse(defaultDateTimeFormat, timestampStr)
+	if err != nil {
+		return 0, err
+	}
+	return parsedTime.UnixNano(), nil
+}
+
+func extractStringValue(s string) (string, error) {
+	if len(s) >= 2 && s[0] == '\'' && s[len(s)-1] == '\'' {
+		return s[1 : len(s)-1], nil
+	}
+	return "", fmt.Errorf("value %s is not a string value", s)
+}
diff --git a/common/archiver/s3store/queryParser_mock.go b/common/archiver/s3store/queryParser_mock.go
new file mode 100644
index 00000000000..4a6dd6c6ff7
--- /dev/null
+++ b/common/archiver/s3store/queryParser_mock.go
@@ -0,0 +1,72 @@
+// The MIT License (MIT)
+//
+// Copyright (c) 2020 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. +// + +// Code generated by MockGen. DO NOT EDIT. +// Source: queryParser.go + +// Package s3store is a generated GoMock package. +package s3store + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockQueryParser is a mock of QueryParser interface +type MockQueryParser struct { + ctrl *gomock.Controller + recorder *MockQueryParserMockRecorder +} + +// MockQueryParserMockRecorder is the mock recorder for MockQueryParser +type MockQueryParserMockRecorder struct { + mock *MockQueryParser +} + +// NewMockQueryParser creates a new mock instance +func NewMockQueryParser(ctrl *gomock.Controller) *MockQueryParser { + mock := &MockQueryParser{ctrl: ctrl} + mock.recorder = &MockQueryParserMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockQueryParser) EXPECT() *MockQueryParserMockRecorder { + return m.recorder +} + +// Parse mocks base method +func (m *MockQueryParser) Parse(query string) (*parsedQuery, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Parse", query) + ret0, _ := ret[0].(*parsedQuery) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Parse indicates an expected call of Parse +func (mr *MockQueryParserMockRecorder) Parse(query interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Parse", reflect.TypeOf((*MockQueryParser)(nil).Parse), query) +} diff --git a/common/archiver/s3store/queryParser_test.go b/common/archiver/s3store/queryParser_test.go new file mode 100644 index 00000000000..e4e0526de8c --- /dev/null +++ b/common/archiver/s3store/queryParser_test.go @@ -0,0 +1,268 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +package s3store + +import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + + "github.com/uber/cadence/common" +) + +type queryParserSuite struct { + *require.Assertions + suite.Suite + + parser QueryParser +} + +func TestQueryParserSuite(t *testing.T) { + suite.Run(t, new(queryParserSuite)) +} + +func (s *queryParserSuite) SetupTest() { + s.Assertions = require.New(s.T()) + s.parser = NewQueryParser() +} + +func (s *queryParserSuite) TestParseWorkflowID() { + testCases := []struct { + query string + expectErr bool + parsedQuery *parsedQuery + }{ + { + query: "WorkflowID = \"random workflowID\"", + expectErr: false, + parsedQuery: &parsedQuery{ + workflowID: common.StringPtr("random workflowID"), + }, + }, + { + query: "WorkflowID = \"random workflowID\" and WorkflowID = \"random workflowID\"", + expectErr: false, + parsedQuery: &parsedQuery{ + workflowID: common.StringPtr("random workflowID"), + }, + }, + { + query: "RunID = \"random runID\"", + expectErr: true, + }, + { + query: "WorkflowID = 'random workflowID'", + expectErr: false, + parsedQuery: &parsedQuery{ + workflowID: common.StringPtr("random workflowID"), + }, + }, + { + query: "(WorkflowID = \"random workflowID\")", + expectErr: false, + parsedQuery: &parsedQuery{ + workflowID: common.StringPtr("random workflowID"), + }, + }, + { + query: "runID = random workflowID", + expectErr: true, + }, + { + query: "WorkflowID = \"random workflowID\" or WorkflowID = \"another workflowID\"", + expectErr: true, + }, + { + query: "WorkflowID = \"random workflowID\" or runID = \"random runID\"", + expectErr: true, + }, + { + query: "workflowid = \"random workflowID\"", + expectErr: true, + }, + { + query: "runID > \"random workflowID\"", + expectErr: true, + }, + } + + for _, tc := range testCases { + parsedQuery, err := s.parser.Parse(tc.query) + if tc.expectErr { + s.Error(err) + continue + } + s.NoError(err) + s.Equal(tc.parsedQuery.emptyResult, parsedQuery.emptyResult) + if !tc.parsedQuery.emptyResult { + s.Equal(tc.parsedQuery.workflowID, parsedQuery.workflowID) + } + } +} + +func (s *queryParserSuite) TestParsePrecision() { + commonQueryPart := "WorkflowID = \"random workflowID\" AND " + testCases := []struct { + query string + expectErr bool + parsedQuery *parsedQuery + }{ + { + query: commonQueryPart + "CloseTime = 1000 and SearchPrecision = 'Day'", + expectErr: false, + parsedQuery: &parsedQuery{ + searchPrecision: common.StringPtr(PrecisionDay), + }, + }, + { + query: commonQueryPart + "CloseTime = 1000 and SearchPrecision = 'Hour'", + expectErr: false, + parsedQuery: &parsedQuery{ + searchPrecision: common.StringPtr(PrecisionHour), + }, + }, + { + query: commonQueryPart + "CloseTime = 1000 and SearchPrecision = 'Minute'", + expectErr: false, + parsedQuery: &parsedQuery{ + searchPrecision: common.StringPtr(PrecisionMinute), + }, + }, + { + query: commonQueryPart + "StartTime = 1000 and SearchPrecision = 'Second'", + expectErr: false, + parsedQuery: &parsedQuery{ + searchPrecision: common.StringPtr(PrecisionSecond), + }, + }, + { + query: commonQueryPart + "SearchPrecision = 'Second'", + expectErr: true, + }, + { + query: commonQueryPart + "SearchPrecision = 'Invalid string'", + expectErr: true, + }, + } + + for _, tc := range testCases { + parsedQuery, err := s.parser.Parse(tc.query) + if tc.expectErr { + s.Error(err) + continue + } + s.NoError(err) + s.Equal(tc.parsedQuery.searchPrecision, parsedQuery.searchPrecision) + } +} + +func (s *queryParserSuite) TestParseCloseTime() { + 
commonQueryPart := "WorkflowID = \"random workflowID\" AND SearchPrecision = 'Day' AND "
+
+	testCases := []struct {
+		query       string
+		expectErr   bool
+		parsedQuery *parsedQuery
+	}{
+		{
+			query:     commonQueryPart + "CloseTime = 1000",
+			expectErr: false,
+			parsedQuery: &parsedQuery{
+				closeTime: common.Int64Ptr(1000),
+			},
+		},
+		{
+			query:     commonQueryPart + "CloseTime = \"2019-01-01T11:11:11Z\"",
+			expectErr: false,
+			parsedQuery: &parsedQuery{
+				closeTime: common.Int64Ptr(1546341071000000000),
+			},
+		},
+		{
+			query:     commonQueryPart + "closeTime = 2000",
+			expectErr: true,
+		},
+		{
+			query:     commonQueryPart + "CloseTime > \"2019-01-01 00:00:00\"",
+			expectErr: true,
+		},
+	}
+
+	for _, tc := range testCases {
+		parsedQuery, err := s.parser.Parse(tc.query)
+		if tc.expectErr {
+			s.Error(err)
+			continue
+		}
+		s.NoError(err)
+		s.Equal(tc.parsedQuery.emptyResult, parsedQuery.emptyResult)
+		if !tc.parsedQuery.emptyResult {
+			s.Equal(tc.parsedQuery.closeTime, parsedQuery.closeTime)
+		}
+	}
+}
+
+func (s *queryParserSuite) TestParseStartTime() {
+	commonQueryPart := "WorkflowID = \"random workflowID\" AND SearchPrecision = 'Day' AND "
+
+	testCases := []struct {
+		query       string
+		expectErr   bool
+		parsedQuery *parsedQuery
+	}{
+		{
+			query:     commonQueryPart + "StartTime = 1000",
+			expectErr: false,
+			parsedQuery: &parsedQuery{
+				startTime: common.Int64Ptr(1000),
+			},
+		},
+		{
+			query:     commonQueryPart + "StartTime = \"2019-01-01T11:11:11Z\"",
+			expectErr: false,
+			parsedQuery: &parsedQuery{
+				startTime: common.Int64Ptr(1546341071000000000),
+			},
+		},
+		{
+			query:     commonQueryPart + "startTime = 2000",
+			expectErr: true,
+		},
+		{
+			query:     commonQueryPart + "StartTime > \"2019-01-01 00:00:00\"",
+			expectErr: true,
+		},
+	}
+
+	for _, tc := range testCases {
+		parsedQuery, err := s.parser.Parse(tc.query)
+		if tc.expectErr {
+			s.Error(err)
+			continue
+		}
+		s.NoError(err)
+		s.Equal(tc.parsedQuery.emptyResult, parsedQuery.emptyResult)
+		if !tc.parsedQuery.emptyResult {
+			s.Equal(tc.parsedQuery.startTime, parsedQuery.startTime)
+		}
+	}
+}
diff --git a/common/archiver/s3store/util.go b/common/archiver/s3store/util.go
new file mode 100644
index 00000000000..66bd5ea482e
--- /dev/null
+++ b/common/archiver/s3store/util.go
@@ -0,0 +1,237 @@
+// Copyright (c) 2020 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
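+
+// Object key layout used by this package (an illustrative sketch derived from
+// the construct* helpers below; the <angle-bracket> names are placeholders,
+// not literal values):
+//
+//	history:    <path>/<domainID>/<workflowID>/history/<runID>/<version>
+//	visibility: <path>/<domainID>/<workflowID>/visibility/<indexType>/<RFC3339 timestamp>/<runID>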
+
+package s3store
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"strings"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/awserr"
+	"github.com/aws/aws-sdk-go/service/s3"
+	"github.com/aws/aws-sdk-go/service/s3/s3iface"
+	"go.uber.org/multierr"
+
+	"github.com/uber/cadence/common"
+
+	"github.com/uber/cadence/.gen/go/shared"
+	"github.com/uber/cadence/common/archiver"
+)
+
+// encoding & decoding utils
+
+func encode(v interface{}) ([]byte, error) {
+	return json.Marshal(v)
+}
+
+func decodeHistoryBatches(data []byte) ([]*shared.History, error) {
+	historyBatches := []*shared.History{}
+	err := json.Unmarshal(data, &historyBatches)
+	if err != nil {
+		return nil, err
+	}
+	return historyBatches, nil
+}
+func decodeVisibilityRecord(data []byte) (*visibilityRecord, error) {
+	record := &visibilityRecord{}
+	err := json.Unmarshal(data, record)
+	if err != nil {
+		return nil, err
+	}
+	return record, nil
+}
+
+func serializeToken(token interface{}) ([]byte, error) {
+	if token == nil {
+		return nil, nil
+	}
+	return json.Marshal(token)
+}
+
+func deserializeGetHistoryToken(bytes []byte) (*getHistoryToken, error) {
+	token := &getHistoryToken{}
+	err := json.Unmarshal(bytes, token)
+	return token, err
+}
+
+func deserializeQueryVisibilityToken(bytes []byte) *string {
+	var ret = string(bytes)
+	return &ret
+}
+func serializeQueryVisibilityToken(token string) []byte {
+	return []byte(token)
+}
+
+// softValidateURI validates only that the scheme matches and a bucket is
+// specified; it does not check that the bucket actually exists
+func softValidateURI(URI archiver.URI) error {
+	if URI.Scheme() != URIScheme {
+		return archiver.ErrURISchemeMismatch
+	}
+	if len(URI.Hostname()) == 0 {
+		return errNoBucketSpecified
+	}
+	return nil
+}
+
+func bucketExists(ctx context.Context, s3cli s3iface.S3API, URI archiver.URI) error {
+	ctx, cancel := ensureContextTimeout(ctx)
+	defer cancel()
+	_, err := s3cli.HeadBucketWithContext(ctx, &s3.HeadBucketInput{
+		Bucket: aws.String(URI.Hostname()),
+	})
+	if err == nil {
+		return nil
+	}
+	if aerr, ok := err.(awserr.Error); ok {
+		if aerr.Code() == "NotFound" {
+			return errBucketNotExists
+		}
+	}
+	return err
+}
+
+// Key construction
+func constructHistoryKey(path, domainID, workflowID, runID string, version int64) string {
+	prefix := constructHistoryKeyPrefix(path, domainID, workflowID, runID)
+	return fmt.Sprintf("%s/%v", prefix, version)
+}
+
+func constructHistoryKeyPrefix(path, domainID, workflowID, runID string) string {
+	return fmt.Sprintf("%s/%s", constructCommonKeyPrefix(path, domainID, workflowID, "history"), runID)
+}
+
+func constructVisibilitySearchPrefix(path, domainID, workflowID, indexType string) string {
+	return constructCommonKeyPrefix(path, domainID, workflowID, fmt.Sprintf("visibility/%s", indexType))
+}
+
+// Trim any leading slashes so keys do not start with "/" when path is empty
+func constructCommonKeyPrefix(path, domainID, workflowID, entryType string) string {
+	return strings.TrimLeft(strings.Join([]string{path, domainID, workflowID, entryType}, "/"), "/")
+}
+
+func ensureContextTimeout(ctx context.Context) (context.Context, context.CancelFunc) {
+	if _, ok := ctx.Deadline(); ok {
+		return ctx, func() {}
+	}
+	return context.WithTimeout(ctx, defaultBlobstoreTimeout)
+}
+func upload(ctx context.Context, s3cli s3iface.S3API, URI archiver.URI, key string, data []byte) error {
+	ctx, cancel := ensureContextTimeout(ctx)
+	defer cancel()
+
+	_, err := s3cli.PutObjectWithContext(ctx, &s3.PutObjectInput{
+		Bucket: aws.String(URI.Hostname()),
+		Key:    aws.String(key),
+		Body:   bytes.NewReader(data),
+	})
+	
if err != nil { + if aerr, ok := err.(awserr.Error); ok { + if aerr.Code() == s3.ErrCodeNoSuchBucket { + return &shared.BadRequestError{Message: errBucketNotExists.Error()} + } + } + return err + } + return nil +} + +func download(ctx context.Context, s3cli s3iface.S3API, URI archiver.URI, key string) ([]byte, error) { + ctx, cancel := ensureContextTimeout(ctx) + defer cancel() + result, err := s3cli.GetObjectWithContext(ctx, &s3.GetObjectInput{ + Bucket: aws.String(URI.Hostname()), + Key: aws.String(key), + }) + + if err != nil { + if aerr, ok := err.(awserr.Error); ok { + if aerr.Code() == s3.ErrCodeNoSuchBucket { + return nil, &shared.BadRequestError{Message: errBucketNotExists.Error()} + } + + if aerr.Code() == s3.ErrCodeNoSuchKey { + return nil, &shared.BadRequestError{Message: archiver.ErrHistoryNotExist.Error()} + } + } + return nil, err + } + + defer func() { + if ierr := result.Body.Close(); ierr != nil { + err = multierr.Append(err, ierr) + } + }() + + body, err := ioutil.ReadAll(result.Body) + if err != nil { + return nil, err + } + return body, nil +} + +func historyMutated(request *archiver.ArchiveHistoryRequest, historyBatches []*shared.History, isLast bool) bool { + lastBatch := historyBatches[len(historyBatches)-1].Events + lastEvent := lastBatch[len(lastBatch)-1] + lastFailoverVersion := lastEvent.GetVersion() + if lastFailoverVersion > request.CloseFailoverVersion { + return true + } + + if !isLast { + return false + } + lastEventID := lastEvent.GetEventId() + return lastFailoverVersion != request.CloseFailoverVersion || lastEventID+1 != request.NextEventID +} + +func contextExpired(ctx context.Context) bool { + select { + case <-ctx.Done(): + return true + default: + return false + } +} + +func convertToExecutionInfo(record *visibilityRecord) *shared.WorkflowExecutionInfo { + return &shared.WorkflowExecutionInfo{ + Execution: &shared.WorkflowExecution{ + WorkflowId: common.StringPtr(record.WorkflowID), + RunId: common.StringPtr(record.RunID), + }, + Type: &shared.WorkflowType{ + Name: common.StringPtr(record.WorkflowTypeName), + }, + StartTime: common.Int64Ptr(record.StartTimestamp), + ExecutionTime: common.Int64Ptr(record.ExecutionTimestamp), + CloseTime: common.Int64Ptr(record.CloseTimestamp), + CloseStatus: record.CloseStatus.Ptr(), + HistoryLength: common.Int64Ptr(record.HistoryLength), + Memo: record.Memo, + SearchAttributes: &shared.SearchAttributes{ + IndexedFields: archiver.ConvertSearchAttrToBytes(record.SearchAttributes), + }, + } +} diff --git a/common/archiver/s3store/visibilityArchiver.go b/common/archiver/s3store/visibilityArchiver.go new file mode 100644 index 00000000000..eafb3ae4385 --- /dev/null +++ b/common/archiver/s3store/visibilityArchiver.go @@ -0,0 +1,260 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package s3store + +import ( + "context" + "fmt" + "time" + + "github.com/uber/cadence/common/metrics" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/service/s3/s3iface" + + "github.com/uber/cadence/.gen/go/shared" + "github.com/uber/cadence/common/archiver" + "github.com/uber/cadence/common/log/tag" + "github.com/uber/cadence/common/service/config" +) + +type ( + visibilityArchiver struct { + container *archiver.VisibilityBootstrapContainer + s3cli s3iface.S3API + queryParser QueryParser + } + + visibilityRecord archiver.ArchiveVisibilityRequest + + queryVisibilityRequest struct { + domainID string + pageSize int + nextPageToken []byte + parsedQuery *parsedQuery + } +) + +const ( + errEncodeVisibilityRecord = "failed to encode visibility record" + indexKeyStartTimeout = "startTimeout" + indexKeyCloseTimeout = "closeTimeout" +) + +// NewVisibilityArchiver creates a new archiver.VisibilityArchiver based on s3 +func NewVisibilityArchiver( + container *archiver.VisibilityBootstrapContainer, + config *config.S3Archiver, +) (archiver.VisibilityArchiver, error) { + return newVisibilityArchiver(container, config) +} + +func newVisibilityArchiver( + container *archiver.VisibilityBootstrapContainer, + config *config.S3Archiver) (*visibilityArchiver, error) { + s3Config := &aws.Config{ + Endpoint: config.Endpoint, + Region: aws.String(config.Region), + S3ForcePathStyle: aws.Bool(config.S3ForcePathStyle), + MaxRetries: aws.Int(0), + } + sess, err := session.NewSession(s3Config) + if err != nil { + return nil, err + } + return &visibilityArchiver{ + container: container, + s3cli: s3.New(sess), + queryParser: NewQueryParser(), + }, nil +} + +func (v *visibilityArchiver) Archive( + ctx context.Context, + URI archiver.URI, + request *archiver.ArchiveVisibilityRequest, + opts ...archiver.ArchiveOption, +) (err error) { + scope := v.container.MetricsClient.Scope(metrics.VisibilityArchiverScope, metrics.DomainTag(request.DomainName)) + featureCatalog := archiver.GetFeatureCatalog(opts...) 
+ sw := scope.StartTimer(metrics.CadenceLatency) + defer func() { + sw.Stop() + if err != nil { + if isRetryableError(err) { + scope.IncCounter(metrics.VisibilityArchiverArchiveTransientErrorCount) + } else { + scope.IncCounter(metrics.VisibilityArchiverArchiveNonRetryableErrorCount) + if featureCatalog.NonRetriableError != nil { + err = featureCatalog.NonRetriableError() + } + } + } + }() + + logger := archiver.TagLoggerWithArchiveVisibilityRequestAndURI(v.container.Logger, request, URI.String()) + + if err := softValidateURI(URI); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(archiver.ErrReasonInvalidURI), tag.Error(err)) + return err + } + + if err := archiver.ValidateVisibilityArchivalRequest(request); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(archiver.ErrReasonInvalidArchiveRequest), tag.Error(err)) + return err + } + + encodedVisibilityRecord, err := encode(request) + if err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(errEncodeVisibilityRecord), tag.Error(err)) + return err + } + + // Upload archive to all indexes + key := constructTimestampIndex(URI.Path(), request.DomainID, request.WorkflowID, indexKeyCloseTimeout, request.CloseTimestamp, request.RunID) + if err := upload(ctx, v.s3cli, URI, key, encodedVisibilityRecord); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(errWriteKey), tag.Error(err)) + return err + } + key = constructTimestampIndex(URI.Path(), request.DomainID, request.WorkflowID, indexKeyStartTimeout, request.StartTimestamp, request.RunID) + if err := upload(ctx, v.s3cli, URI, key, encodedVisibilityRecord); err != nil { + logger.Error(archiver.ArchiveNonRetriableErrorMsg, tag.ArchivalArchiveFailReason(errWriteKey), tag.Error(err)) + return err + } + scope.IncCounter(metrics.VisibilityArchiveSuccessCount) + return nil +} + +func constructTimeBasedSearchKey(path, domainID, workflowID, indexKey string, timestamp int64, precision string) string { + t := time.Unix(0, timestamp).In(time.UTC) + var timeFormat = "" + switch precision { + case PrecisionSecond: + timeFormat = ":05" + fallthrough + case PrecisionMinute: + timeFormat = ":04" + timeFormat + fallthrough + case PrecisionHour: + timeFormat = "15" + timeFormat + fallthrough + case PrecisionDay: + timeFormat = "2006-01-02T" + timeFormat + } + + return fmt.Sprintf("%s/%s", constructVisibilitySearchPrefix(path, domainID, workflowID, indexKey), t.Format(timeFormat)) +} +func constructTimestampIndex(path, domainID, workflowID, indexKey string, timestamp int64, runID string) string { + t := time.Unix(0, timestamp).In(time.UTC) + return fmt.Sprintf("%s/%s/%s", constructVisibilitySearchPrefix(path, domainID, workflowID, indexKey), t.Format(time.RFC3339), runID) +} +func (v *visibilityArchiver) Query( + ctx context.Context, + URI archiver.URI, + request *archiver.QueryVisibilityRequest, +) (*archiver.QueryVisibilityResponse, error) { + if err := softValidateURI(URI); err != nil { + return nil, &shared.BadRequestError{Message: archiver.ErrInvalidURI.Error()} + } + + if err := archiver.ValidateQueryRequest(request); err != nil { + return nil, &shared.BadRequestError{Message: archiver.ErrInvalidQueryVisibilityRequest.Error()} + } + + parsedQuery, err := v.queryParser.Parse(request.Query) + if err != nil { + return nil, &shared.BadRequestError{Message: err.Error()} + } + if parsedQuery.emptyResult { + return 
&archiver.QueryVisibilityResponse{}, nil + } + + return v.query(ctx, URI, &queryVisibilityRequest{ + domainID: request.DomainID, + pageSize: request.PageSize, + nextPageToken: request.NextPageToken, + parsedQuery: parsedQuery, + }) +} + +func (v *visibilityArchiver) query( + ctx context.Context, + URI archiver.URI, + request *queryVisibilityRequest, +) (*archiver.QueryVisibilityResponse, error) { + ctx, cancel := ensureContextTimeout(ctx) + defer cancel() + var token *string + if request.nextPageToken != nil { + token = deserializeQueryVisibilityToken(request.nextPageToken) + } + var prefix = constructVisibilitySearchPrefix(URI.Path(), request.domainID, *request.parsedQuery.workflowID, indexKeyCloseTimeout) + "/" + if request.parsedQuery.closeTime != nil { + prefix = constructTimeBasedSearchKey(URI.Path(), request.domainID, *request.parsedQuery.workflowID, indexKeyCloseTimeout, *request.parsedQuery.closeTime, *request.parsedQuery.searchPrecision) + } + if request.parsedQuery.startTime != nil { + prefix = constructTimeBasedSearchKey(URI.Path(), request.domainID, *request.parsedQuery.workflowID, indexKeyStartTimeout, *request.parsedQuery.startTime, *request.parsedQuery.searchPrecision) + } + + results, err := v.s3cli.ListObjectsV2WithContext(ctx, &s3.ListObjectsV2Input{ + Bucket: aws.String(URI.Hostname()), + Prefix: aws.String(prefix), + MaxKeys: aws.Int64(int64(request.pageSize)), + ContinuationToken: token, + }) + if err != nil { + if isRetryableError(err) { + return nil, &shared.InternalServiceError{Message: err.Error()} + } + return nil, &shared.BadRequestError{Message: err.Error()} + } + if len(results.Contents) == 0 { + return &archiver.QueryVisibilityResponse{}, nil + } + + response := &archiver.QueryVisibilityResponse{} + if *results.IsTruncated { + response.NextPageToken = serializeQueryVisibilityToken(*results.NextContinuationToken) + } + for _, item := range results.Contents { + encodedRecord, err := download(ctx, v.s3cli, URI, *item.Key) + if err != nil { + return nil, &shared.InternalServiceError{Message: err.Error()} + } + + record, err := decodeVisibilityRecord(encodedRecord) + if err != nil { + return nil, &shared.InternalServiceError{Message: err.Error()} + } + response.Executions = append(response.Executions, convertToExecutionInfo(record)) + } + return response, nil +} + +func (v *visibilityArchiver) ValidateURI(URI archiver.URI) error { + err := softValidateURI(URI) + if err != nil { + return err + } + return bucketExists(context.TODO(), v.s3cli, URI) +} diff --git a/common/archiver/s3store/visibilityArchiver_test.go b/common/archiver/s3store/visibilityArchiver_test.go new file mode 100644 index 00000000000..49b975d9f85 --- /dev/null +++ b/common/archiver/s3store/visibilityArchiver_test.go @@ -0,0 +1,558 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package s3store + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "testing" + "time" + + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/stretchr/testify/mock" + + "github.com/uber-go/tally" + + "github.com/uber/cadence/common/metrics" + + "go.uber.org/zap" + + "github.com/uber/cadence/.gen/go/shared" + "github.com/uber/cadence/common" + "github.com/uber/cadence/common/archiver/s3store/mocks" + "github.com/uber/cadence/common/log/loggerimpl" + + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + + "github.com/uber/cadence/common/archiver" + "github.com/uber/cadence/common/log" +) + +type visibilityArchiverSuite struct { + *require.Assertions + suite.Suite + s3cli *mocks.S3API + + container *archiver.VisibilityBootstrapContainer + logger log.Logger + visibilityRecords []*visibilityRecord + + controller *gomock.Controller + testArchivalURI archiver.URI +} + +func TestVisibilityArchiverSuite(t *testing.T) { + suite.Run(t, new(visibilityArchiverSuite)) +} + +func (s *visibilityArchiverSuite) TestValidateURI() { + testCases := []struct { + URI string + expectedErr error + }{ + { + URI: "wrongscheme:///a/b/c", + expectedErr: archiver.ErrURISchemeMismatch, + }, + { + URI: "s3://", + expectedErr: errNoBucketSpecified, + }, + { + URI: "s3:///test", + expectedErr: errNoBucketSpecified, + }, + { + URI: "s3://bucket/a/b/c", + expectedErr: errBucketNotExists, + }, + { + URI: testBucketURI, + expectedErr: nil, + }, + } + + s.s3cli.On("HeadBucketWithContext", mock.Anything, mock.MatchedBy(func(input *s3.HeadBucketInput) bool { + return *input.Bucket != s.testArchivalURI.Hostname() + })).Return(nil, awserr.New("NotFound", "", nil)) + s.s3cli.On("HeadBucketWithContext", mock.Anything, mock.Anything).Return(&s3.HeadBucketOutput{}, nil) + + visibilityArchiver := s.newTestVisibilityArchiver() + for _, tc := range testCases { + URI, err := archiver.NewURI(tc.URI) + s.NoError(err) + s.Equal(tc.expectedErr, visibilityArchiver.ValidateURI(URI)) + } +} + +func (s *visibilityArchiverSuite) newTestVisibilityArchiver() *visibilityArchiver { + archiver := &visibilityArchiver{ + container: s.container, + s3cli: s.s3cli, + queryParser: NewQueryParser(), + } + return archiver +} + +const ( + testWorkflowTypeName = "test-workflow-type" +) + +func (s *visibilityArchiverSuite) SetupSuite() { + var err error + scope := tally.NewTestScope("test", nil) + s.s3cli = &mocks.S3API{} + setupFsEmulation(s.s3cli) + + s.testArchivalURI, err = archiver.NewURI(testBucketURI) + s.Require().NoError(err) + + zapLogger := zap.NewNop() + s.container = &archiver.VisibilityBootstrapContainer{ + Logger: loggerimpl.NewLogger(zapLogger), + MetricsClient: metrics.NewClient(scope, metrics.VisibilityArchiverScope), + } + s.setupVisibilityDirectory() +} + +func (s *visibilityArchiverSuite) TearDownSuite() { + +} + +func (s *visibilityArchiverSuite) SetupTest() { + s.Assertions = require.New(s.T()) + s.controller = gomock.NewController(s.T()) +} + 
+func (s *visibilityArchiverSuite) TearDownTest() { + s.controller.Finish() +} +func (s *visibilityArchiverSuite) TestArchive_Fail_InvalidURI() { + visibilityArchiver := s.newTestVisibilityArchiver() + URI, err := archiver.NewURI("wrongscheme://") + s.NoError(err) + request := &archiver.ArchiveVisibilityRequest{ + DomainName: testDomainName, + DomainID: testDomainID, + WorkflowID: testWorkflowID, + RunID: testRunID, + WorkflowTypeName: testWorkflowTypeName, + StartTimestamp: time.Now().UnixNano(), + ExecutionTimestamp: 0, // workflow without backoff + CloseTimestamp: time.Now().UnixNano(), + CloseStatus: shared.WorkflowExecutionCloseStatusFailed, + HistoryLength: int64(101), + } + err = visibilityArchiver.Archive(context.Background(), URI, request) + s.Error(err) +} + +func (s *visibilityArchiverSuite) TestArchive_Fail_InvalidRequest() { + visibilityArchiver := s.newTestVisibilityArchiver() + err := visibilityArchiver.Archive(context.Background(), s.testArchivalURI, &archiver.ArchiveVisibilityRequest{}) + s.Error(err) +} + +func (s *visibilityArchiverSuite) TestArchive_Fail_NonRetriableErrorOption() { + visibilityArchiver := s.newTestVisibilityArchiver() + nonRetriableErr := errors.New("some non-retryable error") + err := visibilityArchiver.Archive( + context.Background(), + s.testArchivalURI, + &archiver.ArchiveVisibilityRequest{ + DomainID: testDomainID, + }, + archiver.GetNonRetriableErrorOption(nonRetriableErr), + ) + s.Equal(nonRetriableErr, err) +} + +func (s *visibilityArchiverSuite) TestArchive_Success() { + visibilityArchiver := s.newTestVisibilityArchiver() + closeTimestamp := time.Now() + request := &archiver.ArchiveVisibilityRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + WorkflowTypeName: testWorkflowTypeName, + StartTimestamp: closeTimestamp.Add(-time.Hour).UnixNano(), + ExecutionTimestamp: 0, // workflow without backoff + CloseTimestamp: closeTimestamp.UnixNano(), + CloseStatus: shared.WorkflowExecutionCloseStatusFailed, + HistoryLength: int64(101), + Memo: &shared.Memo{ + Fields: map[string][]byte{ + "testFields": {1, 2, 3}, + }, + }, + SearchAttributes: map[string]string{ + "testAttribute": "456", + }, + } + URI, err := archiver.NewURI(testBucketURI + "/test-archive-success") + s.NoError(err) + err = visibilityArchiver.Archive(context.Background(), URI, request) + s.NoError(err) + + expectedKey := constructTimestampIndex(URI.Path(), testDomainID, testWorkflowID, indexKeyCloseTimeout, closeTimestamp.UnixNano(), testRunID) + data, err := download(context.Background(), visibilityArchiver.s3cli, URI, expectedKey) + s.NoError(err, expectedKey) + + archivedRecord := &archiver.ArchiveVisibilityRequest{} + err = json.Unmarshal(data, archivedRecord) + s.NoError(err) + s.Equal(request, archivedRecord) +} + +func (s *visibilityArchiverSuite) TestQuery_Fail_InvalidURI() { + visibilityArchiver := s.newTestVisibilityArchiver() + URI, err := archiver.NewURI("wrongscheme://") + s.NoError(err) + request := &archiver.QueryVisibilityRequest{ + DomainID: testDomainID, + PageSize: 1, + } + response, err := visibilityArchiver.Query(context.Background(), URI, request) + s.Error(err) + s.Nil(response) +} + +func (s *visibilityArchiverSuite) TestQuery_Fail_InvalidRequest() { + visibilityArchiver := s.newTestVisibilityArchiver() + response, err := visibilityArchiver.Query(context.Background(), s.testArchivalURI, &archiver.QueryVisibilityRequest{}) + s.Error(err) + s.Nil(response) +} + +func (s *visibilityArchiverSuite) 
TestQuery_Fail_InvalidQuery() { + visibilityArchiver := s.newTestVisibilityArchiver() + mockParser := NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(nil, errors.New("invalid query")) + visibilityArchiver.queryParser = mockParser + response, err := visibilityArchiver.Query(context.Background(), s.testArchivalURI, &archiver.QueryVisibilityRequest{ + DomainID: "some random domainID", + PageSize: 10, + Query: "some invalid query", + }) + s.Error(err) + s.Nil(response) +} +func (s *visibilityArchiverSuite) TestQuery_Success_DirectoryNotExist() { + visibilityArchiver := s.newTestVisibilityArchiver() + mockParser := NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(&parsedQuery{ + workflowID: common.StringPtr(testWorkflowID), + closeTime: common.Int64Ptr(0), + searchPrecision: common.StringPtr(PrecisionSecond), + }, nil) + visibilityArchiver.queryParser = mockParser + request := &archiver.QueryVisibilityRequest{ + DomainID: testDomainID, + Query: "parsed by mockParser", + PageSize: 1, + } + response, err := visibilityArchiver.Query(context.Background(), s.testArchivalURI, request) + s.NoError(err) + s.NotNil(response) + s.Empty(response.Executions) + s.Empty(response.NextPageToken) +} + +func (s *visibilityArchiverSuite) TestQuery_Success_NoNextPageToken() { + visibilityArchiver := s.newTestVisibilityArchiver() + mockParser := NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(&parsedQuery{ + closeTime: common.Int64Ptr(int64(1 * time.Hour)), + searchPrecision: common.StringPtr(PrecisionHour), + workflowID: common.StringPtr(testWorkflowID), + }, nil) + visibilityArchiver.queryParser = mockParser + request := &archiver.QueryVisibilityRequest{ + DomainID: testDomainID, + PageSize: 10, + Query: "parsed by mockParser", + } + URI, err := archiver.NewURI(testBucketURI) + s.NoError(err) + response, err := visibilityArchiver.Query(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.Nil(response.NextPageToken) + s.Len(response.Executions, 2) + s.Equal(convertToExecutionInfo(s.visibilityRecords[0]), response.Executions[0]) +} + +func (s *visibilityArchiverSuite) TestQuery_Success_SmallPageSize() { + visibilityArchiver := s.newTestVisibilityArchiver() + mockParser := NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(&parsedQuery{ + closeTime: common.Int64Ptr(0), + searchPrecision: common.StringPtr(PrecisionDay), + workflowID: common.StringPtr(testWorkflowID), + }, nil).AnyTimes() + visibilityArchiver.queryParser = mockParser + request := &archiver.QueryVisibilityRequest{ + DomainID: testDomainID, + PageSize: 2, + Query: "parsed by mockParser", + } + URI, err := archiver.NewURI(testBucketURI) + s.NoError(err) + response, err := visibilityArchiver.Query(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.NotNil(response.NextPageToken) + s.Len(response.Executions, 2) + s.Equal(convertToExecutionInfo(s.visibilityRecords[0]), response.Executions[0]) + s.Equal(convertToExecutionInfo(s.visibilityRecords[1]), response.Executions[1]) + + request.NextPageToken = response.NextPageToken + response, err = visibilityArchiver.Query(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.Nil(response.NextPageToken) + s.Len(response.Executions, 1) + s.Equal(convertToExecutionInfo(s.visibilityRecords[2]), response.Executions[0]) +} + +type precisionTest struct { + day int64 + hour int64 + minute int64 + second int64 + 
precision string +} + +func (s *visibilityArchiverSuite) TestArchiveAndQueryPrecisions() { + precisionTests := []*precisionTest{ + { + day: 1, + hour: 0, + minute: 0, + second: 0, + precision: PrecisionDay, + }, + { + day: 1, + hour: 1, + minute: 0, + second: 0, + precision: PrecisionDay, + }, + { + day: 2, + hour: 1, + minute: 0, + second: 0, + precision: PrecisionHour, + }, + { + day: 2, + hour: 1, + minute: 30, + second: 0, + precision: PrecisionHour, + }, + { + day: 3, + hour: 2, + minute: 1, + second: 0, + precision: PrecisionMinute, + }, + { + day: 3, + hour: 2, + minute: 1, + second: 30, + precision: PrecisionMinute, + }, + { + day: 4, + hour: 3, + minute: 2, + second: 1, + precision: PrecisionSecond, + }, + { + day: 4, + hour: 3, + minute: 2, + second: 1, + precision: PrecisionSecond, + }, + { + day: 4, + hour: 3, + minute: 2, + second: 2, + precision: PrecisionSecond, + }, + { + day: 4, + hour: 3, + minute: 2, + second: 2, + precision: PrecisionSecond, + }, + } + visibilityArchiver := s.newTestVisibilityArchiver() + URI, err := archiver.NewURI(testBucketURI + "/archive-and-query") + s.NoError(err) + + for i, testData := range precisionTests { + record := archiver.ArchiveVisibilityRequest{ + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: fmt.Sprintf("%s-%d", testRunID, i), + WorkflowTypeName: testWorkflowTypeName, + StartTimestamp: testData.day*int64(time.Hour)*24 + testData.hour*int64(time.Hour) + testData.minute*int64(time.Minute) + testData.second*int64(time.Second), + CloseTimestamp: (testData.day+30)*int64(time.Hour)*24 + testData.hour*int64(time.Hour) + testData.minute*int64(time.Minute) + testData.second*int64(time.Second), + CloseStatus: shared.WorkflowExecutionCloseStatusFailed, + HistoryLength: 101, + } + err := visibilityArchiver.Archive(context.Background(), URI, &record) + s.NoError(err) + } + + request := &archiver.QueryVisibilityRequest{ + DomainID: testDomainID, + PageSize: 100, + Query: "parsed by mockParser", + } + + for i, testData := range precisionTests { + mockParser := NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(&parsedQuery{ + closeTime: common.Int64Ptr((testData.day+30)*int64(time.Hour)*24 + testData.hour*int64(time.Hour) + testData.minute*int64(time.Minute) + testData.second*int64(time.Second)), + searchPrecision: common.StringPtr(testData.precision), + workflowID: common.StringPtr(testWorkflowID), + }, nil).AnyTimes() + visibilityArchiver.queryParser = mockParser + + response, err := visibilityArchiver.Query(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.Len(response.Executions, 2, "Iteration ", i) + + mockParser = NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(&parsedQuery{ + startTime: common.Int64Ptr((testData.day)*int64(time.Hour)*24 + testData.hour*int64(time.Hour) + testData.minute*int64(time.Minute) + testData.second*int64(time.Second)), + searchPrecision: common.StringPtr(testData.precision), + workflowID: common.StringPtr(testWorkflowID), + }, nil).AnyTimes() + visibilityArchiver.queryParser = mockParser + + response, err = visibilityArchiver.Query(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + s.Len(response.Executions, 2, "Iteration ", i) + } +} +func (s *visibilityArchiverSuite) TestArchiveAndQuery() { + visibilityArchiver := s.newTestVisibilityArchiver() + mockParser := NewMockQueryParser(s.controller) + mockParser.EXPECT().Parse(gomock.Any()).Return(&parsedQuery{ + 
closeTime: common.Int64Ptr(int64(1 * time.Hour)), + searchPrecision: common.StringPtr(PrecisionHour), + workflowID: common.StringPtr(testWorkflowID), + }, nil).AnyTimes() + visibilityArchiver.queryParser = mockParser + URI, err := archiver.NewURI(testBucketURI + "/archive-and-query") + s.NoError(err) + for _, record := range s.visibilityRecords { + err := visibilityArchiver.Archive(context.Background(), URI, (*archiver.ArchiveVisibilityRequest)(record)) + s.NoError(err) + } + + request := &archiver.QueryVisibilityRequest{ + DomainID: testDomainID, + PageSize: 1, + Query: "parsed by mockParser", + } + executions := []*shared.WorkflowExecutionInfo{} + var first = true + for first || request.NextPageToken != nil { + response, err := visibilityArchiver.Query(context.Background(), URI, request) + s.NoError(err) + s.NotNil(response) + executions = append(executions, response.Executions...) + request.NextPageToken = response.NextPageToken + first = false + } + s.Len(executions, 2) + s.Equal(convertToExecutionInfo(s.visibilityRecords[0]), executions[0]) + s.Equal(convertToExecutionInfo(s.visibilityRecords[1]), executions[1]) +} + +func (s *visibilityArchiverSuite) setupVisibilityDirectory() { + s.visibilityRecords = []*visibilityRecord{ + { + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID, + WorkflowTypeName: testWorkflowTypeName, + StartTimestamp: 1, + CloseTimestamp: int64(1 * time.Hour), + CloseStatus: shared.WorkflowExecutionCloseStatusFailed, + HistoryLength: 101, + }, + { + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID + "1", + WorkflowTypeName: testWorkflowTypeName, + StartTimestamp: 1, + CloseTimestamp: int64(1*time.Hour + 30*time.Minute), + CloseStatus: shared.WorkflowExecutionCloseStatusFailed, + HistoryLength: 101, + }, + { + DomainID: testDomainID, + DomainName: testDomainName, + WorkflowID: testWorkflowID, + RunID: testRunID + "1", + WorkflowTypeName: testWorkflowTypeName, + StartTimestamp: 1, + CloseTimestamp: int64(3 * time.Hour), + CloseStatus: shared.WorkflowExecutionCloseStatusFailed, + HistoryLength: 101, + }, + } + visibilityArchiver := s.newTestVisibilityArchiver() + for _, record := range s.visibilityRecords { + s.writeVisibilityRecordForQueryTest(visibilityArchiver, record) + } +} + +func (s *visibilityArchiverSuite) writeVisibilityRecordForQueryTest(visibilityArchiver *visibilityArchiver, record *visibilityRecord) { + err := visibilityArchiver.Archive(context.Background(), s.testArchivalURI, (*archiver.ArchiveVisibilityRequest)(record)) + s.Require().NoError(err) +} diff --git a/common/service/config/config.go b/common/service/config/config.go index a42f5c1917a..2e27948feb6 100644 --- a/common/service/config/config.go +++ b/common/service/config/config.go @@ -330,6 +330,7 @@ type ( // HistoryArchiverProvider contains the config for all history archivers HistoryArchiverProvider struct { Filestore *FilestoreArchiver `yaml:"filestore"` + S3store *S3Archiver `yaml:"s3store"` } // VisibilityArchival contains the config for visibility archival @@ -345,6 +346,7 @@ type ( // VisibilityArchiverProvider contains the config for all visibility archivers VisibilityArchiverProvider struct { Filestore *FilestoreArchiver `yaml:"filestore"` + S3store *S3Archiver `yaml:"s3store"` } // FilestoreArchiver contain the config for filestore archiver @@ -353,6 +355,13 @@ type ( DirMode string `yaml:"dirMode"` } + // S3Archiver contains the config for S3 archiver + S3Archiver struct { + 
Region string `yaml:"region"` + Endpoint *string `yaml:"endpoint"` + S3ForcePathStyle bool `yaml:"s3ForcePathStyle"` + } + // PublicClient is config for connecting to cadence frontend PublicClient struct { // HostPort is the host port to connect on. Host can be DNS name diff --git a/go.mod b/go.mod index 681a5b2f22b..864ed74f2ad 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/DataDog/zstd v1.4.0 // indirect github.com/Shopify/sarama v1.23.0 github.com/apache/thrift v0.0.0-20161221203622-b2a4d4ae21c7 + github.com/aws/aws-sdk-go v1.25.34 github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3 // indirect github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 github.com/bsm/sarama-cluster v2.1.13+incompatible diff --git a/go.sum b/go.sum index 178e8a72cfe..9354f8f368d 100644 --- a/go.sum +++ b/go.sum @@ -17,6 +17,8 @@ github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/apache/thrift v0.0.0-20161221203622-b2a4d4ae21c7 h1:Fv9bK1Q+ly/ROk4aJsVMeuIwPel4bEnD8EPiI91nZMg= github.com/apache/thrift v0.0.0-20161221203622-b2a4d4ae21c7/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/aws/aws-sdk-go v1.25.34 h1:roL040qe1npx1ToFeXYHOGp/nOpLbcIQHKZ5UeDIyIM= +github.com/aws/aws-sdk-go v1.25.34/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3 h1:wOysYcIdqv3WnvwqFFzrYCFALPED7qkUGaLXu359GSc= github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3/go.mod h1:UMqtWQTnOe4byzwe7Zhwh8f8s+36uszN51sJrSIZlTE= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -129,6 +131,8 @@ github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=