Skip to content

Commit

Permalink
added s3-force-path-style to choose whether to force path style URLs …
Browse files Browse the repository at this point in the history
…for S3 objects (#1206)
  • Loading branch information
chaudhryfaisal authored Mar 25, 2021
1 parent fb4f23a commit dce3d62
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 4 deletions.
6 changes: 6 additions & 0 deletions sparkctl/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,12 @@ If you want to use a custom S3 endpoint or region, add `--upload-to-endpoint` and
$ sparkctl create <path to YAML file> --upload-to-endpoint https://<endpoint-url> --upload-to-region <endpoint-region> --upload-to s3://<bucket>
```

If you want to force path style URLs for S3 objects, add `--s3-force-path-style`:

```bash
$ sparkctl create <path to YAML file> --s3-force-path-style
```

If you want to make uploaded dependencies publicly available, add `--public` to the `create` command, as the following example shows:

```bash
Expand Down
5 changes: 4 additions & 1 deletion sparkctl/cmd/create.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ var UploadToPath string
var UploadToEndpoint string
var UploadToRegion string
var Public bool
var S3ForcePathStyle bool
var Override bool
var From string

Expand Down Expand Up @@ -99,6 +100,8 @@ func init() {
"https://storage.googleapis.com", "the GCS or S3 storage api endpoint url")
createCmd.Flags().BoolVarP(&Public, "public", "c", false,
"whether to make uploaded files publicly available")
createCmd.Flags().BoolVarP(&S3ForcePathStyle, "s3-force-path-style", "p", false,
"whether to force path style URLs for S3 objects")
createCmd.Flags().BoolVarP(&Override, "override", "o", false,
"whether to override remote files with the same names")
createCmd.Flags().StringVarP(&From, "from", "f", "",
Expand Down Expand Up @@ -381,7 +384,7 @@ func uploadLocalDependencies(app *v1beta2.SparkApplication, files []string) ([]s
case "gs":
uh, err = newGCSBlob(ctx, uploadBucket, UploadToEndpoint, UploadToRegion)
case "s3":
uh, err = newS3Blob(ctx, uploadBucket, UploadToEndpoint, UploadToRegion)
uh, err = newS3Blob(ctx, uploadBucket, UploadToEndpoint, UploadToRegion, S3ForcePathStyle)
default:
return nil, fmt.Errorf("unsupported upload location URL scheme: %s", uploadLocationUrl.Scheme)
}
Expand Down
8 changes: 5 additions & 3 deletions sparkctl/cmd/s3.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,14 +48,16 @@ func newS3Blob(
ctx context.Context,
bucket string,
endpoint string,
region string) (*uploadHandler, error) {
region string,
forcePathStyle bool) (*uploadHandler, error) {
// AWS SDK does require specifying regions, thus set it to default S3 region
if region == "" {
region = "us-east1"
}
c := &aws.Config{
Region: aws.String(region),
Endpoint: aws.String(endpoint),
Region: aws.String(region),
Endpoint: aws.String(endpoint),
S3ForcePathStyle: aws.Bool(forcePathStyle),
}
sess := session.Must(session.NewSession(c))
b, err := s3blob.OpenBucket(ctx, sess, bucket)
Expand Down

0 comments on commit dce3d62

Please sign in to comment.