feat(strm-764): remove sink commands
All commands related to sinks have been removed, as well as any lingering references to the concept.

BREAKING CHANGE: all sink commands have been removed; use the data-connector commands instead
ivan-p92 committed Apr 13, 2022
1 parent aebb56e commit 779e0b7
Showing 14 changed files with 13 additions and 626 deletions.
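In practical terms, anywhere the CLI was previously invoked with a sink subcommand, the matching data-connector subcommand now applies: for example, "strm create sink" becomes "strm create data-connector" (flags may differ, so consult the data-connector help output). The root command name "strm" is inferred from the module path and is an assumption, not shown in this diff.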
2 changes: 0 additions & 2 deletions pkg/bootstrap/bootstrap.go
@@ -21,7 +21,6 @@ import (
 	"strmprivacy/strm/pkg/entity/key_stream"
 	"strmprivacy/strm/pkg/entity/schema"
 	"strmprivacy/strm/pkg/entity/schema_code"
-	"strmprivacy/strm/pkg/entity/sink"
 	"strmprivacy/strm/pkg/entity/stream"
 	"strmprivacy/strm/pkg/entity/usage"
 	"strmprivacy/strm/pkg/util"
@@ -55,7 +54,6 @@ func SetupServiceClients(accessToken *string) {
 	kafka_exporter.SetupClient(clientConnection, ctx)
 	batch_exporter.SetupClient(clientConnection, ctx)
 	batch_job.SetupClient(clientConnection, ctx)
-	sink.SetupClient(clientConnection, ctx)
 	data_connector.SetupClient(clientConnection, ctx)
 	kafka_cluster.SetupClient(clientConnection, ctx)
 	kafka_user.SetupClient(clientConnection, ctx)
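For context on the SetupClient calls above: each entity package keeps a package-level gRPC client that bootstrap initializes once from the shared connection. A minimal sketch of that pattern, assuming a placeholder client type (the real packages use protobuf-generated service clients not shown in this diff):

package sink // illustrative: this is the shape of the package being deleted

import (
	"context"

	"google.golang.org/grpc"
)

// SinksServiceClient stands in for a protobuf-generated gRPC client
// interface; the actual generated type is an assumption here.
type SinksServiceClient interface{}

// Package-level state, filled in once during bootstrap.
var (
	apiContext context.Context
	client     SinksServiceClient
)

// SetupClient matches the call shape seen in bootstrap.go:
// sink.SetupClient(clientConnection, ctx). It stores the context and
// builds the package's client from the shared connection.
func SetupClient(clientConnection *grpc.ClientConn, ctx context.Context) {
	apiContext = ctx
	client = newClient(clientConnection)
}

func newClient(conn *grpc.ClientConn) SinksServiceClient {
	// A generated NewSinksServiceClient(conn) constructor would go here.
	return nil
}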
2 changes: 0 additions & 2 deletions pkg/cmd/create.go
@@ -10,7 +10,6 @@ import (
 	"strmprivacy/strm/pkg/entity/kafka_exporter"
 	"strmprivacy/strm/pkg/entity/kafka_user"
 	"strmprivacy/strm/pkg/entity/schema"
-	"strmprivacy/strm/pkg/entity/sink"
 	"strmprivacy/strm/pkg/entity/stream"
 )

@@ -23,7 +22,6 @@
 
 func init() {
 	CreateCmd.AddCommand(stream.CreateCmd())
-	CreateCmd.AddCommand(sink.CreateCmd())
 	CreateCmd.AddCommand(data_connector.CreateCmd())
 	CreateCmd.AddCommand(batch_exporter.CreateCmd())
 	CreateCmd.AddCommand(batch_job.CreateCmd())
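The registration pattern in this file, and in delete.go, get.go, and list.go below, is plain cobra composition: each entity package exports a constructor returning a *cobra.Command, and the verb-level parent aggregates them with AddCommand, so removing an entity is a one-line deletion per verb. A runnable sketch of the idiom (entity and command names here are illustrative, not the actual strm code):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// createCmd is the verb-level parent; it only dispatches to subcommands.
var createCmd = &cobra.Command{
	Use:   "create",
	Short: "Create an entity",
}

// dataConnectorCreateCmd mirrors the shape of data_connector.CreateCmd():
// a constructor that returns a ready-to-register leaf command.
func dataConnectorCreateCmd() *cobra.Command {
	return &cobra.Command{
		Use:  "data-connector (name)",
		Args: cobra.ExactArgs(1),
		Run: func(cmd *cobra.Command, args []string) {
			fmt.Printf("would create data connector %q\n", args[0])
		},
	}
}

func main() {
	// Dropping an entity means dropping its AddCommand line, exactly as
	// the sink.CreateCmd() registration was dropped in the diff above.
	createCmd.AddCommand(dataConnectorCreateCmd())
	_ = createCmd.Execute()
}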
2 changes: 0 additions & 2 deletions pkg/cmd/delete.go
@@ -10,7 +10,6 @@ import (
 	"strmprivacy/strm/pkg/entity/kafka_exporter"
 	"strmprivacy/strm/pkg/entity/kafka_user"
 	"strmprivacy/strm/pkg/entity/schema"
-	"strmprivacy/strm/pkg/entity/sink"
 	"strmprivacy/strm/pkg/entity/stream"
 )

@@ -26,7 +25,6 @@ func init() {
 	DeleteCmd.AddCommand(kafka_exporter.DeleteCmd())
 	DeleteCmd.AddCommand(batch_exporter.DeleteCmd())
 	DeleteCmd.AddCommand(batch_job.DeleteCmd())
-	DeleteCmd.AddCommand(sink.DeleteCmd())
 	DeleteCmd.AddCommand(data_connector.DeleteCmd())
 	DeleteCmd.AddCommand(kafka_user.DeleteCmd())
 	DeleteCmd.AddCommand(event_contract.DeleteCmd())
2 changes: 0 additions & 2 deletions pkg/cmd/get.go
@@ -14,7 +14,6 @@ import (
 	"strmprivacy/strm/pkg/entity/key_stream"
 	"strmprivacy/strm/pkg/entity/schema"
 	"strmprivacy/strm/pkg/entity/schema_code"
-	"strmprivacy/strm/pkg/entity/sink"
 	"strmprivacy/strm/pkg/entity/stream"
 	"strmprivacy/strm/pkg/entity/usage"
 )
@@ -30,7 +29,6 @@ func init() {
 	GetCmd.AddCommand(kafka_exporter.GetCmd())
 	GetCmd.AddCommand(batch_exporter.GetCmd())
 	GetCmd.AddCommand(batch_job.GetCmd())
-	GetCmd.AddCommand(sink.GetCmd())
 	GetCmd.AddCommand(data_connector.GetCmd())
 	GetCmd.AddCommand(kafka_cluster.GetCmd())
 	GetCmd.AddCommand(kafka_user.GetCmd())
2 changes: 0 additions & 2 deletions pkg/cmd/list.go
@@ -13,7 +13,6 @@ import (
 	"strmprivacy/strm/pkg/entity/kafka_user"
 	"strmprivacy/strm/pkg/entity/key_stream"
 	"strmprivacy/strm/pkg/entity/schema"
-	"strmprivacy/strm/pkg/entity/sink"
 	"strmprivacy/strm/pkg/entity/stream"
 )

@@ -28,7 +27,6 @@ func init() {
 	ListCmd.AddCommand(kafka_exporter.ListCmd())
 	ListCmd.AddCommand(batch_exporter.ListCmd())
 	ListCmd.AddCommand(batch_job.ListCmd())
-	ListCmd.AddCommand(sink.ListCmd())
 	ListCmd.AddCommand(data_connector.ListCmd())
 	ListCmd.AddCommand(kafka_cluster.ListCmd())
 	ListCmd.AddCommand(kafka_user.ListCmd())
4 changes: 2 additions & 2 deletions pkg/entity/batch_exporter/cmd.go
@@ -23,9 +23,9 @@ A Batch Exporter listens to a stream and writes all events to files using a Data
 Each file follows the JSON Lines format, which is one full JSON document per line.
 A [Data Connector](/cli-reference/` + fmt.Sprint(common.RootCommandName) + `/create/data-connector.md) is a configuration
-entity that comprises location (GCS bucket, AWS S3 bucket, ...) and associated credentials.
+entity that comprises a location (GCS bucket, AWS S3 bucket, ...) and associated credentials.
-A Data Connector needs to be created *before* you can create a batch exporter that uses it.
+A Data Connector must be created *before* you can create a batch exporter that uses it.
 ### Usage
 `
2 changes: 1 addition & 1 deletion pkg/entity/batch_job/batch_job.go
@@ -55,7 +55,7 @@ func del(id *string) {
 
 func create(cmd *cobra.Command) {
 	flags := cmd.Flags()
-	batchJobFile := util.GetStringAndErr(flags, batch_jobs_file_flag_name)
+	batchJobFile := util.GetStringAndErr(flags, batchJobsFileFlagName)
 
 	batchJobData, err := ioutil.ReadFile(batchJobFile)
 	if err != nil {
15 changes: 8 additions & 7 deletions pkg/entity/batch_job/cmd.go
@@ -7,18 +7,19 @@ import (
 )
 
 const (
-	batch_jobs_file_flag_name = "file"
+	batchJobsFileFlagName = "file"
 )
 
 var longDoc = `
-A Batch Job outputs all events in file all events to files in a Sink. This happens with a regular interval.
+A Batch Job reads all events from a Data Connector and writes them to one or more Data Connectors,
+applying our privacy algorithm as defined by the job's configuration file.
 Each file follows the JSON Lines format, which is one full JSON document per line.
-A [sink](/cli-reference/` + fmt.Sprint(common.RootCommandName) + `/create/sink.md) is a configuration item that defines location
-(Gcloud, AWS, ..) bucket and associated credentials.
+A [Data Connector](/cli-reference/` + fmt.Sprint(common.RootCommandName) + `/create/data-connector.md) is a configuration
+entity that comprises a location (GCS bucket, AWS S3 bucket, ...) and associated credentials.
-A sink needs to be created *before* you can create a batch job that uses it
+A Data Connector must be created *before* you can create a batch job that uses it.
 ### Usage
 `
@@ -93,9 +94,9 @@ func CreateCmd() *cobra.Command {
 
 	flags := batchJob.Flags()
 
-	flags.StringP(batch_jobs_file_flag_name, "F", "",
+	flags.StringP(batchJobsFileFlagName, "F", "",
 		`The path to the JSON file containing the batch job configuration`)
-	err := batchJob.MarkFlagRequired(batch_jobs_file_flag_name)
+	err := batchJob.MarkFlagRequired(batchJobsFileFlagName)
 	common.CliExit(err)
 
 	return batchJob
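The flag wiring in this hunk is standard cobra/pflag usage: StringP registers a string flag with a one-letter shorthand, and MarkFlagRequired makes cobra reject invocations that omit it. A self-contained sketch of the same pattern (error handling simplified; the real code routes errors through common.CliExit):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// Same constant-per-flag style as batchJobsFileFlagName in the diff above.
const fileFlagName = "file"

func main() {
	cmd := &cobra.Command{
		Use: "batch-job",
		Run: func(cmd *cobra.Command, args []string) {
			// GetString's error is ignored for brevity; the strm code wraps
			// this lookup in util.GetStringAndErr instead.
			path, _ := cmd.Flags().GetString(fileFlagName)
			fmt.Println("would read batch job config from", path)
		},
	}

	// -F/--file with no default, then mark it required so cobra fails fast
	// when the flag is missing.
	cmd.Flags().StringP(fileFlagName, "F", "",
		"The path to the JSON file containing the batch job configuration")
	if err := cmd.MarkFlagRequired(fileFlagName); err != nil {
		panic(err) // stands in for common.CliExit(err)
	}

	_ = cmd.Execute()
}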
2 changes: 1 addition & 1 deletion pkg/entity/schema/schema.go
@@ -133,7 +133,7 @@ func create(cmd *cobra.Command, args *string) {
 	typeString := util.GetStringAndErr(flags, schemaTypeFlag)
 	schemaType, ok := entities.SchemaType_value[typeString]
 	if !ok {
-		common.CliExit(fmt.Sprintf("Can't convert %s to a known consent sink type, types are %v",
+		common.CliExit(fmt.Sprintf("Can't convert %s to a known consent schema type, types are %v",
 			typeString, entities.SchemaType_value))
 	}
 	definitionFilename := util.GetStringAndErr(flags, definitionFlag)
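The lookup being corrected here relies on the name-to-number map that protoc generates for every Go enum (EnumName_value map[string]int32), so a comma-ok map read doubles as input validation. A minimal sketch of the idiom with a stand-in map (the values below are placeholders, not the actual entities.SchemaType values):

package main

import "fmt"

// SchemaType_value imitates the map protoc generates next to an enum;
// the names below are assumptions for illustration only.
var SchemaType_value = map[string]int32{
	"AVRO":        0,
	"JSON_SCHEMA": 1,
}

// parseSchemaType validates a user-supplied type string: the comma-ok
// form distinguishes an unknown name from a legitimate zero value.
func parseSchemaType(s string) (int32, error) {
	v, ok := SchemaType_value[s]
	if !ok {
		return 0, fmt.Errorf("can't convert %s to a known schema type, types are %v",
			s, SchemaType_value)
	}
	return v, nil
}

func main() {
	v, err := parseSchemaType("AVRO")
	if err != nil {
		panic(err)
	}
	fmt.Println("schema type value:", v)
}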
102 changes: 0 additions & 102 deletions pkg/entity/sink/cmd.go

This file was deleted.
