docs: removed mention of "hybrid" or "dedicated" clusters #1838

Merged: 2 commits, Mar 21, 2023
2 changes: 1 addition & 1 deletion docs/commands/rhoas_kafka_create.md

(Generated file; diff not rendered by default.)

2 changes: 1 addition & 1 deletion pkg/cmd/kafka/create/create.go
@@ -554,7 +554,7 @@ func promptKafkaPayload(opts *options, constants *remote.DynamicServiceConstants
var enterpriseQuota accountmgmtutil.QuotaSpec
if opts.useEnterpriseCluster {
if len(orgQuota.EnterpriseQuotas) < 1 {
- return nil, opts.f.Localizer.MustLocalizeError("kafka.create.error.noEnterpriseQuota")
+ return nil, opts.f.Localizer.MustLocalizeError("kafka.create.error.noStandardQuota")
}

enterpriseQuota = orgQuota.EnterpriseQuotas[0]
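For context, the touched branch of promptKafkaPayload errors out when the organization has no enterprise quota and otherwise takes the first entry. A minimal, self-contained sketch of that pattern follows; the types and the plain error are stand-ins, not the CLI's actual options struct or localizer:

```go
package main

import (
	"errors"
	"fmt"
)

// QuotaSpec stands in for accountmgmtutil.QuotaSpec.
type QuotaSpec struct {
	Name string
	Size int
}

// pickEnterpriseQuota mirrors the shape of the check in create.go:
// fail when no enterprise quota exists, otherwise use the first entry.
func pickEnterpriseQuota(enterpriseQuotas []QuotaSpec) (*QuotaSpec, error) {
	if len(enterpriseQuotas) < 1 {
		// The real CLI resolves this key through its localizer instead.
		return nil, errors.New("kafka.create.error.noStandardQuota")
	}
	quota := enterpriseQuotas[0]
	return &quota, nil
}

func main() {
	q, err := pickEnterpriseQuota([]QuotaSpec{{Name: "enterprise", Size: 1}})
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("using quota %q (size %d)\n", q.Name, q.Size)
}
```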
34 changes: 17 additions & 17 deletions pkg/cmd/kafka/list/list.go
@@ -27,13 +27,13 @@ import (

// row is the details of a Kafka instance needed to print to a table
type kafkaRow struct {
- ID string `json:"id" header:"ID"`
- Name string `json:"name" header:"Name"`
- Owner string `json:"owner" header:"Owner"`
- Status string `json:"status" header:"Status"`
- CloudProvider string `json:"cloud_provider" header:"Cloud Provider"`
- Region string `json:"region" header:"Region"`
- CustomerCloud string `json:"customer_cloud" header:"Customer Cloud"`
+ ID string `json:"id" header:"ID"`
+ Name string `json:"name" header:"Name"`
+ Owner string `json:"owner" header:"Owner"`
+ Status string `json:"status" header:"Status"`
+ CloudProvider string `json:"cloud_provider" header:"Cloud Provider"`
+ Region string `json:"region" header:"Region"`
+ OpenshiftCluster string `json:"openshift_cluster" header:"Openshift Cluster"`
}

type options struct {
@@ -160,22 +160,22 @@ func mapResponseItemsToRows(opts *options, kafkas []kafkamgmtclient.KafkaRequest
name = fmt.Sprintf("%s %s", name, icon.Emoji("✔", "(current)"))
}

- var customerCloud string
+ var openshiftCluster string
if id, ok := k.GetClusterIdOk(); ok {
cluster := (*clusterIdMap)[*id]
- customerCloud = fmt.Sprintf("%v (%v)", cluster.Name(), cluster.ID())
+ openshiftCluster = fmt.Sprintf("%v (%v)", cluster.Name(), cluster.ID())
} else {
- customerCloud = opts.f.Localizer.MustLocalize("kafka.list.output.customerCloud.redhat")
+ openshiftCluster = opts.f.Localizer.MustLocalize("kafka.list.output.openshiftCluster.redhat")
}

row := kafkaRow{
- ID: k.GetId(),
- Name: name,
- Owner: k.GetOwner(),
- Status: k.GetStatus(),
- CloudProvider: k.GetCloudProvider(),
- Region: k.GetRegion(),
- CustomerCloud: customerCloud,
+ ID: k.GetId(),
+ Name: name,
+ Owner: k.GetOwner(),
+ Status: k.GetStatus(),
+ CloudProvider: k.GetCloudProvider(),
+ Region: k.GetRegion(),
+ OpenshiftCluster: openshiftCluster,
}

rows[i] = row
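The header:"..." struct tags on kafkaRow are what name the columns in the list output. As a rough illustration only (not the CLI's actual table writer, whose API isn't shown in this diff), tags like these can be read with reflection:

```go
package main

import (
	"fmt"
	"reflect"
	"strings"
)

type kafkaRow struct {
	ID               string `json:"id" header:"ID"`
	Name             string `json:"name" header:"Name"`
	OpenshiftCluster string `json:"openshift_cluster" header:"Openshift Cluster"`
}

// headersOf collects the `header` tag of every field on the struct.
func headersOf(v interface{}) []string {
	t := reflect.TypeOf(v)
	headers := make([]string, 0, t.NumField())
	for i := 0; i < t.NumField(); i++ {
		if h := t.Field(i).Tag.Get("header"); h != "" {
			headers = append(headers, h)
		}
	}
	return headers
}

func main() {
	row := kafkaRow{ID: "abc123", Name: "my-kafka", OpenshiftCluster: "Red Hat Infrastructure"}
	fmt.Println(strings.Join(headersOf(row), " | "))
	fmt.Printf("%s | %s | %s\n", row.ID, row.Name, row.OpenshiftCluster)
}
```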
14 changes: 7 additions & 7 deletions pkg/core/localize/locales/en/cmd/dedicated.en.toml
@@ -1,10 +1,10 @@
[dedicated.deregisterCluster.cmd.shortDescription]
- one = 'Deregister a Hybrid OpenShift cluster from use with Red Hat OpenShift Streams for Apache Kafka'
+ one = 'Deregister a OpenShift cluster from use with Red Hat OpenShift Streams for Apache Kafka'

[dedicated.deregisterCluster.cmd.longDescription]
one = '''
Removes the ability to provision your own Kafka instances on your OpenShift cluster, this command will deregister your
- Hybrid OpenShift cluster from use with Red Hat OpenShift Streams for Apache Kafka.
+ OpenShift cluster from use with Red Hat OpenShift Streams for Apache Kafka.
'''

[dedicated.deregisterCluster.cmd.example]
@@ -93,7 +93,7 @@ There will be N/3 streaming units in your Kafka cluster, where N is the machine
one = 'Using the valid machine pool:'

[dedicated.cmd.shortDescription]
- one = 'Manage your Hybrid OpenShift clusters which host your Kafka instances'
+ one = 'Manage your OpenShift clusters which host your Kafka instances'

[dedicated.cmd.longDescription]
one = '''
@@ -114,7 +114,7 @@ terraforming your cluster for use with your Kafka instances.
'''

[dedicated.deregisterCluster.kafka.delete.warning]
- one = 'To deregister a Hybrid OpenShift cluster all Kafka instances must be deleted'
+ one = 'To deregister a OpenShift cluster all Kafka instances must be deleted'

[dedicated.deregisterCluster.kafka.delete.failed]
one = 'There was an unexpected error when deleting the Kafka instance'
@@ -129,10 +129,10 @@ one = 'The API URL of the OpenShift Cluster Management API'
one = 'The access token to use to authenticate with the OpenShift Cluster Management API'

[dedicated.registerCluster.flag.pageNumber.description]
- one = 'The page number to use when listing Hybrid OpenShift clusters'
+ one = 'The page number to use when listing OpenShift clusters'

[dedicated.registerCluster.flag.pageSize.description]
- one = 'The page size to use when listing Hybrid OpenShift clusters'
+ one = 'The page size to use when listing OpenShift clusters'

[dedicated.list.cmd.shortDescription]
one = 'List all OpenShift clusters registered with Red Hat OpenShift Streams for Apache Kafka'
@@ -153,7 +153,7 @@ rhoas dedicated list
one = 'No registered OpenShift clusters found'

[dedicated.list.error.permissionDenied]
- one = 'You do not have permissions to list Hybrid clusters'
+ one = 'You do not have permissions to list clusters'

[dedicated.deregisterCluster.error.403]
one = 'You do not have permissions to deregister this cluster'
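The keys edited here follow the message-file layout used by go-i18n style libraries: a [message.id] table whose one = '...' entry is the singular plural form, resolved by the Localizer calls seen in the Go diffs above (for example kafka.list.output.openshiftCluster.redhat). A hedged sketch of loading such a file, assuming the nicksnyder/go-i18n library is the backing implementation (the file path is illustrative):

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
	"github.com/nicksnyder/go-i18n/v2/i18n"
	"golang.org/x/text/language"
)

func main() {
	bundle := i18n.NewBundle(language.English)
	bundle.RegisterUnmarshalFunc("toml", toml.Unmarshal)
	// Illustrative path; the real files live under pkg/core/localize/locales/en/cmd/.
	bundle.MustLoadMessageFile("dedicated.en.toml")

	localizer := i18n.NewLocalizer(bundle, "en")
	msg := localizer.MustLocalize(&i18n.LocalizeConfig{
		MessageID:   "dedicated.cmd.shortDescription",
		PluralCount: 1, // selects the 'one' form these files define
	})
	fmt.Println(msg) // "Manage your OpenShift clusters which host your Kafka instances"
}
```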
10 changes: 5 additions & 5 deletions pkg/core/localize/locales/en/cmd/kafka.en.toml
@@ -435,8 +435,8 @@ one = 'no marketplace quota available for given provider'
[kafka.create.error.notEnoughQuota]
one = 'you do not have enough quota to create a kafka of size {{.Size}}'

- [kafka.create.error.noEnterpriseQuota]
- one = 'you do not have any enterprise quota availble'
+ [kafka.create.error.noStandardQuota]
+ one = 'you do not have any standard quota availble'

[kafka.create.error.noQuotaLeft]
one = 'you have no remaining quoata to create a Kafka instance'
@@ -477,7 +477,7 @@ one = 'no marketplace quota found with the specified marketplace provider and ac
one = 'multiple cloud accounts found, please specify "--marketplace" and "--marketplace-account-id"'

[kafka.create.provider.error.onlyEnterpriseQuota]
- one = 'there is only quota availble for enterprise, cannot use Red Hat infrastructure'
+ one = 'there is only quota availble for standard Kafka instances, cannot use legacy Red Hat infrastructure'

[kafka.delete.cmd.shortDescription]
description = "Short description for command"
@@ -677,7 +677,7 @@ one = 'Text search to filter the Kafka instances by name, owner, cloud_provider,
description = 'Debug message when filtering the list of Kafka instances'
one = 'Filtering Kafka instances with the query "{{.Search}}"'

- [kafka.list.output.customerCloud.redhat]
+ [kafka.list.output.openshiftCluster.redhat]
one = 'Red Hat Infrastructure'

[kafka.topic.common.flag.name.description]
@@ -1197,7 +1197,7 @@ one = 'ID of the Kafka instance owner'

[kafka.create.flag.clusterId.description]
description = 'Description for the --cluster-id flag'
- one = 'ID of the Customer-Cloud data plane cluster to create the Kafka instance on.'
+ one = 'ID of the Openshift Cluster data plane to create the Kafka instance on.'

[kafka.create.input.cluster.selectClusterMessage]
one = 'Please select the cluster to provision your Kafka Instance on:'
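Several messages in kafka.en.toml carry placeholders such as {{.Size}} and {{.Search}}, which is Go's text/template syntax; the localizer fills them in with caller-supplied values at lookup time. A small standalone sketch of that substitution (the message string is copied from the file above; the value "x2" is just an example):

```go
package main

import (
	"fmt"
	"os"
	"text/template"
)

func main() {
	// kafka.create.error.notEnoughQuota, with {{.Size}} filled from template data.
	msg := "you do not have enough quota to create a kafka of size {{.Size}}"
	tmpl := template.Must(template.New("notEnoughQuota").Parse(msg))
	if err := tmpl.Execute(os.Stdout, map[string]string{"Size": "x2"}); err != nil {
		fmt.Println("template error:", err)
	}
	fmt.Println()
}
```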