Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 54 additions & 4 deletions src/codelens/flinkSqlProvider.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -221,11 +221,16 @@ describe("codelens/flinkSqlProvider.ts FlinkSqlCodelensProvider", () => {

const codeLenses: CodeLens[] = await provider.provideCodeLenses(fakeDocument);

assert.strictEqual(codeLenses.length, 3);
assert.strictEqual(codeLenses.length, 4);

const poolLens = codeLenses[0];
const dbLens = codeLenses[1];
const resetLens = codeLenses[2];
const submitLens = codeLenses[0];
const poolLens = codeLenses[1];
const dbLens = codeLenses[2];
const resetLens = codeLenses[3];

assert.strictEqual(submitLens.command?.command, "confluent.statements.create");
assert.strictEqual(submitLens.command?.title, "▶️ Submit Statement");
assert.deepStrictEqual(submitLens.command?.arguments, [fakeDocument.uri, pool, undefined]);

assert.strictEqual(dbLens.command?.command, "confluent.document.flinksql.setCCloudDatabase");
assert.strictEqual(dbLens.command?.title, "Set Catalog & Database");
Expand Down Expand Up @@ -296,6 +301,51 @@ describe("codelens/flinkSqlProvider.ts FlinkSqlCodelensProvider", () => {
assert.strictEqual(resetLens.command?.title, "Clear Settings");
assert.deepStrictEqual(resetLens.command?.arguments, [fakeDocument.uri]);
});

it("should provide 'Submit Statement' codelens when only a compute pool is set (without database)", async () => {
const pool: CCloudFlinkComputePool = TEST_CCLOUD_FLINK_COMPUTE_POOL;
// simulate stored compute pool metadata (but no database)
resourceManagerStub.getUriMetadata.resolves({
[UriMetadataKeys.FLINK_COMPUTE_POOL_ID]: pool.id,
// no database metadata set
});
const envWithPool: CCloudEnvironment = new CCloudEnvironment({
...TEST_CCLOUD_ENVIRONMENT,
flinkComputePools: [pool],
});
ccloudLoaderStub.getEnvironments.resolves([envWithPool]);

const codeLenses: CodeLens[] = await provider.provideCodeLenses(fakeDocument);

assert.strictEqual(codeLenses.length, 4);

const submitLens = codeLenses[0];
const poolLens = codeLenses[1];
const dbLens = codeLenses[2];
const resetLens = codeLenses[3];

assert.strictEqual(submitLens.command?.command, "confluent.statements.create");
assert.strictEqual(submitLens.command?.title, "▶️ Submit Statement");
assert.deepStrictEqual(submitLens.command?.arguments, [fakeDocument.uri, pool, undefined]);

assert.strictEqual(
poolLens.command?.command,
"confluent.document.flinksql.setCCloudComputePool",
);
assert.strictEqual(poolLens.command?.title, pool.name);
assert.deepStrictEqual(poolLens.command?.arguments, [fakeDocument.uri, undefined]);

assert.strictEqual(dbLens.command?.command, "confluent.document.flinksql.setCCloudDatabase");
assert.strictEqual(dbLens.command?.title, "Set Catalog & Database");
assert.deepStrictEqual(dbLens.command?.arguments, [fakeDocument.uri, pool]);

assert.strictEqual(
resetLens.command?.command,
"confluent.document.flinksql.resetCCloudMetadata",
);
assert.strictEqual(resetLens.command?.title, "Clear Settings");
assert.deepStrictEqual(resetLens.command?.arguments, [fakeDocument.uri]);
});
});

describe("codelens/flinkSqlProvider.ts getComputePoolFromMetadata()", () => {
Expand Down
4 changes: 2 additions & 2 deletions src/codelens/flinkSqlProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ export class FlinkSqlCodelensProvider extends DisposableCollection implements Co
};
const resetLens = new CodeLens(range, resetCommand);

if (computePool && database) {
if (computePool) {
const submitCommand: Command = {
title: "▶️ Submit Statement",
command: "confluent.statements.create",
Expand All @@ -151,7 +151,7 @@ export class FlinkSqlCodelensProvider extends DisposableCollection implements Co
// show the "Submit Statement" | <current pool> | <current catalog+db> codelenses
codeLenses.push(submitLens, computePoolLens, databaseLens, resetLens);
} else {
// don't show the submit codelens if we don't have a compute pool and database
// don't show the submit codelens if we don't have a compute pool
codeLenses.push(computePoolLens, databaseLens, resetLens);
}

Expand Down
12 changes: 5 additions & 7 deletions src/commands/flinkStatements.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,8 @@ export async function viewStatementSqlCommand(statement: FlinkStatement): Promis
* 2) Create **statement name** (auto-generated from template pattern, but user can override)
* 3) (If no `pool` is passed): show a quickpick to **choose a Flink compute pool** to send the statement
* 4) (if no `database` is passed): show a quickpick to **choose a database** (Kafka cluster) to
* submit along with the **catalog name** (the environment, inferable from the chosen database).
 * submit along with the **catalog name** (the environment, inferable from the chosen database);
 * the user can choose to skip this, but their query will then only work if table names are fully qualified.
* 5) Submit!
* 6) Show error notification for any submission errors.
* 7) Refresh the statements view if the view is focused on the chosen compute pool.
Expand Down Expand Up @@ -121,11 +122,8 @@ export async function submitFlinkStatementCommand(
const currentDatabaseKafkaCluster: KafkaCluster | undefined = validDatabaseProvided
? database
: await flinkDatabaseQuickpick(computePool);
if (!currentDatabaseKafkaCluster) {
funcLogger.debug("User canceled the default database quickpick");
return;
}
const currentDatabase = currentDatabaseKafkaCluster.name;
  // Database selection is now optional — the user can dismiss the quickpick and submit without it
const currentDatabase = currentDatabaseKafkaCluster?.name;

// 5. Prep to submit, submit.
const submission: IFlinkStatementSubmitParameters = {
Expand All @@ -135,7 +133,7 @@ export async function submitFlinkStatementCommand(
hidden: false, // Do not create a hidden statement, the user authored it.
properties: new FlinkSpecProperties({
currentDatabase,
currentCatalog: currentDatabaseKafkaCluster.environmentId,
currentCatalog: currentDatabaseKafkaCluster?.environmentId,
localTimezone: localTimezoneOffset(),
}),
};
Expand Down
13 changes: 9 additions & 4 deletions src/quickpicks/kafkaClusters.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -220,12 +220,17 @@ describe("flinkDatabaseQuickPick", () => {
await flinkDatabaseQuickpick(computePool);
const itemsCalledWith = showQuickPickStub.getCall(0).args[0];
// one separator for the single environment, one for the single cloud
// cluster in same provider/region.
assert.strictEqual(itemsCalledWith.length, 2);
// cluster in same provider/region, plus separator with message and skip option
assert.strictEqual(itemsCalledWith.length, 4);
assert.strictEqual(itemsCalledWith[0].kind, QuickPickItemKind.Separator);
// other two items are the clusters. Their description should be the id.
// and their .value should be the cluster.
// cluster item should be the second item (after environment separator)
assert.strictEqual(itemsCalledWith[1].description, ccloudClusters[0].id);
assert.strictEqual(itemsCalledWith[1].value, TEST_CCLOUD_KAFKA_CLUSTER);
// third item should be the separator before skip with message
assert.strictEqual(itemsCalledWith[2].kind, QuickPickItemKind.Separator);
assert.strictEqual(itemsCalledWith[2].label, "Must use fully qualified table name");
// fourth item should be the skip option
assert.strictEqual(itemsCalledWith[3].label, "Skip");
assert.strictEqual(itemsCalledWith[3].value, null);
});
});
19 changes: 18 additions & 1 deletion src/quickpicks/kafkaClusters.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@
placeHolder?: string;
/** Function to filter the list of Kafka clusters before making quickpick items. */
filter?: KafkaClusterFilter;
  /** When true, adds a "Skip" option at the bottom of the list; selecting it resolves to `null` (deliberate skip, distinct from `undefined` for cancellation) */
allowSkip?: boolean;
};

/**
Expand All @@ -56,7 +58,7 @@
* ---------------------------------- Other: directEnv1
* direct-cluster1 (direct-cluster-id1)
*/
export async function kafkaClusterQuickPick(

Check failure on line 61 in src/quickpicks/kafkaClusters.ts

View check run for this annotation

SonarQube-Confluent / vscode Sonarqube Results

src/quickpicks/kafkaClusters.ts#L61

Refactor this function to reduce its Cognitive Complexity from 29 to the 15 allowed.
options: KafkaClusterQuickPickOptions = {},
): Promise<KafkaCluster | undefined> {
const environments: Environment[] = [];
Expand Down Expand Up @@ -157,6 +159,20 @@
});
}

// Add skip option at the bottom if requested
if (options.allowSkip) {
clusterItems.push({
kind: QuickPickItemKind.Separator,
label: "Must use fully qualified table name",
});
clusterItems.push({
label: "Skip",
description: "Continue without selecting a database",
iconPath: new ThemeIcon("testing-skipped-icon"),
value: null as any,
Copy link

Copilot AI Aug 25, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using null as any bypasses TypeScript's type safety. Consider defining the type properly or using undefined consistently with the return type KafkaCluster | undefined.

Suggested change
value: null as any,
value: undefined,

Copilot uses AI. Check for mistakes.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@derek1ee I disagree with Copilot here and would rather we use null explicitly so the extension can treat it as "user intentionally chose to skip" rather than the current cancellation treatment.

});
}

// prompt the user to select a Kafka Cluster
const chosenClusterItem: QuickPickItemWithValue<KafkaCluster> | undefined =
await window.showQuickPick(clusterItems, {
Expand All @@ -171,14 +187,15 @@
* @param computePool The compute pool to use as the context for the quickpick (limits to clusters in same cloud provider/region).
* @param placeholder Optionally override the placeholder text for the quickpick.
* Defaults to "Select the Kafka cluster to use as the default database for the statement".
* @returns chosen Kafka cluster or undefined if the user cancels the quickpick.
* @returns chosen Kafka cluster or undefined if the user cancels the quickpick or selects "Skip".
Copy link

Copilot AI Aug 25, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The JSDoc comment update is inconsistent with the actual return behavior. When "Skip" is selected, the function returns null (due to value: null as any), but the documentation says it returns undefined.

Copilot uses AI. Check for mistakes.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
* @returns chosen Kafka cluster or undefined if the user cancels the quickpick or selects "Skip".
 * @returns chosen Kafka cluster, `null` if the user chooses "Skip" (for Flink SQL statements), or undefined if the user cancels.

*/
export async function flinkDatabaseQuickpick(
computePool: CCloudFlinkComputePool,
placeholder: string = "Select the Kafka cluster to use as the default database for the statement",
): Promise<KafkaCluster | undefined> {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should update the return type of kafkaClusterQuickPick() as well as here:

Suggested change
): Promise<KafkaCluster | undefined> {
): Promise<KafkaCluster | null | undefined> {

return await kafkaClusterQuickPick({
placeHolder: placeholder,
allowSkip: true,
Copy link
Contributor

@shouples shouples Aug 26, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's also add allowSkip as a parameter to the flinkDatabaseQuickpick() function so it can be passed through from any callers. Currently, any callers of flinkDatabaseQuickpick() (like setting Flink defaults for the language server integration) will show the option to skip, and we only want this for the codelens implementation and setting document metadata, right?

filter: (cluster: KafkaCluster) => {
if (!isCCloud(cluster)) {
// Only CCloud clusters are supported for Flink compute pools.
Expand Down