
Commit 666e25b

Merge branch 'main' of github.com:databricks/cli into config-sync-new-defaults

2 parents: 854c38d + 78852ba

12 files changed (+96, -16 lines)

Makefile

Lines changed: 1 addition & 1 deletion

@@ -183,7 +183,7 @@ generate-direct-clean:
 bundle/direct/dresources/apitypes.generated.yml: ./bundle/direct/tools/generate_apitypes.py .codegen/openapi.json acceptance/bundle/refschema/out.fields.txt
 	python3 $^ > $@
 
-bundle/direct/dresources/resources.generated.yml: ./bundle/direct/tools/generate_resources.py .codegen/openapi.json bundle/direct/dresources/apitypes.generated.yml acceptance/bundle/refschema/out.fields.txt
+bundle/direct/dresources/resources.generated.yml: ./bundle/direct/tools/generate_resources.py .codegen/openapi.json bundle/direct/dresources/apitypes.generated.yml bundle/direct/dresources/apitypes.yml acceptance/bundle/refschema/out.fields.txt
 	python3 $^ > $@
 
 .PHONY: lint lintfull tidy lintcheck fmt fmtfull test test-unit test-acc test-slow test-slow-unit test-slow-acc cover showcover build snapshot snapshot-release schema integration integration-short acc-cover acc-showcover docs ws wsfix links checks test-update test-update-templates generate-out-test-toml test-update-aws test-update-all generate-validation
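
For orientation, this is roughly what the updated rule expands to under GNU Make's automatic variables ($^ is the full prerequisite list in order, $@ is the target), so the new apitypes.yml prerequisite simply becomes an extra positional argument to the generator script:

python3 ./bundle/direct/tools/generate_resources.py .codegen/openapi.json bundle/direct/dresources/apitypes.generated.yml bundle/direct/dresources/apitypes.yml acceptance/bundle/refschema/out.fields.txt > bundle/direct/dresources/resources.generated.yml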

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -7,6 +7,7 @@
 ### CLI
 
 * Add Lakebase Autoscaling support to `psql` command ([#4399](https://github.com/databricks/cli/pull/4399))
+* Add `workspace_id` to the profiles command ([#4404](https://github.com/databricks/cli/pull/4404))
 
 ### Bundles
 

acceptance/cmd/auth/profiles/out.test.toml

Lines changed: 5 additions & 0 deletions
Some generated files are not rendered by default.

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+
+=== Profiles with workspace_id (JSON output)
+{
+"profiles": [
+{
+"name":"workspace-profile",
+"host":"https://test.cloud.databricks.com",
+"cloud":"aws",
+"auth_type":"",
+"valid":false
+},
+{
+"name":"account-profile",
+"host":"https://accounts.cloud.databricks.com",
+"account_id":"test-account-123",
+"cloud":"aws",
+"auth_type":"",
+"valid":false
+},
+{
+"name":"unified-profile",
+"host":"https://unified.databricks.com",
+"account_id":"unified-account-456",
+"workspace_id":"[NUMID]",
+"cloud":"aws",
+"auth_type":"",
+"valid":false
+}
+]
+}

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
+sethome "./home"
+
+# Create profiles including one with workspace_id
+cat > "./home/.databrickscfg" <<EOF
+[workspace-profile]
+host = https://test.cloud.databricks.com
+
+[account-profile]
+host = https://accounts.cloud.databricks.com
+account_id = test-account-123
+
+[unified-profile]
+host = https://unified.databricks.com
+account_id = unified-account-456
+workspace_id = 987654321
+experimental_is_unified_host = true
+EOF
+
+title "Profiles with workspace_id (JSON output)\n"
+$CLI auth profiles --skip-validate --output json

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+Ignore = [
+  "home"
+]

bundle/direct/dresources/apitypes.generated.yml

Lines changed: 2 additions & 0 deletions
@@ -4,6 +4,8 @@ alerts: sql.AlertV2
 
 apps: apps.App
 
+catalogs: catalog.CreateCatalog
+
 clusters: compute.ClusterSpec
 
 dashboards: dashboards.Dashboard
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+# Override apitypes.generated.yml here.
+# This file is not auto-generated and can be manually edited.
+#
+# Set a value to null to remove a type:
+# jobs: null
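
Judging from the parse_apitypes change further down, this override file supports two kinds of entries: a non-null value replaces (or adds to) the generated mapping, and null removes it. A purely hypothetical illustration in the same comment style (neither the resource name nor the type below exists in the generated file):

# example_resource: examplepkg.ExampleType   (hypothetical: would replace the generated type)
# example_resource: null                     (hypothetical: would drop the generated entry)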

bundle/direct/dresources/resources.generated.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,8 @@ resources:
 - updater
 - url
 
+# catalogs: no api field behaviors
+
 # clusters: no api field behaviors
 
 dashboards:

bundle/direct/tools/generate_resources.py

Lines changed: 15 additions & 6 deletions
@@ -12,10 +12,19 @@
 import yaml
 
 
-def parse_apitypes(path):
-    """Parse apitypes.generated.yml to get resource types."""
-    data = yaml.safe_load(path.read_text())
-    return {resource: type_name for resource, type_name in data.items() if type_name}
+def parse_apitypes(generated_path, override_path):
+    """Parse apitypes.generated.yml and override with apitypes.yml."""
+    result = yaml.safe_load(generated_path.read_text()) or {}
+
+    # Override with non-generated apitypes.yml (null values remove entries)
+    override_data = yaml.safe_load(override_path.read_text()) or {}
+    for resource, type_name in override_data.items():
+        if type_name:
+            result[resource] = type_name
+        else:
+            result.pop(resource, None)
+
+    return result
 
 
 def parse_out_fields(path):
@@ -139,11 +148,11 @@ def main():
     parser = argparse.ArgumentParser(description="Generate resources YAML from OpenAPI schema")
     parser.add_argument("apischema", type=Path, help="Path to OpenAPI schema JSON file")
     parser.add_argument("apitypes", type=Path, help="Path to apitypes.generated.yml file")
-    # TODO: add non-generated apitypes.yml here once the need to override generated ones arises
+    parser.add_argument("apitypes_override", type=Path, help="Path to apitypes.yml override file")
     parser.add_argument("out_fields", type=Path, help="Path to out.fields.txt file")
     args = parser.parse_args()
 
-    resource_types = parse_apitypes(args.apitypes)
+    resource_types = parse_apitypes(args.apitypes, args.apitypes_override)
     state_fields = parse_out_fields(args.out_fields)
     schemas = json.loads(args.apischema.read_text()).get("components", {}).get("schemas", {})
 
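
To make the new merge rule concrete, here is a minimal, self-contained Python sketch; alerts and apps mirror the generated file shown earlier in this commit, while the jobs entry and both override values are made up for the example. A non-null override replaces or adds a mapping, a null override removes the generated entry, and everything else passes through unchanged.

import yaml

def merge_apitypes(generated_yaml, override_yaml):
    """Same merge rule as parse_apitypes above, but over in-memory strings for illustration."""
    result = yaml.safe_load(generated_yaml) or {}
    for resource, type_name in (yaml.safe_load(override_yaml) or {}).items():
        if type_name:
            result[resource] = type_name  # non-null override replaces or adds the mapping
        else:
            result.pop(resource, None)  # null override removes the generated entry
    return result

generated = """
alerts: sql.AlertV2
apps: apps.App
jobs: jobs.JobSettings
"""
override = """
apps: apps.CustomApp  # hypothetical replacement
jobs: null            # removes the generated entry
"""

print(merge_apitypes(generated, override))
# -> {'alerts': 'sql.AlertV2', 'apps': 'apps.CustomApp'}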
