From 5071443b66dd84dbb9a0eafa92e83ce8c7e1c7ea Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Fri, 11 Aug 2023 15:02:21 +0200 Subject: [PATCH] Release v0.5.0 * Fixed OAuth M2M corner case in `WorkspaceClient` where `DATABRICKS_ACCOUNT_ID` is present in the environment ([#273](https://github.com/databricks/databricks-sdk-py/pull/273)). * Added `connection_pool_size` configuration property (preview) ([#276](https://github.com/databricks/databricks-sdk-py/pull/276)). --- .codegen/_openapi_sha | 2 +- CHANGELOG.md | 6 + databricks/sdk/service/catalog.py | 117 ++++++++++++------ databricks/sdk/service/compute.py | 38 +++++- databricks/sdk/service/iam.py | 42 +++---- databricks/sdk/service/jobs.py | 30 ++++- databricks/sdk/version.py | 2 +- docs/account/groups.rst | 4 +- docs/account/service_principals.rst | 4 +- docs/account/storage_credentials.rst | 6 +- docs/account/users.rst | 4 +- docs/workspace/clusters.rst | 7 +- docs/workspace/connections.rst | 10 +- docs/workspace/dbfs.rst | 2 +- docs/workspace/groups.rst | 4 +- docs/workspace/instance_pools.rst | 12 +- docs/workspace/jobs.rst | 20 ++- docs/workspace/permissions.rst | 12 +- docs/workspace/service_principals.rst | 4 +- docs/workspace/users.rst | 4 +- .../permissions/set_generic_permissions.py | 12 +- 21 files changed, 227 insertions(+), 115 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 202e5766..0bba438e 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -386b65ecdc825b9c3ed4aa7ca88e2e5baf9d87df \ No newline at end of file +1e3533f94335f0e6c5d9262bc1fea95b3ddcb0e1 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index f5310f35..c3188c83 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Version changelog +## 0.5.0 + +* Fixed OAuth M2M corner case in `WorkspaceClient` where `DATABRICKS_ACCOUNT_ID` is present in the environment ([#273](https://github.com/databricks/databricks-sdk-py/pull/273)). +* Added `connection_pool_size` configuration property (preview) ([#276](https://github.com/databricks/databricks-sdk-py/pull/276)). + + ## 0.4.0 To simplify documentation and management of object permissions, this release features a major reorganization of how permissions APIs are structured in the SDK. Rather than using a single permissions.get() API for all services, each service supporting permissions has its own permissions APIs. 
Follow these steps to migrate to the current SDK: diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 65f1c887..7a028164 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -91,6 +91,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'AccountsMetastoreInfo': return cls(metastore_info=_from_dict(d, 'metastore_info', MetastoreInfo)) +@dataclass +class AccountsStorageCredentialInfo: + credential_info: Optional['StorageCredentialInfo'] = None + + def as_dict(self) -> dict: + body = {} + if self.credential_info: body['credential_info'] = self.credential_info.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AccountsStorageCredentialInfo': + return cls(credential_info=_from_dict(d, 'credential_info', StorageCredentialInfo)) + + @dataclass class AccountsUpdateMetastore: metastore_id: Optional[str] = None @@ -388,10 +402,13 @@ class ConnectionInfo: full_name: Optional[str] = None metastore_id: Optional[str] = None name: Optional[str] = None - options_kvpairs: Optional['Dict[str,str]'] = None + options: Optional['Dict[str,str]'] = None owner: Optional[str] = None - properties_kvpairs: Optional['Dict[str,str]'] = None + properties: Optional['Dict[str,str]'] = None + provisioning_state: Optional['ProvisioningState'] = None read_only: Optional[bool] = None + securable_kind: Optional['ConnectionInfoSecurableKind'] = None + securable_type: Optional[str] = None updated_at: Optional[int] = None updated_by: Optional[str] = None url: Optional[str] = None @@ -407,10 +424,13 @@ def as_dict(self) -> dict: if self.full_name is not None: body['full_name'] = self.full_name if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name - if self.options_kvpairs: body['options_kvpairs'] = self.options_kvpairs + if self.options: body['options'] = self.options if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.properties: body['properties'] = self.properties + if self.provisioning_state is not None: body['provisioning_state'] = self.provisioning_state.value if self.read_only is not None: body['read_only'] = self.read_only + if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value + if self.securable_type is not None: body['securable_type'] = self.securable_type if self.updated_at is not None: body['updated_at'] = self.updated_at if self.updated_by is not None: body['updated_by'] = self.updated_by if self.url is not None: body['url'] = self.url @@ -427,15 +447,32 @@ def from_dict(cls, d: Dict[str, any]) -> 'ConnectionInfo': full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), - options_kvpairs=d.get('options_kvpairs', None), + options=d.get('options', None), owner=d.get('owner', None), - properties_kvpairs=d.get('properties_kvpairs', None), + properties=d.get('properties', None), + provisioning_state=_enum(d, 'provisioning_state', ProvisioningState), read_only=d.get('read_only', None), + securable_kind=_enum(d, 'securable_kind', ConnectionInfoSecurableKind), + securable_type=d.get('securable_type', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), url=d.get('url', None)) +class ConnectionInfoSecurableKind(Enum): + """Kind of connection securable.""" + + CONNECTION_BIGQUERY = 'CONNECTION_BIGQUERY' + CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS' 
+ CONNECTION_MYSQL = 'CONNECTION_MYSQL' + CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG' + CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL' + CONNECTION_REDSHIFT = 'CONNECTION_REDSHIFT' + CONNECTION_SNOWFLAKE = 'CONNECTION_SNOWFLAKE' + CONNECTION_SQLDW = 'CONNECTION_SQLDW' + CONNECTION_SQLSERVER = 'CONNECTION_SQLSERVER' + + class ConnectionType(Enum): """The type of connection.""" @@ -484,10 +521,10 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateCatalog': class CreateConnection: name: str connection_type: 'ConnectionType' - options_kvpairs: 'Dict[str,str]' + options: 'Dict[str,str]' comment: Optional[str] = None owner: Optional[str] = None - properties_kvpairs: Optional['Dict[str,str]'] = None + properties: Optional['Dict[str,str]'] = None read_only: Optional[bool] = None def as_dict(self) -> dict: @@ -495,9 +532,9 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.connection_type is not None: body['connection_type'] = self.connection_type.value if self.name is not None: body['name'] = self.name - if self.options_kvpairs: body['options_kvpairs'] = self.options_kvpairs + if self.options: body['options'] = self.options if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.properties: body['properties'] = self.properties if self.read_only is not None: body['read_only'] = self.read_only return body @@ -506,9 +543,9 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateConnection': return cls(comment=d.get('comment', None), connection_type=_enum(d, 'connection_type', ConnectionType), name=d.get('name', None), - options_kvpairs=d.get('options_kvpairs', None), + options=d.get('options', None), owner=d.get('owner', None), - properties_kvpairs=d.get('properties_kvpairs', None), + properties=d.get('properties', None), read_only=d.get('read_only', None)) @@ -1761,6 +1798,16 @@ def from_dict(cls, d: Dict[str, any]) -> 'PrivilegeAssignment': PropertiesKvPairs = Dict[str, str] +class ProvisioningState(Enum): + """Status of an asynchronously provisioned resource.""" + + ACTIVE = 'ACTIVE' + DELETING = 'DELETING' + FAILED = 'FAILED' + PROVISIONING = 'PROVISIONING' + STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' + + @dataclass class SchemaInfo: catalog_name: Optional[str] = None @@ -1849,7 +1896,7 @@ class SecurableType(Enum): class SseEncryptionDetails: """Server-Side Encryption properties for clients communicating with AWS s3.""" - algorithm: 'SseEncryptionDetailsAlgorithm' + algorithm: Optional['SseEncryptionDetailsAlgorithm'] = None aws_kms_key_arn: Optional[str] = None def as_dict(self) -> dict: @@ -2189,21 +2236,19 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateCatalog': @dataclass class UpdateConnection: name: str - options_kvpairs: 'Dict[str,str]' + options: 'Dict[str,str]' name_arg: Optional[str] = None def as_dict(self) -> dict: body = {} if self.name is not None: body['name'] = self.name if self.name_arg is not None: body['name_arg'] = self.name_arg - if self.options_kvpairs: body['options_kvpairs'] = self.options_kvpairs + if self.options: body['options'] = self.options return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'UpdateConnection': - return cls(name=d.get('name', None), - name_arg=d.get('name_arg', None), - options_kvpairs=d.get('options_kvpairs', None)) + return cls(name=d.get('name', None), name_arg=d.get('name_arg', None), options=d.get('options', None)) @dataclass @@ -2855,7 +2900,7 @@ def __init__(self, api_client): def 
create(self, metastore_id: str, *, - credential_info: Optional[CreateStorageCredential] = None) -> StorageCredentialInfo: + credential_info: Optional[CreateStorageCredential] = None) -> AccountsStorageCredentialInfo: """Create a storage credential. Creates a new storage credential. The request object is specific to the cloud: @@ -2870,7 +2915,7 @@ def create(self, Unity Catalog metastore ID :param credential_info: :class:`CreateStorageCredential` (optional) - :returns: :class:`StorageCredentialInfo` + :returns: :class:`AccountsStorageCredentialInfo` """ body = {} if credential_info is not None: body['credential_info'] = credential_info.as_dict() @@ -2879,7 +2924,7 @@ def create(self, 'POST', f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials', body=body) - return StorageCredentialInfo.from_dict(json) + return AccountsStorageCredentialInfo.from_dict(json) def delete(self, metastore_id: str, name: str, *, force: Optional[bool] = None): """Delete a storage credential. @@ -2904,7 +2949,7 @@ def delete(self, metastore_id: str, name: str, *, force: Optional[bool] = None): f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/', query=query) - def get(self, metastore_id: str, name: str) -> StorageCredentialInfo: + def get(self, metastore_id: str, name: str) -> AccountsStorageCredentialInfo: """Gets the named storage credential. Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the @@ -2915,12 +2960,12 @@ def get(self, metastore_id: str, name: str) -> StorageCredentialInfo: :param name: str Name of the storage credential. - :returns: :class:`StorageCredentialInfo` + :returns: :class:`AccountsStorageCredentialInfo` """ json = self._api.do( 'GET', f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/') - return StorageCredentialInfo.from_dict(json) + return AccountsStorageCredentialInfo.from_dict(json) def list(self, metastore_id: str) -> ListStorageCredentialsResponse: """Get all storage credentials assigned to a metastore. @@ -2941,7 +2986,7 @@ def update(self, metastore_id: str, name: str, *, - credential_info: Optional[UpdateStorageCredential] = None) -> StorageCredentialInfo: + credential_info: Optional[UpdateStorageCredential] = None) -> AccountsStorageCredentialInfo: """Updates a storage credential. Updates a storage credential on the metastore. The caller must be the owner of the storage credential. @@ -2953,7 +2998,7 @@ def update(self, Name of the storage credential. :param credential_info: :class:`UpdateStorageCredential` (optional) - :returns: :class:`StorageCredentialInfo` + :returns: :class:`AccountsStorageCredentialInfo` """ body = {} if credential_info is not None: body['credential_info'] = credential_info.as_dict() @@ -2962,7 +3007,7 @@ def update(self, 'PUT', f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/', body=body) - return StorageCredentialInfo.from_dict(json) + return AccountsStorageCredentialInfo.from_dict(json) class CatalogsAPI: @@ -3119,11 +3164,11 @@ def __init__(self, api_client): def create(self, name: str, connection_type: ConnectionType, - options_kvpairs: Dict[str, str], + options: Dict[str, str], *, comment: Optional[str] = None, owner: Optional[str] = None, - properties_kvpairs: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, read_only: Optional[bool] = None) -> ConnectionInfo: """Create a connection. 
@@ -3136,13 +3181,13 @@ def create(self, Name of the connection. :param connection_type: :class:`ConnectionType` The type of connection. - :param options_kvpairs: Dict[str,str] + :param options: Dict[str,str] A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. :param owner: str (optional) Username of current owner of the connection. - :param properties_kvpairs: Dict[str,str] (optional) + :param properties: Dict[str,str] (optional) An object containing map of key-value properties attached to the connection. :param read_only: bool (optional) If the connection is read only. @@ -3153,9 +3198,9 @@ def create(self, if comment is not None: body['comment'] = comment if connection_type is not None: body['connection_type'] = connection_type.value if name is not None: body['name'] = name - if options_kvpairs is not None: body['options_kvpairs'] = options_kvpairs + if options is not None: body['options'] = options if owner is not None: body['owner'] = owner - if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs + if properties is not None: body['properties'] = properties if read_only is not None: body['read_only'] = read_only json = self._api.do('POST', '/api/2.1/unity-catalog/connections', body=body) @@ -3199,14 +3244,14 @@ def list(self) -> Iterator[ConnectionInfo]: json = self._api.do('GET', '/api/2.1/unity-catalog/connections') return [ConnectionInfo.from_dict(v) for v in json.get('connections', [])] - def update(self, name: str, options_kvpairs: Dict[str, str], name_arg: str) -> ConnectionInfo: + def update(self, name: str, options: Dict[str, str], name_arg: str) -> ConnectionInfo: """Update a connection. Updates the connection that matches the supplied name. :param name: str Name of the connection. - :param options_kvpairs: Dict[str,str] + :param options: Dict[str,str] A map of key-value properties attached to the securable. :param name_arg: str Name of the connection. 
@@ -3215,7 +3260,7 @@ def update(self, name: str, options_kvpairs: Dict[str, str], name_arg: str) -> C """ body = {} if name is not None: body['name'] = name - if options_kvpairs is not None: body['options_kvpairs'] = options_kvpairs + if options is not None: body['options'] = options json = self._api.do('PATCH', f'/api/2.1/unity-catalog/connections/{name_arg}', body=body) return ConnectionInfo.from_dict(json) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index a4250b81..cfd78042 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -1009,6 +1009,8 @@ class CreateCluster: cluster_name: Optional[str] = None cluster_source: Optional['ClusterSource'] = None custom_tags: Optional['Dict[str,str]'] = None + data_security_mode: Optional['DataSecurityMode'] = None + docker_image: Optional['DockerImage'] = None driver_instance_pool_id: Optional[str] = None driver_node_type_id: Optional[str] = None enable_elastic_disk: Optional[bool] = None @@ -1020,6 +1022,7 @@ class CreateCluster: num_workers: Optional[int] = None policy_id: Optional[str] = None runtime_engine: Optional['RuntimeEngine'] = None + single_user_name: Optional[str] = None spark_conf: Optional['Dict[str,str]'] = None spark_env_vars: Optional['Dict[str,str]'] = None ssh_public_keys: Optional['List[str]'] = None @@ -1038,6 +1041,8 @@ def as_dict(self) -> dict: if self.cluster_name is not None: body['cluster_name'] = self.cluster_name if self.cluster_source is not None: body['cluster_source'] = self.cluster_source.value if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value + if self.docker_image: body['docker_image'] = self.docker_image.as_dict() if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id @@ -1051,6 +1056,7 @@ def as_dict(self) -> dict: if self.num_workers is not None: body['num_workers'] = self.num_workers if self.policy_id is not None: body['policy_id'] = self.policy_id if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name if self.spark_conf: body['spark_conf'] = self.spark_conf if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars if self.spark_version is not None: body['spark_version'] = self.spark_version @@ -1069,6 +1075,8 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateCluster': cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), custom_tags=d.get('custom_tags', None), + data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), + docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), @@ -1080,6 +1088,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateCluster': num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), + single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), @@ -3894,6 +3903,8 @@ def create(self, cluster_name: 
Optional[str] = None, cluster_source: Optional[ClusterSource] = None, custom_tags: Optional[Dict[str, str]] = None, + data_security_mode: Optional[DataSecurityMode] = None, + docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, @@ -3905,6 +3916,7 @@ def create(self, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, + single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] = None, ssh_public_keys: Optional[List[str]] = None, @@ -3955,6 +3967,9 @@ def create(self, - Currently, Databricks allows at most 45 custom tags - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags + :param data_security_mode: :class:`DataSecurityMode` (optional) + This describes an enum + :param docker_image: :class:`DockerImage` (optional) :param driver_instance_pool_id: str (optional) The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. @@ -3995,6 +4010,8 @@ def create(self, :param runtime_engine: :class:`RuntimeEngine` (optional) Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine is inferred from spark_version. + :param single_user_name: str (optional) + Single user name if data_security_mode is `SINGLE_USER` :param spark_conf: Dict[str,str] (optional) An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via @@ -4032,6 +4049,8 @@ def create(self, if cluster_name is not None: body['cluster_name'] = cluster_name if cluster_source is not None: body['cluster_source'] = cluster_source.value if custom_tags is not None: body['custom_tags'] = custom_tags + if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value + if docker_image is not None: body['docker_image'] = docker_image.as_dict() if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk @@ -4044,6 +4063,7 @@ def create(self, if num_workers is not None: body['num_workers'] = num_workers if policy_id is not None: body['policy_id'] = policy_id if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value + if single_user_name is not None: body['single_user_name'] = single_user_name if spark_conf is not None: body['spark_conf'] = spark_conf if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars if spark_version is not None: body['spark_version'] = spark_version @@ -4067,6 +4087,8 @@ def create_and_wait( cluster_name: Optional[str] = None, cluster_source: Optional[ClusterSource] = None, custom_tags: Optional[Dict[str, str]] = None, + data_security_mode: Optional[DataSecurityMode] = None, + docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, @@ -4078,6 +4100,7 @@ def create_and_wait( num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: 
Optional[RuntimeEngine] = None, + single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] = None, ssh_public_keys: Optional[List[str]] = None, @@ -4092,6 +4115,8 @@ def create_and_wait( cluster_name=cluster_name, cluster_source=cluster_source, custom_tags=custom_tags, + data_security_mode=data_security_mode, + docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, @@ -4103,6 +4128,7 @@ def create_and_wait( num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, + single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, @@ -5233,9 +5259,9 @@ def create(self, :param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) - A list of preloaded Spark image versions for the pool. Pool-backed clusters started with the - preloaded Spark version will start faster. A list of available Spark versions can be retrieved by - using the :method:clusters/sparkVersions API call. + A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters + started with the preloaded Spark version will start faster. A list of available Spark versions can + be retrieved by using the :method:clusters/sparkVersions API call. :returns: :class:`CreateInstancePoolResponse` """ @@ -5344,9 +5370,9 @@ def edit(self, :param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) - A list of preloaded Spark image versions for the pool. Pool-backed clusters started with the - preloaded Spark version will start faster. A list of available Spark versions can be retrieved by - using the :method:clusters/sparkVersions API call. + A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters + started with the preloaded Spark version will start faster. A list of available Spark versions can + be retrieved by using the :method:clusters/sparkVersions API call. """ diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index f3a1e20e..70904534 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -295,20 +295,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'ObjectPermissions': class PartialUpdate: id: Optional[str] = None operations: Optional['List[Patch]'] = None - schema: Optional['List[PatchSchema]'] = None + schemas: Optional['List[PatchSchema]'] = None def as_dict(self) -> dict: body = {} if self.id is not None: body['id'] = self.id if self.operations: body['Operations'] = [v.as_dict() for v in self.operations] - if self.schema: body['schema'] = [v.value for v in self.schema] + if self.schemas: body['schemas'] = [v.value for v in self.schemas] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PartialUpdate': return cls(id=d.get('id', None), operations=_repeated(d, 'Operations', Patch), - schema=d.get('schema', None)) + schemas=d.get('schemas', None)) @dataclass @@ -1112,7 +1112,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, - schema: Optional[List[PatchSchema]] = None): + schemas: Optional[List[PatchSchema]] = None): """Update group details. Partially updates the details of a group. 
@@ -1120,14 +1120,14 @@ def patch(self, :param id: str Unique ID for a group in the Databricks account. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. """ body = {} if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schema is not None: body['schema'] = [v.value for v in schema] + if schemas is not None: body['schemas'] = [v.value for v in schemas] self._api.do('PATCH', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}', body=body) def update(self, @@ -1304,7 +1304,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, - schema: Optional[List[PatchSchema]] = None): + schemas: Optional[List[PatchSchema]] = None): """Update service principal details. Partially updates the details of a single service principal in the Databricks account. @@ -1312,14 +1312,14 @@ def patch(self, :param id: str Unique ID for a service principal in the Databricks account. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. """ body = {} if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schema is not None: body['schema'] = [v.value for v in schema] + if schemas is not None: body['schemas'] = [v.value for v in schemas] self._api.do('PATCH', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}', body=body) @@ -1513,7 +1513,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, - schema: Optional[List[PatchSchema]] = None): + schemas: Optional[List[PatchSchema]] = None): """Update user details. Partially updates a user resource by applying the supplied operations on specific user attributes. @@ -1521,14 +1521,14 @@ def patch(self, :param id: str Unique ID for a user in the Databricks account. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. """ body = {} if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schema is not None: body['schema'] = [v.value for v in schema] + if schemas is not None: body['schemas'] = [v.value for v in schemas] self._api.do('PATCH', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', body=body) def update(self, @@ -1727,7 +1727,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, - schema: Optional[List[PatchSchema]] = None): + schemas: Optional[List[PatchSchema]] = None): """Update group details. Partially updates the details of a group. @@ -1735,14 +1735,14 @@ def patch(self, :param id: str Unique ID for a group in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
""" body = {} if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schema is not None: body['schema'] = [v.value for v in schema] + if schemas is not None: body['schemas'] = [v.value for v in schemas] self._api.do('PATCH', f'/api/2.0/preview/scim/v2/Groups/{id}', body=body) def update(self, @@ -2051,7 +2051,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, - schema: Optional[List[PatchSchema]] = None): + schemas: Optional[List[PatchSchema]] = None): """Update service principal details. Partially updates the details of a single service principal in the Databricks workspace. @@ -2059,14 +2059,14 @@ def patch(self, :param id: str Unique ID for a service principal in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. """ body = {} if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schema is not None: body['schema'] = [v.value for v in schema] + if schemas is not None: body['schemas'] = [v.value for v in schemas] self._api.do('PATCH', f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body) def update(self, @@ -2278,7 +2278,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, - schema: Optional[List[PatchSchema]] = None): + schemas: Optional[List[PatchSchema]] = None): """Update user details. Partially updates a user resource by applying the supplied operations on specific user attributes. @@ -2286,14 +2286,14 @@ def patch(self, :param id: str Unique ID for a user in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. """ body = {} if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schema is not None: body['schema'] = [v.value for v in schema] + if schemas is not None: body['schemas'] = [v.value for v in schemas] self._api.do('PATCH', f'/api/2.0/preview/scim/v2/Users/{id}', body=body) def set_password_permissions( diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 14afc84e..201a600d 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -514,8 +514,14 @@ def from_dict(cls, d: Dict[str, any]) -> 'GitSnapshot': @dataclass class GitSource: - """An optional specification for a remote repository containing the notebooks used by this job's - notebook tasks.""" + """An optional specification for a remote Git repository containing the source code used by tasks. + Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. + + If `git_source` is set, these tasks retrieve the file from the remote repository by default. + However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. + + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks + are used, `git_source` must be defined on the job.""" git_url: str git_provider: 'GitProvider' @@ -2808,8 +2814,14 @@ def create(self, Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. 
:param git_source: :class:`GitSource` (optional) - An optional specification for a remote repository containing the notebooks used by this job's - notebook tasks. + An optional specification for a remote Git repository containing the source code used by tasks. + Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. + + If `git_source` is set, these tasks retrieve the file from the remote repository by default. + However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. + + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are + used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) An optional set of health rules that can be defined for this job. :param job_clusters: List[:class:`JobCluster`] (optional) @@ -3510,8 +3522,14 @@ def submit(self, An optional set of email addresses notified when the run begins or completes. The default behavior is to not send any emails. :param git_source: :class:`GitSource` (optional) - An optional specification for a remote repository containing the notebooks used by this job's - notebook tasks. + An optional specification for a remote Git repository containing the source code used by tasks. + Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. + + If `git_source` is set, these tasks retrieve the file from the remote repository by default. + However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. + + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are + used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) An optional set of health rules that can be defined for this job. :param idempotency_token: str (optional) diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index abeeedbf..2b8877c5 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.4.0' +__version__ = '0.5.0' diff --git a/docs/account/groups.rst b/docs/account/groups.rst index 134becf8..b3a86aa0 100644 --- a/docs/account/groups.rst +++ b/docs/account/groups.rst @@ -129,7 +129,7 @@ Account Groups :returns: Iterator over :class:`Group` - .. py:method:: patch(id [, operations, schema]) + .. py:method:: patch(id [, operations, schemas]) Update group details. @@ -138,7 +138,7 @@ Account Groups :param id: str Unique ID for a group in the Databricks account. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. diff --git a/docs/account/service_principals.rst b/docs/account/service_principals.rst index 8381d1cb..24032815 100644 --- a/docs/account/service_principals.rst +++ b/docs/account/service_principals.rst @@ -130,7 +130,7 @@ Account Service Principals :returns: Iterator over :class:`ServicePrincipal` - .. py:method:: patch(id [, operations, schema]) + .. py:method:: patch(id [, operations, schemas]) Update service principal details. @@ -139,7 +139,7 @@ Account Service Principals :param id: str Unique ID for a service principal in the Databricks account. 
:param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. diff --git a/docs/account/storage_credentials.rst b/docs/account/storage_credentials.rst index f2e22ef8..32d6b5c1 100644 --- a/docs/account/storage_credentials.rst +++ b/docs/account/storage_credentials.rst @@ -39,7 +39,7 @@ Account Storage Credentials Unity Catalog metastore ID :param credential_info: :class:`CreateStorageCredential` (optional) - :returns: :class:`StorageCredentialInfo` + :returns: :class:`AccountsStorageCredentialInfo` .. py:method:: delete(metastore_id, name [, force]) @@ -92,7 +92,7 @@ Account Storage Credentials :param name: str Name of the storage credential. - :returns: :class:`StorageCredentialInfo` + :returns: :class:`AccountsStorageCredentialInfo` .. py:method:: list(metastore_id) @@ -154,5 +154,5 @@ Account Storage Credentials Name of the storage credential. :param credential_info: :class:`UpdateStorageCredential` (optional) - :returns: :class:`StorageCredentialInfo` + :returns: :class:`AccountsStorageCredentialInfo` \ No newline at end of file diff --git a/docs/account/users.rst b/docs/account/users.rst index 693674e4..d75c1b35 100644 --- a/docs/account/users.rst +++ b/docs/account/users.rst @@ -152,7 +152,7 @@ Account Users :returns: Iterator over :class:`User` - .. py:method:: patch(id [, operations, schema]) + .. py:method:: patch(id [, operations, schemas]) Usage: @@ -184,7 +184,7 @@ Account Users :param id: str Unique ID for a user in the Databricks account. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. diff --git a/docs/workspace/clusters.rst b/docs/workspace/clusters.rst index 37f08be6..0af000eb 100644 --- a/docs/workspace/clusters.rst +++ b/docs/workspace/clusters.rst @@ -67,7 +67,7 @@ Clusters - .. py:method:: create(spark_version [, apply_policy_default_values, autoscale, autotermination_minutes, aws_attributes, azure_attributes, cluster_log_conf, cluster_name, cluster_source, custom_tags, driver_instance_pool_id, driver_node_type_id, enable_elastic_disk, enable_local_disk_encryption, gcp_attributes, init_scripts, instance_pool_id, node_type_id, num_workers, policy_id, runtime_engine, spark_conf, spark_env_vars, ssh_public_keys, workload_type]) + .. py:method:: create(spark_version [, apply_policy_default_values, autoscale, autotermination_minutes, aws_attributes, azure_attributes, cluster_log_conf, cluster_name, cluster_source, custom_tags, data_security_mode, docker_image, driver_instance_pool_id, driver_node_type_id, enable_elastic_disk, enable_local_disk_encryption, gcp_attributes, init_scripts, instance_pool_id, node_type_id, num_workers, policy_id, runtime_engine, single_user_name, spark_conf, spark_env_vars, ssh_public_keys, workload_type]) Usage: @@ -139,6 +139,9 @@ Clusters - Currently, Databricks allows at most 45 custom tags - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags + :param data_security_mode: :class:`DataSecurityMode` (optional) + This describes an enum + :param docker_image: :class:`DockerImage` (optional) :param driver_instance_pool_id: str (optional) The optional ID of the instance pool for the driver of the cluster belongs. 
The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. @@ -179,6 +182,8 @@ Clusters :param runtime_engine: :class:`RuntimeEngine` (optional) Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine is inferred from spark_version. + :param single_user_name: str (optional) + Single user name if data_security_mode is `SINGLE_USER` :param spark_conf: Dict[str,str] (optional) An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via diff --git a/docs/workspace/connections.rst b/docs/workspace/connections.rst index 4f439300..25283fea 100644 --- a/docs/workspace/connections.rst +++ b/docs/workspace/connections.rst @@ -11,7 +11,7 @@ Connections objects based on cloud storage. Users may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings. - .. py:method:: create(name, connection_type, options_kvpairs [, comment, owner, properties_kvpairs, read_only]) + .. py:method:: create(name, connection_type, options [, comment, owner, properties, read_only]) Create a connection. @@ -24,13 +24,13 @@ Connections Name of the connection. :param connection_type: :class:`ConnectionType` The type of connection. - :param options_kvpairs: Dict[str,str] + :param options: Dict[str,str] A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. :param owner: str (optional) Username of current owner of the connection. - :param properties_kvpairs: Dict[str,str] (optional) + :param properties: Dict[str,str] (optional) An object containing map of key-value properties attached to the connection. :param read_only: bool (optional) If the connection is read only. @@ -71,7 +71,7 @@ Connections :returns: Iterator over :class:`ConnectionInfo` - .. py:method:: update(name, options_kvpairs, name_arg) + .. py:method:: update(name, options, name_arg) Update a connection. @@ -79,7 +79,7 @@ Connections :param name: str Name of the connection. - :param options_kvpairs: Dict[str,str] + :param options: Dict[str,str] A map of key-value properties attached to the securable. :param name_arg: str Name of the connection. diff --git a/docs/workspace/dbfs.rst b/docs/workspace/dbfs.rst index 3684ced3..754e8b8d 100644 --- a/docs/workspace/dbfs.rst +++ b/docs/workspace/dbfs.rst @@ -1,4 +1,4 @@ -Dbfs +DBFS ==== .. py:class:: DbfsExt diff --git a/docs/workspace/groups.rst b/docs/workspace/groups.rst index 1ed4d645..d9cf2bc4 100644 --- a/docs/workspace/groups.rst +++ b/docs/workspace/groups.rst @@ -129,7 +129,7 @@ Groups :returns: Iterator over :class:`Group` - .. py:method:: patch(id [, operations, schema]) + .. py:method:: patch(id [, operations, schemas]) Update group details. @@ -138,7 +138,7 @@ Groups :param id: str Unique ID for a group in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
diff --git a/docs/workspace/instance_pools.rst b/docs/workspace/instance_pools.rst index 6b11c3c2..5c8d2c95 100644 --- a/docs/workspace/instance_pools.rst +++ b/docs/workspace/instance_pools.rst @@ -85,9 +85,9 @@ Instance Pools :param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) - A list of preloaded Spark image versions for the pool. Pool-backed clusters started with the - preloaded Spark version will start faster. A list of available Spark versions can be retrieved by - using the :method:clusters/sparkVersions API call. + A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters + started with the preloaded Spark version will start faster. A list of available Spark versions can + be retrieved by using the :method:clusters/sparkVersions API call. :returns: :class:`CreateInstancePoolResponse` @@ -178,9 +178,9 @@ Instance Pools :param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) - A list of preloaded Spark image versions for the pool. Pool-backed clusters started with the - preloaded Spark version will start faster. A list of available Spark versions can be retrieved by - using the :method:clusters/sparkVersions API call. + A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters + started with the preloaded Spark version will start faster. A list of available Spark versions can + be retrieved by using the :method:clusters/sparkVersions API call. diff --git a/docs/workspace/jobs.rst b/docs/workspace/jobs.rst index ac71f6d5..8323bb1d 100644 --- a/docs/workspace/jobs.rst +++ b/docs/workspace/jobs.rst @@ -159,8 +159,14 @@ Jobs Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. :param git_source: :class:`GitSource` (optional) - An optional specification for a remote repository containing the notebooks used by this job's - notebook tasks. + An optional specification for a remote Git repository containing the source code used by tasks. + Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. + + If `git_source` is set, these tasks retrieve the file from the remote repository by default. + However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. + + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are + used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) An optional set of health rules that can be defined for this job. :param job_clusters: List[:class:`JobCluster`] (optional) @@ -876,8 +882,14 @@ Jobs An optional set of email addresses notified when the run begins or completes. The default behavior is to not send any emails. :param git_source: :class:`GitSource` (optional) - An optional specification for a remote repository containing the notebooks used by this job's - notebook tasks. + An optional specification for a remote Git repository containing the source code used by tasks. + Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. + + If `git_source` is set, these tasks retrieve the file from the remote repository by default. 
+ However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. + + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are + used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) An optional set of health rules that can be defined for this job. :param idempotency_token: str (optional) diff --git a/docs/workspace/permissions.rst b/docs/workspace/permissions.rst index 0976299a..b2cdbe0c 100644 --- a/docs/workspace/permissions.rst +++ b/docs/workspace/permissions.rst @@ -128,12 +128,12 @@ Permissions obj = w.workspace.get_status(path=notebook_path) - w.permissions.set(request_object_type="notebooks", - request_object_id="%d" % (obj.object_id), - access_control_list=[ - iam.AccessControlRequest(group_name=group.display_name, - permission_level=iam.PermissionLevel.CAN_RUN) - ]) + _ = w.permissions.set(request_object_type="notebooks", + request_object_id="%d" % (obj.object_id), + access_control_list=[ + iam.AccessControlRequest(group_name=group.display_name, + permission_level=iam.PermissionLevel.CAN_RUN) + ]) # cleanup w.groups.delete(id=group.id) diff --git a/docs/workspace/service_principals.rst b/docs/workspace/service_principals.rst index 5bf0e93d..52e36977 100644 --- a/docs/workspace/service_principals.rst +++ b/docs/workspace/service_principals.rst @@ -130,7 +130,7 @@ Service Principals :returns: Iterator over :class:`ServicePrincipal` - .. py:method:: patch(id [, operations, schema]) + .. py:method:: patch(id [, operations, schemas]) Update service principal details. @@ -139,7 +139,7 @@ Service Principals :param id: str Unique ID for a service principal in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. diff --git a/docs/workspace/users.rst b/docs/workspace/users.rst index bc6fbb92..7bd69a45 100644 --- a/docs/workspace/users.rst +++ b/docs/workspace/users.rst @@ -170,7 +170,7 @@ Users :returns: Iterator over :class:`User` - .. py:method:: patch(id [, operations, schema]) + .. py:method:: patch(id [, operations, schemas]) Usage: @@ -202,7 +202,7 @@ Users :param id: str Unique ID for a user in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) - :param schema: List[:class:`PatchSchema`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. diff --git a/examples/permissions/set_generic_permissions.py b/examples/permissions/set_generic_permissions.py index dfc54c07..6e42b818 100755 --- a/examples/permissions/set_generic_permissions.py +++ b/examples/permissions/set_generic_permissions.py @@ -11,12 +11,12 @@ obj = w.workspace.get_status(path=notebook_path) -w.permissions.set(request_object_type="notebooks", - request_object_id="%d" % (obj.object_id), - access_control_list=[ - iam.AccessControlRequest(group_name=group.display_name, - permission_level=iam.PermissionLevel.CAN_RUN) - ]) +_ = w.permissions.set(request_object_type="notebooks", + request_object_id="%d" % (obj.object_id), + access_control_list=[ + iam.AccessControlRequest(group_name=group.display_name, + permission_level=iam.PermissionLevel.CAN_RUN) + ]) # cleanup w.groups.delete(id=group.id)
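Usage notes for the changes above follow. First, the SCIM `patch` methods on groups, users, and service principals now take a `schemas` keyword instead of `schema`, matching the request body. The sketch below is a minimal illustration against a throwaway workspace group; it assumes the standard SCIM PatchOp constant exposed by `databricks.sdk.service.iam` and is not taken from the patch itself.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

# create a throwaway group, then rename it with a SCIM PATCH;
# note the plural `schemas` keyword introduced in this release
group = w.groups.create(display_name="sdk-patch-example")
w.groups.patch(id=group.id,
               operations=[iam.Patch(op=iam.PatchOp.REPLACE,
                                     path="displayName",
                                     value="sdk-patch-example-renamed")],
               schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])

# cleanup
w.groups.delete(id=group.id)
```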
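The connections API now uses `options` and `properties` in place of `options_kvpairs` and `properties_kvpairs`. A minimal sketch of creating and updating a connection under the new names; the connection type, host, and credential values are placeholders rather than a tested configuration, and the option keys depend on the connection type.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# `options` replaces `options_kvpairs`; keys vary by connection type (placeholders here)
conn = w.connections.create(name="sdk-postgres-example",
                            connection_type=catalog.ConnectionType.POSTGRESQL,
                            options={"host": "db.example.com",
                                     "port": "5432",
                                     "user": "svc_user",
                                     "password": "example-secret"},
                            comment="created via SDK v0.5.0")

# `update` keeps the `name_arg` argument that routes to the existing connection
conn = w.connections.update(name=conn.name,
                            options={"host": "db.example.com", "port": "5432"},
                            name_arg=conn.name)
```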
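`clusters.create` (and `create_and_wait`) now accept `data_security_mode`, `docker_image`, and `single_user_name`. Below is a sketch of a single-user cluster; it leans on the SDK's existing spark-version and node-type selector helpers, and the user name is a placeholder.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

cluster = w.clusters.create_and_wait(
    cluster_name="sdk-single-user-example",
    spark_version=w.clusters.select_spark_version(latest=True),
    node_type_id=w.clusters.select_node_type(local_disk=True),
    num_workers=1,
    autotermination_minutes=15,
    # new in this release: pin the access mode and the single user allowed on the cluster
    data_security_mode=compute.DataSecurityMode.SINGLE_USER,
    single_user_name="someone@example.com")
```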
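For the new `connection_pool_size` configuration property (preview), the sketch below assumes it is accepted as a regular `Config` attribute alongside the usual authentication settings; the host and token are placeholders.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

# assumption: connection_pool_size (preview) is set like any other Config attribute
cfg = Config(host="https://example.cloud.databricks.com",
             token="dapi-example-token",
             connection_pool_size=25)
w = WorkspaceClient(config=cfg)
print(w.current_user.me().user_name)
```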
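Account-level storage credential calls now return `AccountsStorageCredentialInfo`, which wraps the credential under `credential_info`. A sketch of reading a credential under the new shape; the metastore ID and credential name are placeholders.

```python
from databricks.sdk import AccountClient

a = AccountClient()

resp = a.storage_credentials.get(metastore_id="11111111-2222-3333-4444-555555555555",
                                 name="example-credential")
# new in this release: the credential payload is nested under `credential_info`
print(resp.credential_info.name)
```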
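Finally, the reworded `git_source` documentation covers notebook, dbt, Python script, and SQL File tasks. The sketch below creates a job whose notebook task resolves its source from a remote Git repository; the repository URL, branch, and cluster ID are placeholders, and the enum member names are assumed to match this SDK version.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

created = w.jobs.create(
    name="sdk-git-source-example",
    # tasks pull their files from this repository unless a task overrides `source`
    git_source=jobs.GitSource(git_url="https://github.com/example/repo",
                              git_provider=jobs.GitProvider.GIT_HUB,
                              git_branch="main"),
    tasks=[jobs.Task(task_key="notebook_from_git",
                     existing_cluster_id="0811-000000-example",
                     notebook_task=jobs.NotebookTask(notebook_path="notebooks/main"))])

# cleanup
w.jobs.delete(job_id=created.job_id)
```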