From 7a866a6502a6d0a5c00150ffbabd53baeaf6b4b8 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Mon, 15 Jul 2019 13:11:34 -0700 Subject: [PATCH] fix: update to the latest version of the service (#232) --- .../datatransfer/v1/datatransfer.proto | 65 +--- .../bigquery/datatransfer/v1/transfer.proto | 36 +- .../data_transfer_service_smoke_test.js | 6 +- .../src/v1/data_transfer_service_client.js | 324 ++++++++++++------ .../data_transfer_service_client_config.json | 10 - .../datatransfer/v1/doc_datatransfer.js | 65 +--- .../bigquery/datatransfer/v1/doc_transfer.js | 42 +-- .../src/v1/doc/google/protobuf/doc_any.js | 2 +- .../v1/doc/google/protobuf/doc_timestamp.js | 4 +- .../synth.metadata | 10 +- .../synth.py | 10 - .../test/gapic-v1.js | 121 ------- 12 files changed, 243 insertions(+), 452 deletions(-) diff --git a/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/datatransfer.proto b/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/datatransfer.proto index 230d7c37b04..e9a39683494 100644 --- a/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/datatransfer.proto +++ b/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/datatransfer.proto @@ -39,6 +39,7 @@ option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1"; // up the frontend. service DataTransferService { option (google.api.default_host) = "bigquerydatatransfer.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; // Retrieves a supported data source and returns its settings, // which can be used for UI rendering. @@ -206,29 +207,6 @@ service DataTransferService { } }; } - - // Enables data transfer service for a given project. This - // method requires the additional scope of - // 'https://www.googleapis.com/auth/cloudplatformprojects' - // to manage the cloud project permissions. - rpc EnableDataTransferService(EnableDataTransferServiceRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*}:enableDataTransferService" - body: "*" - }; - } - - // Returns true if data transfer is enabled for a project. - rpc IsDataTransferServiceEnabled(IsDataTransferServiceEnabledRequest) returns (IsDataTransferServiceEnabledResponse) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*}:isDataTransferServiceEnabled" - body: "*" - additional_bindings { - post: "/v1/{name=projects/*}:isEnabled" - body: "*" - } - }; - } } // Represents a data source parameter with validation rules, so that @@ -308,6 +286,10 @@ message DataSourceParameter { // Deprecated. This field has no effect. bool recurse = 15; + + // If true, it should not be used in new transfers, and it should not be + // visible to users. + bool deprecated = 20; } // Represents data source metadata. Metadata is sufficient to @@ -410,12 +392,6 @@ message DataSource { // The minimum interval for scheduler to schedule runs. google.protobuf.Duration minimum_schedule_interval = 18; - - // Partner's legal name of this data source - string partner_legal_name = 22; - - // Redirect URL to complete transfer config setup for 3rd party data sources. - string redirect_url = 23; } // A request to get data source info. @@ -698,9 +674,6 @@ message ScheduleTransferRunsRequest { // `projects/{project_id}/transferConfigs/{config_id}`. string parent = 1; - // User labels to add to the scheduled runs. 
- map&lt;string, string&gt; labels = 6; - // Start time of the range of transfer runs. For example, // `"2017-05-25T00:00:00+00:00"`. google.protobuf.Timestamp start_time = 2; @@ -738,9 +711,6 @@ message StartManualTransferRunsRequest { // `projects/{project_id}/transferConfigs/{config_id}`. string parent = 1; - // User labels to add to the backfilled runs. - map&lt;string, string&gt; labels = 2; - // The requested time specification - this can be a time range or a specific // run_time. oneof time { @@ -758,28 +728,3 @@ message StartManualTransferRunsResponse { // The transfer runs that were created. repeated TransferRun runs = 1; } - -// A request to enable data transfer service for a project. -message EnableDataTransferServiceRequest { - // The name of the project resource in the form: - // `projects/{project_id}` - string name = 1; -} - -// A request to determine whether data transfer is enabled for the project. -message IsDataTransferServiceEnabledRequest { - // The name of the project resource in the form: - // `projects/{project_id}` - string name = 1; -} - -// A response to indicate whether data transfer service is enabled -// for the project. -message IsDataTransferServiceEnabledResponse { - // Indicates whether the data transfer service is enabled for the project. - bool enabled = 1; - - // A string that contains additional information about why the service is - // deemed not enabled. This is only available when `enable` is false. - string reason = 2; -} diff --git a/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/transfer.proto b/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/transfer.proto index 9ea305c9837..9501ea3cc3b 100644 --- a/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/transfer.proto +++ b/packages/google-cloud-bigquery-datatransfer/protos/google/cloud/bigquery/datatransfer/v1/transfer.proto @@ -160,24 +160,6 @@ message TransferConfig { // Output only. Region in which BigQuery dataset is located. string dataset_region = 14; - - // A unique identifier used for identifying a transfer setup stored on - // external partner side. The token is opaque to DTS and can only be - // interpreted by partner. Partner data source should create a mapping between - // the config id and the token to validate that a transfer config/run is - // legitimate. - string partner_token = 22; - - // Transfer settings managed by partner data sources. It is stored as - // key-value pairs and used for DTS UI display purpose only. Two reasons we - // don't want to store them together with 'params' are: - // - The connection info is provided by partner and not editable in DTS UI - // which is different from the immutable parameter. It will be confusing to - // add another boolean to DataSourceParameter to differentiate them. - // - The connection info can be any arbitrary key-value pairs. Adding them to - // params fields requires partner to provide definition for them in data - // source definition. It will be friendlier to avoid that for partners. - google.protobuf.Struct partner_connection_info = 23; } // Represents a data transfer run. @@ -188,14 +170,11 @@ message TransferRun { // The name is ignored when creating a transfer run. string name = 1; - // User labels. - map&lt;string, string&gt; labels = 22; - // Minimum time after which a transfer run can be started. google.protobuf.Timestamp schedule_time = 3; - // For batch transfer runs, specifies the date and time that - // data should be ingested.
+ // For batch transfer runs, specifies the date and time of the data should be + // ingested. google.protobuf.Timestamp run_time = 10; // Status of the transfer run. @@ -233,14 +212,6 @@ message TransferRun { // NOTE: the system might choose to delay the schedule depending on the // current load, so `schedule_time` doesn't always match this. string schedule = 12; - - // Output only. This is the same token initialized from TransferConfig. - // Partner token is a unique identifier used for identifying a transfer setup - // stored on external partner side. The token is opaque to DTS and can only be - // interpreted by partner. Partner data source should create a mapping between - // the config id and the token to validate that a transfer config/run is - // legitimate. - string partner_token = 28; } // Represents a user facing message for a particular data transfer run. @@ -258,9 +229,6 @@ message TransferMessage { // Error message. ERROR = 3; - - // Debug message. - DEBUG = 4; } // Time when message was logged. diff --git a/packages/google-cloud-bigquery-datatransfer/smoke-test/data_transfer_service_smoke_test.js b/packages/google-cloud-bigquery-datatransfer/smoke-test/data_transfer_service_smoke_test.js index 02eaf92e631..3cccac7a833 100644 --- a/packages/google-cloud-bigquery-datatransfer/smoke-test/data_transfer_service_smoke_test.js +++ b/packages/google-cloud-bigquery-datatransfer/smoke-test/data_transfer_service_smoke_test.js @@ -28,7 +28,7 @@ describe('DataTransferServiceSmokeTest', () => { }); // Iterate over all elements. - const formattedParent = client.projectPath(projectId); + const formattedParent = client.locationPath(projectId, 'us-central1'); client .listDataSources({parent: formattedParent}) @@ -50,7 +50,7 @@ describe('DataTransferServiceSmokeTest', () => { }); // Or obtain the paged response. - const formattedParent = client.projectPath(projectId); + const formattedParent = client.locationPath(projectId, 'us-central1'); const options = {autoPaginate: false}; const callback = responses => { @@ -82,7 +82,7 @@ describe('DataTransferServiceSmokeTest', () => { // optional auth parameters. }); - const formattedParent = client.projectPath(projectId); + const formattedParent = client.locationPath(projectId, 'us-central1'); client .listDataSourcesStream({parent: formattedParent}) .on('data', element => { diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.js b/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.js index affdb4865a2..c2629d4602c 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.js +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.js @@ -103,6 +103,18 @@ class DataTransferServiceClient { // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. 
this._pathTemplates = { + locationPathTemplate: new gax.PathTemplate( + 'projects/{project}/locations/{location}' + ), + locationDataSourcePathTemplate: new gax.PathTemplate( + 'projects/{project}/locations/{location}/dataSources/{data_source}' + ), + locationRunPathTemplate: new gax.PathTemplate( + 'projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}' + ), + locationTransferConfigPathTemplate: new gax.PathTemplate( + 'projects/{project}/locations/{location}/transferConfigs/{transfer_config}' + ), projectPathTemplate: new gax.PathTemplate('projects/{project}'), projectDataSourcePathTemplate: new gax.PathTemplate( 'projects/{project}/dataSources/{data_source}' ), @@ -178,8 +190,6 @@ class DataTransferServiceClient { 'listTransferLogs', 'checkValidCreds', 'startManualTransferRuns', - 'enableDataTransferService', - 'isDataTransferServiceEnabled', ]; for (const methodName of dataTransferServiceStubMethods) { this._innerApiCalls[methodName] = gax.createApiCall( @@ -920,8 +930,6 @@ class DataTransferServiceClient { * `"2017-05-30T00:00:00+00:00"`. * * This object should have the same structure as [Timestamp]{@link google.protobuf.Timestamp} - * @param {Object.&lt;string, string&gt;} [request.labels] - * User labels to add to the scheduled runs. * @param {Object} [options] * Optional parameters. You can override the default settings for this call, e.g, timeout, * retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/interfaces/CallOptions.html} for the details. @@ -1489,8 +1497,6 @@ class DataTransferServiceClient { * @param {string} [request.parent] * Transfer configuration name in the form: * `projects/{project_id}/transferConfigs/{config_id}`. - * @param {Object.&lt;string, string&gt;} [request.labels] - * User labels to add to the backfilled runs. * @param {Object} [request.requestedTimeRange] * Time range for the transfer runs that should be started. * @@ -1550,120 +1556,73 @@ class DataTransferServiceClient { ); } + // -------------------- + // -- Path templates -- + // -------------------- + /** - * Enables data transfer service for a given project. This - * method requires the additional scope of - * 'https://www.googleapis.com/auth/cloudplatformprojects' - * to manage the cloud project permissions. + * Return a fully-qualified location resource name string. * - * @param {Object} request - * The request object that will be sent. - * @param {string} [request.name] - * The name of the project resource in the form: - * `projects/{project_id}` - * @param {Object} [options] - * Optional parameters. You can override the default settings for this call, e.g, timeout, - * retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/interfaces/CallOptions.html} for the details. - * @param {function(?Error)} [callback] - * The function which will be called with the result of the API call. - * @returns {Promise} - The promise which resolves when API call finishes. - * The promise has a method named "cancel" which cancels the ongoing API call. - * - * @example - * - * const bigqueryDataTransfer = require('@google-cloud/bigquery-data-transfer'); - * - * const client = new bigqueryDataTransfer.v1.DataTransferServiceClient({ - * // optional auth parameters.
- * }); - * - * - * client.enableDataTransferService({}).catch(err => { - * console.error(err); - * }); + * @param {String} project + * @param {String} location + * @returns {String} */ - enableDataTransferService(request, options, callback) { - if (options instanceof Function && callback === undefined) { - callback = options; - options = {}; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - name: request.name, + locationPath(project, location) { + return this._pathTemplates.locationPathTemplate.render({ + project: project, + location: location, }); - - return this._innerApiCalls.enableDataTransferService( - request, - options, - callback - ); } /** - * Returns true if data transfer is enabled for a project. + * Return a fully-qualified location_data_source resource name string. * - * @param {Object} request - * The request object that will be sent. - * @param {string} [request.name] - * The name of the project resource in the form: - * `projects/{project_id}` - * @param {Object} [options] - * Optional parameters. You can override the default settings for this call, e.g, timeout, - * retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/interfaces/CallOptions.html} for the details. - * @param {function(?Error, ?Object)} [callback] - * The function which will be called with the result of the API call. - * - * The second parameter to the callback is an object representing [IsDataTransferServiceEnabledResponse]{@link google.cloud.bigquery.datatransfer.v1.IsDataTransferServiceEnabledResponse}. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [IsDataTransferServiceEnabledResponse]{@link google.cloud.bigquery.datatransfer.v1.IsDataTransferServiceEnabledResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. - * - * @example - * - * const bigqueryDataTransfer = require('@google-cloud/bigquery-data-transfer'); - * - * const client = new bigqueryDataTransfer.v1.DataTransferServiceClient({ - * // optional auth parameters. - * }); - * - * - * client.isDataTransferServiceEnabled({}) - * .then(responses => { - * const response = responses[0]; - * // doThingsWith(response) - * }) - * .catch(err => { - * console.error(err); - * }); + * @param {String} project + * @param {String} location + * @param {String} dataSource + * @returns {String} */ - isDataTransferServiceEnabled(request, options, callback) { - if (options instanceof Function && callback === undefined) { - callback = options; - options = {}; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - name: request.name, + locationDataSourcePath(project, location, dataSource) { + return this._pathTemplates.locationDataSourcePathTemplate.render({ + project: project, + location: location, + data_source: dataSource, }); + } - return this._innerApiCalls.isDataTransferServiceEnabled( - request, - options, - callback - ); + /** + * Return a fully-qualified location_run resource name string. 
+ * + * @param {String} project + * @param {String} location + * @param {String} transferConfig + * @param {String} run + * @returns {String} + */ + locationRunPath(project, location, transferConfig, run) { + return this._pathTemplates.locationRunPathTemplate.render({ + project: project, + location: location, + transfer_config: transferConfig, + run: run, + }); } - // -------------------- - // -- Path templates -- - // -------------------- + /** + * Return a fully-qualified location_transfer_config resource name string. + * + * @param {String} project + * @param {String} location + * @param {String} transferConfig + * @returns {String} + */ + locationTransferConfigPath(project, location, transferConfig) { + return this._pathTemplates.locationTransferConfigPathTemplate.render({ + project: project, + location: location, + transfer_config: transferConfig, + }); + } /** * Return a fully-qualified project resource name string. @@ -1721,6 +1680,157 @@ class DataTransferServiceClient { }); } + /** + * Parse the locationName from a location resource. + * + * @param {String} locationName + * A fully-qualified path representing a location resources. + * @returns {String} - A string representing the project. + */ + matchProjectFromLocationName(locationName) { + return this._pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the locationName from a location resource. + * + * @param {String} locationName + * A fully-qualified path representing a location resources. + * @returns {String} - A string representing the location. + */ + matchLocationFromLocationName(locationName) { + return this._pathTemplates.locationPathTemplate.match(locationName) + .location; + } + + /** + * Parse the locationDataSourceName from a location_data_source resource. + * + * @param {String} locationDataSourceName + * A fully-qualified path representing a location_data_source resources. + * @returns {String} - A string representing the project. + */ + matchProjectFromLocationDataSourceName(locationDataSourceName) { + return this._pathTemplates.locationDataSourcePathTemplate.match( + locationDataSourceName + ).project; + } + + /** + * Parse the locationDataSourceName from a location_data_source resource. + * + * @param {String} locationDataSourceName + * A fully-qualified path representing a location_data_source resources. + * @returns {String} - A string representing the location. + */ + matchLocationFromLocationDataSourceName(locationDataSourceName) { + return this._pathTemplates.locationDataSourcePathTemplate.match( + locationDataSourceName + ).location; + } + + /** + * Parse the locationDataSourceName from a location_data_source resource. + * + * @param {String} locationDataSourceName + * A fully-qualified path representing a location_data_source resources. + * @returns {String} - A string representing the data_source. + */ + matchDataSourceFromLocationDataSourceName(locationDataSourceName) { + return this._pathTemplates.locationDataSourcePathTemplate.match( + locationDataSourceName + ).data_source; + } + + /** + * Parse the locationRunName from a location_run resource. + * + * @param {String} locationRunName + * A fully-qualified path representing a location_run resources. + * @returns {String} - A string representing the project. + */ + matchProjectFromLocationRunName(locationRunName) { + return this._pathTemplates.locationRunPathTemplate.match(locationRunName) + .project; + } + + /** + * Parse the locationRunName from a location_run resource. 
+ * + * @param {String} locationRunName + * A fully-qualified path representing a location_run resources. + * @returns {String} - A string representing the location. + */ + matchLocationFromLocationRunName(locationRunName) { + return this._pathTemplates.locationRunPathTemplate.match(locationRunName) + .location; + } + + /** + * Parse the locationRunName from a location_run resource. + * + * @param {String} locationRunName + * A fully-qualified path representing a location_run resources. + * @returns {String} - A string representing the transfer_config. + */ + matchTransferConfigFromLocationRunName(locationRunName) { + return this._pathTemplates.locationRunPathTemplate.match(locationRunName) + .transfer_config; + } + + /** + * Parse the locationRunName from a location_run resource. + * + * @param {String} locationRunName + * A fully-qualified path representing a location_run resources. + * @returns {String} - A string representing the run. + */ + matchRunFromLocationRunName(locationRunName) { + return this._pathTemplates.locationRunPathTemplate.match(locationRunName) + .run; + } + + /** + * Parse the locationTransferConfigName from a location_transfer_config resource. + * + * @param {String} locationTransferConfigName + * A fully-qualified path representing a location_transfer_config resources. + * @returns {String} - A string representing the project. + */ + matchProjectFromLocationTransferConfigName(locationTransferConfigName) { + return this._pathTemplates.locationTransferConfigPathTemplate.match( + locationTransferConfigName + ).project; + } + + /** + * Parse the locationTransferConfigName from a location_transfer_config resource. + * + * @param {String} locationTransferConfigName + * A fully-qualified path representing a location_transfer_config resources. + * @returns {String} - A string representing the location. + */ + matchLocationFromLocationTransferConfigName(locationTransferConfigName) { + return this._pathTemplates.locationTransferConfigPathTemplate.match( + locationTransferConfigName + ).location; + } + + /** + * Parse the locationTransferConfigName from a location_transfer_config resource. + * + * @param {String} locationTransferConfigName + * A fully-qualified path representing a location_transfer_config resources. + * @returns {String} - A string representing the transfer_config. + */ + matchTransferConfigFromLocationTransferConfigName( + locationTransferConfigName + ) { + return this._pathTemplates.locationTransferConfigPathTemplate.match( + locationTransferConfigName + ).transfer_config; + } + /** * Parse the projectName from a project resource. 
* * diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client_config.json b/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client_config.json index d66f39508db..07463d03d3b 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client_config.json +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client_config.json @@ -89,16 +89,6 @@ "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default" - }, - "EnableDataTransferService": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "IsDataTransferServiceEnabled": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" } } } diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_datatransfer.js b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_datatransfer.js index da3e8890b2f..32718ad0b35 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_datatransfer.js +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_datatransfer.js @@ -77,6 +77,10 @@ * @property {boolean} recurse * Deprecated. This field has no effect. * + * @property {boolean} deprecated + * If true, it should not be used in new transfers, and it should not be + * visible to users. + * * @typedef DataSourceParameter * @memberof google.cloud.bigquery.datatransfer.v1 * @see [google.cloud.bigquery.datatransfer.v1.DataSourceParameter definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/datatransfer.proto} @@ -212,12 +216,6 @@ const DataSourceParameter = { * * This object should have the same structure as [Duration]{@link google.protobuf.Duration} * - * @property {string} partnerLegalName - * Partner's legal name of this data source - * - * @property {string} redirectUrl - * Redirect URL to complete transfer config setup for 3rd party data sources. - * * @typedef DataSource * @memberof google.cloud.bigquery.datatransfer.v1 * @see [google.cloud.bigquery.datatransfer.v1.DataSource definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/datatransfer.proto} @@ -727,9 +725,6 @@ const CheckValidCredsResponse = { * Transfer configuration name in the form: * `projects/{project_id}/transferConfigs/{config_id}`. * - * @property {Object.&lt;string, string&gt;} labels - * User labels to add to the scheduled runs. - * * @property {Object} startTime * Start time of the range of transfer runs. For example, * `"2017-05-25T00:00:00+00:00"`. @@ -773,9 +768,6 @@ const ScheduleTransferRunsResponse = { * Transfer configuration name in the form: * `projects/{project_id}/transferConfigs/{config_id}`. * - * @property {Object.&lt;string, string&gt;} labels - * User labels to add to the backfilled runs. - * * @property {Object} requestedTimeRange * Time range for the transfer runs that should be started. * @@ -837,53 +829,4 @@ const StartManualTransferRunsRequest = { */ const StartManualTransferRunsResponse = { // This is for documentation. Actual contents will be loaded by gRPC. -}; - -/** - * A request to enable data transfer service for a project.
- * - * @property {string} name - * The name of the project resource in the form: - * `projects/{project_id}` - * - * @typedef EnableDataTransferServiceRequest - * @memberof google.cloud.bigquery.datatransfer.v1 - * @see [google.cloud.bigquery.datatransfer.v1.EnableDataTransferServiceRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/datatransfer.proto} - */ -const EnableDataTransferServiceRequest = { - // This is for documentation. Actual contents will be loaded by gRPC. -}; - -/** - * A request to determine whether data transfer is enabled for the project. - * - * @property {string} name - * The name of the project resource in the form: - * `projects/{project_id}` - * - * @typedef IsDataTransferServiceEnabledRequest - * @memberof google.cloud.bigquery.datatransfer.v1 - * @see [google.cloud.bigquery.datatransfer.v1.IsDataTransferServiceEnabledRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/datatransfer.proto} - */ -const IsDataTransferServiceEnabledRequest = { - // This is for documentation. Actual contents will be loaded by gRPC. -}; - -/** - * A response to indicate whether data transfer service is enabled - * for the project. - * - * @property {boolean} enabled - * Indicates whether the data transfer service is enabled for the project. - * - * @property {string} reason - * A string that contains additional information about why the service is - * deemed not enabled. This is only available when `enable` is false. - * - * @typedef IsDataTransferServiceEnabledResponse - * @memberof google.cloud.bigquery.datatransfer.v1 - * @see [google.cloud.bigquery.datatransfer.v1.IsDataTransferServiceEnabledResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/datatransfer.proto} - */ -const IsDataTransferServiceEnabledResponse = { - // This is for documentation. Actual contents will be loaded by gRPC. }; \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_transfer.js b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_transfer.js index b3a93bddd48..e684dbded80 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_transfer.js +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/cloud/bigquery/datatransfer/v1/doc_transfer.js @@ -132,26 +132,6 @@ const ScheduleOptions = { * @property {string} datasetRegion * Output only. Region in which BigQuery dataset is located. * - * @property {string} partnerToken - * A unique identifier used for identifying a transfer setup stored on - * external partner side. The token is opaque to DTS and can only be - * interpreted by partner. Partner data source should create a mapping between - * the config id and the token to validate that a transfer config/run is - * legitimate. - * - * @property {Object} partnerConnectionInfo - * Transfer settings managed by partner data sources. It is stored as - * key-value pairs and used for DTS UI display purpose only. Two reasons we - * don't want to store them together with 'params' are: - * - The connection info is provided by partner and not editable in DTS UI - * which is different from the immutable parameter. 
It will be confusing to - * add another boolean to DataSourceParameter to differentiate them. - * - The connection info can be any arbitrary key-value pairs. Adding them to - * params fields requires partner to provide definition for them in data - * source definition. It will be friendlier to avoid that for partners. - * - * This object should have the same structure as [Struct]{@link google.protobuf.Struct} - * * @typedef TransferConfig * @memberof google.cloud.bigquery.datatransfer.v1 * @see [google.cloud.bigquery.datatransfer.v1.TransferConfig definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/transfer.proto} @@ -169,17 +149,14 @@ const TransferConfig = { * `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`. * The name is ignored when creating a transfer run. * - * @property {Object.&lt;string, string&gt;} labels - * User labels. - * * @property {Object} scheduleTime * Minimum time after which a transfer run can be started. * * This object should have the same structure as [Timestamp]{@link google.protobuf.Timestamp} * * @property {Object} runTime - * For batch transfer runs, specifies the date and time that - * data should be ingested. + * For batch transfer runs, specifies the date and time of the data should be + * ingested. * * This object should have the same structure as [Timestamp]{@link google.protobuf.Timestamp} * @@ -231,14 +208,6 @@ * NOTE: the system might choose to delay the schedule depending on the * current load, so `schedule_time` doesn't always match this. * - * @property {string} partnerToken - * Output only. This is the same token initialized from TransferConfig. - * Partner token is a unique identifier used for identifying a transfer setup - * stored on external partner side. The token is opaque to DTS and can only be - * interpreted by partner. Partner data source should create a mapping between - * the config id and the token to validate that a transfer config/run is - * legitimate. - * * @typedef TransferRun * @memberof google.cloud.bigquery.datatransfer.v1 * @see [google.cloud.bigquery.datatransfer.v1.TransferRun definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/transfer.proto} @@ -296,12 +265,7 @@ const TransferMessage = { /** * Error message. */ - ERROR: 3, - - /** - * Debug message. - */ - DEBUG: 4 + ERROR: 3 } }; diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_any.js b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_any.js index 9ff5d007807..cdd2fc80e49 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_any.js +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_any.js @@ -125,7 +125,7 @@ * Schemes other than `http`, `https` (or the empty scheme) might be * used with implementation specific semantics. * - * @property {string} value + * @property {Buffer} value * Must be a valid serialized protocol buffer of the above specified type.
* * @typedef Any diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_timestamp.js b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_timestamp.js index 98c19dbf0d3..b643b2d6c1e 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_timestamp.js +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/doc/google/protobuf/doc_timestamp.js @@ -93,7 +93,9 @@ * method. In Python, a standard `datetime.datetime` object can be converted * to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) * with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one - * can use the Joda Time's [`ISODateTimeFormat.dateTime()`](https://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D) to obtain a formatter capable of generating timestamps in this format. + * can use the Joda Time's [`ISODateTimeFormat.dateTime()`](https://cloud.google.com + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. * * @property {number} seconds * Represents seconds of UTC time since Unix epoch diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata index 822a28755af..933dfa9fca8 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.metadata +++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-11T11:09:29.045874Z", + "updateTime": "2019-07-15T18:43:48.201192Z", "sources": [ { "generator": { "name": "artman", - "version": "0.24.0", - "dockerImage": "googleapis/artman@sha256:ce425884865f57f18307e597bca1a74a3619b7098688d4995261f3ffb3488681" + "version": "0.29.4", + "dockerImage": "googleapis/artman@sha256:63f21e83cb92680b7001dc381069e962c9e6dee314fd8365ac554c07c89221fb" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "744feb9660b3194fcde37dea50038bde204f3573", - "internalRef": "252553801" + "sha": "47bd0c2ba33c28dd624a65dad382e02bb61d1618", + "internalRef": "257690259" } }, { diff --git a/packages/google-cloud-bigquery-datatransfer/synth.py b/packages/google-cloud-bigquery-datatransfer/synth.py index 4afb4ad9bbb..f2d67af744f 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.py +++ b/packages/google-cloud-bigquery-datatransfer/synth.py @@ -26,15 +26,5 @@ templates = common_templates.node_library() s.copy(templates) -# [START fix-dead-link] -s.replace('**/doc/google/protobuf/doc_timestamp.js', - 'https:\/\/cloud\.google\.com[\s\*]*http:\/\/(.*)[\s\*]*\)', - r"https://\1)") - -s.replace('**/doc/google/protobuf/doc_timestamp.js', - 'toISOString\]', - 'toISOString)') -# [END fix-dead-link] - subprocess.run(['npm', 'install']) subprocess.run(['npm', 'run', 'fix']) diff --git a/packages/google-cloud-bigquery-datatransfer/test/gapic-v1.js b/packages/google-cloud-bigquery-datatransfer/test/gapic-v1.js index 8fb9f1e23bc..8e6ca45281e 100644 --- a/packages/google-cloud-bigquery-datatransfer/test/gapic-v1.js +++ b/packages/google-cloud-bigquery-datatransfer/test/gapic-v1.js @@ -77,8 +77,6 @@ describe('DataTransferServiceClient', () => { const helpUrl = 'helpUrl-789431439'; const defaultDataRefreshWindowDays = 1804935157; const manualRunsDisabled = true; - const partnerLegalName = 'partnerLegalName-1307326424'; - const redirectUrl = 
'redirectUrl951230092'; const expectedResponse = { name: name2, dataSourceId: dataSourceId, @@ -92,8 +90,6 @@ describe('DataTransferServiceClient', () => { helpUrl: helpUrl, defaultDataRefreshWindowDays: defaultDataRefreshWindowDays, manualRunsDisabled: manualRunsDisabled, - partnerLegalName: partnerLegalName, - redirectUrl: redirectUrl, }; // Mock Grpc layer @@ -240,7 +236,6 @@ describe('DataTransferServiceClient', () => { const disabled = true; const userId = 147132913; const datasetRegion = 'datasetRegion959248539'; - const partnerToken = 'partnerToken725173186'; const expectedResponse = { name: name, destinationDatasetId: destinationDatasetId, @@ -251,7 +246,6 @@ describe('DataTransferServiceClient', () => { disabled: disabled, userId: userId, datasetRegion: datasetRegion, - partnerToken: partnerToken, }; // Mock Grpc layer @@ -326,7 +320,6 @@ describe('DataTransferServiceClient', () => { const disabled = true; const userId = 147132913; const datasetRegion = 'datasetRegion959248539'; - const partnerToken = 'partnerToken725173186'; const expectedResponse = { name: name, destinationDatasetId: destinationDatasetId, @@ -337,7 +330,6 @@ describe('DataTransferServiceClient', () => { disabled: disabled, userId: userId, datasetRegion: datasetRegion, - partnerToken: partnerToken, }; // Mock Grpc layer @@ -474,7 +466,6 @@ describe('DataTransferServiceClient', () => { const disabled = true; const userId = 147132913; const datasetRegion = 'datasetRegion959248539'; - const partnerToken = 'partnerToken725173186'; const expectedResponse = { name: name2, destinationDatasetId: destinationDatasetId, @@ -485,7 +476,6 @@ describe('DataTransferServiceClient', () => { disabled: disabled, userId: userId, datasetRegion: datasetRegion, - partnerToken: partnerToken, }; // Mock Grpc layer @@ -705,14 +695,12 @@ describe('DataTransferServiceClient', () => { const dataSourceId = 'dataSourceId-1015796374'; const userId = 147132913; const schedule = 'schedule-697920873'; - const partnerToken = 'partnerToken725173186'; const expectedResponse = { name: name2, destinationDatasetId: destinationDatasetId, dataSourceId: dataSourceId, userId: userId, schedule: schedule, - partnerToken: partnerToken, }; // Mock Grpc layer @@ -1103,115 +1091,6 @@ describe('DataTransferServiceClient', () => { }); }); }); - - describe('enableDataTransferService', () => { - it('invokes enableDataTransferService without error', done => { - const client = new bigqueryDataTransferModule.v1.DataTransferServiceClient( - { - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - } - ); - - // Mock request - const request = {}; - - // Mock Grpc layer - client._innerApiCalls.enableDataTransferService = mockSimpleGrpcMethod( - request - ); - - client.enableDataTransferService(request, err => { - assert.ifError(err); - done(); - }); - }); - - it('invokes enableDataTransferService with error', done => { - const client = new bigqueryDataTransferModule.v1.DataTransferServiceClient( - { - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - } - ); - - // Mock request - const request = {}; - - // Mock Grpc layer - client._innerApiCalls.enableDataTransferService = mockSimpleGrpcMethod( - request, - null, - error - ); - - client.enableDataTransferService(request, err => { - assert(err instanceof Error); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - done(); - }); - }); - }); - - describe('isDataTransferServiceEnabled', () => { - it('invokes isDataTransferServiceEnabled without error', done => { - 
const client = new bigqueryDataTransferModule.v1.DataTransferServiceClient( - { - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - } - ); - - // Mock request - const request = {}; - - // Mock response - const enabled = false; - const reason = 'reason-934964668'; - const expectedResponse = { - enabled: enabled, - reason: reason, - }; - - // Mock Grpc layer - client._innerApiCalls.isDataTransferServiceEnabled = mockSimpleGrpcMethod( - request, - expectedResponse - ); - - client.isDataTransferServiceEnabled(request, (err, response) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes isDataTransferServiceEnabled with error', done => { - const client = new bigqueryDataTransferModule.v1.DataTransferServiceClient( - { - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - } - ); - - // Mock request - const request = {}; - - // Mock Grpc layer - client._innerApiCalls.isDataTransferServiceEnabled = mockSimpleGrpcMethod( - request, - null, - error - ); - - client.isDataTransferServiceEnabled(request, (err, response) => { - assert(err instanceof Error); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - }); - }); - }); }); function mockSimpleGrpcMethod(expectedRequest, response, error) {
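For anyone trying out the regenerated client above: it now exposes location-scoped path helpers and matching parse methods alongside the existing project-scoped ones. A minimal sketch (not part of the patch; 'my-project', 'us-central1' and 'my-config' are placeholder values):

// Sketch: build and parse the location-scoped resource names added in
// data_transfer_service_client.js. All IDs below are placeholders.
const bigqueryDataTransfer = require('@google-cloud/bigquery-data-transfer');

const client = new bigqueryDataTransfer.v1.DataTransferServiceClient({
  // optional auth parameters.
});

// Compose fully-qualified names.
const parent = client.locationPath('my-project', 'us-central1');
// -> 'projects/my-project/locations/us-central1'
const configName = client.locationTransferConfigPath(
  'my-project',
  'us-central1',
  'my-config'
);
// -> 'projects/my-project/locations/us-central1/transferConfigs/my-config'

// Parse components back out of a fully-qualified name.
console.log(client.matchLocationFromLocationTransferConfigName(configName)); // 'us-central1'
console.log(client.matchTransferConfigFromLocationTransferConfigName(configName)); // 'my-config'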
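The smoke-test change swaps the project-scoped parent for a location-scoped one when listing data sources. A sketch of the resulting call shape, with a placeholder project ID ('us-central1' is the location the smoke test itself uses):

// Sketch: list data sources under a location parent, mirroring the updated
// smoke test. 'my-project' is a placeholder.
const bigqueryDataTransfer = require('@google-cloud/bigquery-data-transfer');

const client = new bigqueryDataTransfer.v1.DataTransferServiceClient({
  // optional auth parameters.
});

const formattedParent = client.locationPath('my-project', 'us-central1');

client
  .listDataSources({parent: formattedParent})
  .then(responses => {
    const resources = responses[0];
    for (const resource of resources) {
      console.log(resource);
    }
  })
  .catch(err => {
    console.error(err);
  });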
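With the labels parameter removed from ScheduleTransferRunsRequest, a scheduling call now carries only the parent and the time range. A sketch under those assumptions, using a placeholder config name and the {seconds: ...} object form that protobuf.js accepts for Timestamp fields:

// Sketch: schedule transfer runs for an existing config over a fixed window.
// The parent string and the dates are placeholders.
const bigqueryDataTransfer = require('@google-cloud/bigquery-data-transfer');

const client = new bigqueryDataTransfer.v1.DataTransferServiceClient({
  // optional auth parameters.
});

const request = {
  parent: 'projects/my-project/transferConfigs/my-config',
  startTime: {seconds: Math.floor(Date.parse('2017-05-25T00:00:00Z') / 1000)},
  endTime: {seconds: Math.floor(Date.parse('2017-05-30T00:00:00Z') / 1000)},
};

client
  .scheduleTransferRuns(request)
  .then(responses => {
    const response = responses[0];
    console.log(response);
  })
  .catch(err => {
    console.error(err);
  });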