'use strict';
// Creates a BigQuery dataset with the given ID in the given project,
// logging the result (or any error) to the console.
function createDataset (datasetId, projectId) {
  // [START bigquery_create_dataset]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // The project ID to use, e.g. "your-project-id"
  // const projectId = "your-project-id";

  // Instantiates a client
  const bigquery = BigQuery({
    projectId: projectId
  });

  // The ID for the new dataset, e.g. "my_new_dataset"
  // const datasetId = "my_new_dataset";

  // Creates a new dataset
  bigquery.createDataset(datasetId)
    .then((results) => {
      const dataset = results[0];
      console.log(`Dataset ${dataset.id} created.`);
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END bigquery_create_dataset]
}
// Deletes the BigQuery dataset with the given ID in the given project,
// logging the result (or any error) to the console.
function deleteDataset (datasetId, projectId) {
  // [START bigquery_delete_dataset]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // The project ID to use, e.g. "your-project-id"
  // const projectId = "your-project-id";

  // Instantiates a client
  const bigquery = BigQuery({
    projectId: projectId
  });

  // The ID of the dataset to delete, e.g. "my_new_dataset"
  // const datasetId = "my_new_dataset";

  // Creates a reference to the existing dataset
  const dataset = bigquery.dataset(datasetId);

  // Deletes the dataset
  dataset.delete()
    .then(() => {
      console.log(`Dataset ${dataset.id} deleted.`);
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END bigquery_delete_dataset]
}
// Lists every dataset in the given project, logging each dataset ID
// (or any error) to the console.
function listDatasets (projectId) {
  // [START bigquery_list_datasets]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // The project ID to use, e.g. "your-project-id"
  // const projectId = "your-project-id";

  // Instantiates a client
  const bigquery = BigQuery({
    projectId: projectId
  });

  // Lists all datasets in the specified project
  bigquery.getDatasets()
    .then((results) => {
      const datasets = results[0];
      console.log('Datasets:');
      datasets.forEach((dataset) => console.log(dataset.id));
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END bigquery_list_datasets]
}
// The command-line program: wires the sample functions above to yargs
// subcommands. `--projectId` falls back to the GCLOUD_PROJECT /
// GOOGLE_CLOUD_PROJECT environment variables when not given.
const cli = require(`yargs`)
  .demand(1)
  .options({
    projectId: {
      alias: 'p',
      default: process.env.GCLOUD_PROJECT || process.env.GOOGLE_CLOUD_PROJECT,
      description: 'The Project ID to use. Defaults to the value of the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.',
      requiresArg: true,
      type: 'string'
    }
  })
  .command(
    `create <datasetId>`,
    `Creates a new dataset.`,
    {},
    (opts) => createDataset(opts.datasetId, opts.projectId)
  )
  .command(
    `delete <datasetId>`,
    `Deletes a dataset.`,
    {},
    (opts) => deleteDataset(opts.datasetId, opts.projectId)
  )
  .command(
    `list`,
    `Lists datasets.`,
    {},
    (opts) => listDatasets(opts.projectId)
  )
  .example(`node $0 create my_dataset`, `Creates a new dataset named "my_dataset".`)
  .example(`node $0 delete my_dataset`, `Deletes a dataset named "my_dataset".`)
  .example(`node $0 list`, `Lists all datasets in the project specified by the GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variables.`)
  .example(`node $0 list --projectId=bigquery-public-data`, `Lists all datasets in the "bigquery-public-data" project.`)
  .wrap(120)
  .recommendCommands()
  .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`)
  .help()
  .strict();
// Only run the CLI when this file is executed directly (not required
// as a module).
if (module === require.main) {
  cli.parse(process.argv.slice(2));
}
0 commit comments