feat!: drop node8 support, support for async iterators (#482)
BREAKING CHANGE: The library now supports Node.js v10+. The last version to support Node.js v8 is tagged legacy-8 on NPM.
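For projects that must stay on Node.js v8 for now, that release can be pinned with npm's standard dist-tag syntax (the package name is assumed from this repository):

    npm install @google-cloud/translate@legacy-8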

New feature: methods with pagination now support async iteration.
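A minimal sketch of what this looks like for a paginated call (assuming the v3 TranslationServiceClient; listGlossariesAsync and the resource path below are illustrative of the generated *Async surface, not taken from this diff):

// Minimal sketch: consuming a paginated method with for await...of.
// Assumes the v3 TranslationServiceClient; listGlossariesAsync is an
// illustrative example of the generated *Async pagination variants.
const {TranslationServiceClient} = require('@google-cloud/translate');

async function listAllGlossaries(projectId) {
  const client = new TranslationServiceClient();
  const parent = `projects/${projectId}/locations/us-central1`;
  // The async iterable fetches further pages lazily as the loop
  // consumes results, so no manual pageToken bookkeeping is needed.
  for await (const glossary of client.listGlossariesAsync({parent})) {
    console.log(glossary.name);
  }
}

Iteration ends when the last page is exhausted; breaking out of the loop early simply stops fetching further pages.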
alexander-fenster authored Mar 31, 2020
1 parent 63fd8e0 commit 4a5f047
Showing 53 changed files with 4,254 additions and 1,958 deletions.
3 changes: 3 additions & 0 deletions .eslintrc.json
@@ -0,0 +1,3 @@
{
"extends": "./node_modules/gts"
}
15 changes: 0 additions & 15 deletions .eslintrc.yml

This file was deleted.

2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- node: [8, 10, 12, 13]
+ node: [10, 12, 13]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
8 changes: 0 additions & 8 deletions .prettierrc

This file was deleted.

17 changes: 17 additions & 0 deletions .prettierrc.js
@@ -0,0 +1,17 @@
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

module.exports = {
...require('gts/.prettierrc.json')
}
10 changes: 6 additions & 4 deletions package.json
@@ -5,7 +5,7 @@
"license": "Apache-2.0",
"author": "Google Inc.",
"engines": {
"node": ">=8.10.0"
"node": ">=10"
},
"repository": "googleapis/nodejs-translate",
"main": "build/src/index.js",
@@ -48,7 +48,7 @@
"@google-cloud/promisify": "^1.0.0",
"arrify": "^2.0.0",
"extend": "^3.0.2",
"google-gax": "^1.11.1",
"google-gax": "^2.0.1",
"is-html": "^2.0.0",
"protobufjs": "^6.8.8"
},
@@ -58,14 +58,15 @@
"@types/node": "^10.5.7",
"@types/proxyquire": "^1.3.28",
"@types/request": "^2.47.1",
"@types/sinon": "^7.5.2",
"c8": "^7.0.0",
"codecov": "^3.0.2",
"eslint": "^6.0.0",
"eslint-config-prettier": "^6.0.0",
"eslint-plugin-node": "^11.0.0",
"eslint-plugin-prettier": "^3.0.0",
"google-auth-library": "^5.7.0",
"gts": "^1.0.0",
"gts": "2.0.0-alpha.9",
"http2spy": "^1.1.0",
"jsdoc": "^3.6.2",
"jsdoc-fresh": "^1.0.1",
@@ -75,6 +76,7 @@
"pack-n-play": "^1.0.0-2",
"prettier": "^1.13.5",
"proxyquire": "^2.0.1",
"typescript": "3.6.4"
"sinon": "^9.0.1",
"typescript": "^3.8.3"
}
}
128 changes: 64 additions & 64 deletions samples/automl/automlTranslationDataset.js
@@ -24,7 +24,7 @@

async function createDataset(projectId) {
// [START automl_translation_create_dataset]
- const automl = require(`@google-cloud/automl`);
+ const automl = require('@google-cloud/automl');

const client = new automl.AutoMlClient();
const computeRegion = 'us-central1';
@@ -55,25 +55,25 @@ async function createDataset(projectId) {

// Display the dataset information
console.log(`Dataset name: ${dataset.name}`);
- console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+ console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
console.log(`Dataset display name: ${dataset.displayName}`);
console.log(`Dataset example count: ${dataset.exampleCount}`);
- console.log(`Translation dataset specification:`);
+ console.log('Translation dataset specification:');
console.log(
`\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
);
console.log(
`\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
);
- console.log(`Dataset create time:`);
+ console.log('Dataset create time:');
console.log(`\tseconds: ${dataset.createTime.seconds}`);
console.log(`\tnanos: ${dataset.createTime.nanos}`);
// [END automl_translation_create_dataset]
}

async function listDatasets(projectId, computeRegion, filter) {
// [START automl_translation_list_datasets]
- const automl = require(`@google-cloud/automl`);
+ const automl = require('@google-cloud/automl');
const client = new automl.AutoMlClient();

/**
@@ -97,20 +97,20 @@ async function listDatasets(projectId, computeRegion, filter) {
console.log('No datasets found!');
return;
}
- console.log(`List of datasets:`);
+ console.log('List of datasets:');
datasets.forEach(dataset => {
console.log(`Dataset name: ${dataset.name}`);
- console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+ console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
console.log(`Dataset display name: ${dataset.displayName}`);
console.log(`Dataset example count: ${dataset.exampleCount}`);
- console.log(`Translation dataset specification:`);
+ console.log('Translation dataset specification:');
console.log(
`\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
);
console.log(
`\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
);
- console.log(`Dataset create time:`);
+ console.log('Dataset create time:');
console.log(`\tseconds: ${dataset.createTime.seconds}`);
console.log(`\tnanos: ${dataset.createTime.nanos}`);
});
@@ -119,7 +119,7 @@ async function listDatasets(projectId, computeRegion, filter) {

async function getDataset(projectId, computeRegion, datasetId) {
// [START automl_translation_get_dataset]
- const automl = require(`@google-cloud/automl`);
+ const automl = require('@google-cloud/automl');
const client = new automl.AutoMlClient();

/**
@@ -137,17 +137,17 @@ async function getDataset(projectId, computeRegion, datasetId) {

// Display the dataset information.
console.log(`Dataset name: ${dataset.name}`);
- console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+ console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
console.log(`Dataset display name: ${dataset.displayName}`);
console.log(`Dataset example count: ${dataset.exampleCount}`);
- console.log(`Translation dataset specification:`);
+ console.log('Translation dataset specification:');
console.log(
`\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
);
console.log(
`\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
);
- console.log(`Dataset create time:`);
+ console.log('Dataset create time:');
console.log(`\tseconds: ${dataset.createTime.seconds}`);
console.log(`\tnanos: ${dataset.createTime.nanos}`);

@@ -156,7 +156,7 @@ async function getDataset(projectId, computeRegion, datasetId) {

async function importData(projectId, computeRegion, datasetId, path) {
// [START automl_translation_import_data]
- const automl = require(`@google-cloud/automl`);
+ const automl = require('@google-cloud/automl');

const client = new automl.AutoMlClient();

@@ -172,7 +172,7 @@ async function importData(projectId, computeRegion, datasetId, path) {
const datasetFullId = client.datasetPath(projectId, computeRegion, datasetId);

// Get the multiple Google Cloud Storage URIs.
- const inputUris = path.split(`,`);
+ const inputUris = path.split(',');
const inputConfig = {
gcsSource: {
inputUris: inputUris,
@@ -184,19 +184,19 @@ async function importData(projectId, computeRegion, datasetId, path) {
name: datasetFullId,
inputConfig: inputConfig,
});
- console.log(`Processing import...`);
+ console.log('Processing import...');
const operationResponses = await operation.promise();
// The final result of the operation.
if (operationResponses[2].done === true) {
- console.log(`Data imported.`);
+ console.log('Data imported.');
}

// [END automl_translation_import_data]
}

async function deleteDataset(projectId, computeRegion, datasetId) {
// [START automl_translation_delete_dataset]
- const automl = require(`@google-cloud/automl`);
+ const automl = require('@google-cloud/automl');
const client = new automl.AutoMlClient();

/**
@@ -213,85 +213,85 @@ async function deleteDataset(projectId, computeRegion, datasetId) {
const [operations] = await client.deleteDataset({name: datasetFullId});
const operationResponses = await operations.promise();
// The final result of the operation.
- if (operationResponses[2].done === true) console.log(`Dataset deleted.`);
+ if (operationResponses[2].done === true) console.log('Dataset deleted.');

// [END automl_translation_delete_dataset]
}

- require(`yargs`)
+ require('yargs')
.demand(1)
.options({
computeRegion: {
- alias: `c`,
- type: `string`,
+ alias: 'c',
+ type: 'string',
default: 'us-central1',
requiresArg: true,
- description: `region name e.g. "us-central1"`,
+ description: 'region name e.g. "us-central1"',
},
datasetName: {
- alias: `n`,
- type: `string`,
- default: `testDataSet`,
+ alias: 'n',
+ type: 'string',
+ default: 'testDataSet',
requiresArg: true,
- description: `Name of the Dataset`,
+ description: 'Name of the Dataset',
},
datasetId: {
- alias: `i`,
- type: `string`,
+ alias: 'i',
+ type: 'string',
requiresArg: true,
- description: `Id of the dataset`,
+ description: 'Id of the dataset',
},
filter: {
- alias: `f`,
- default: `translationDatasetMetadata:*`,
- type: `string`,
+ alias: 'f',
+ default: 'translationDatasetMetadata:*',
+ type: 'string',
requiresArg: true,
- description: `Name of the Dataset to search for`,
+ description: 'Name of the Dataset to search for',
},
multilabel: {
- alias: `m`,
- type: `string`,
+ alias: 'm',
+ type: 'string',
default: false,
requiresArg: true,
description:
- `Type of the classification problem, ` +
- `False - MULTICLASS, True - MULTILABEL.`,
+ 'Type of the classification problem, ' +
+ 'False - MULTICLASS, True - MULTILABEL.',
},
outputUri: {
- alias: `o`,
- type: `string`,
+ alias: 'o',
+ type: 'string',
requiresArg: true,
- description: `URI (or local path) to export dataset`,
+ description: 'URI (or local path) to export dataset',
},
path: {
- alias: `p`,
- type: `string`,
+ alias: 'p',
+ type: 'string',
global: true,
- default: `gs://nodejs-docs-samples-vcm/en-ja.csv`,
+ default: 'gs://nodejs-docs-samples-vcm/en-ja.csv',
requiresArg: true,
- description: `URI or local path to input .csv, or array of .csv paths`,
+ description: 'URI or local path to input .csv, or array of .csv paths',
},
projectId: {
- alias: `z`,
- type: `number`,
+ alias: 'z',
+ type: 'number',
default: process.env.GCLOUD_PROJECT,
requiresArg: true,
- description: `The GCLOUD_PROJECT string, e.g. "my-gcloud-project"`,
+ description: 'The GCLOUD_PROJECT string, e.g. "my-gcloud-project"',
},
source: {
- alias: `s`,
- type: `string`,
+ alias: 's',
+ type: 'string',
requiresArg: true,
- description: `The source language to be translated from`,
+ description: 'The source language to be translated from',
},
target: {
- alias: `t`,
- type: `string`,
+ alias: 't',
+ type: 'string',
requiresArg: true,
- description: `The target language to be translated to`,
+ description: 'The target language to be translated to',
},
})
- .command(`createDataset`, `creates a new Dataset`, {}, opts =>
+ .command('createDataset', 'creates a new Dataset', {}, opts =>
createDataset(
opts.projectId,
opts.computeRegion,
@@ -300,24 +300,24 @@ require(`yargs`)
opts.target
)
)
- .command(`list-datasets`, `list all Datasets`, {}, opts =>
+ .command('list-datasets', 'list all Datasets', {}, opts =>
listDatasets(opts.projectId, opts.computeRegion, opts.filter)
)
- .command(`get-dataset`, `Get a Dataset`, {}, opts =>
+ .command('get-dataset', 'Get a Dataset', {}, opts =>
getDataset(opts.projectId, opts.computeRegion, opts.datasetId)
)
- .command(`delete-dataset`, `Delete a dataset`, {}, opts =>
+ .command('delete-dataset', 'Delete a dataset', {}, opts =>
deleteDataset(opts.projectId, opts.computeRegion, opts.datasetId)
)
- .command(`import-data`, `Import labeled items into dataset`, {}, opts =>
+ .command('import-data', 'Import labeled items into dataset', {}, opts =>
importData(opts.projectId, opts.computeRegion, opts.datasetId, opts.path)
)
- .example(`node $0 create-dataset -n "newDataSet" -s "en" -t "ja"`)
- .example(`node $0 list-datasets -f "translationDatasetMetadata:*"`)
- .example(`node $0 get-dataset -i "DATASETID"`)
- .example(`node $0 delete-dataset -i "DATASETID"`)
+ .example('node $0 create-dataset -n "newDataSet" -s "en" -t "ja"')
+ .example('node $0 list-datasets -f "translationDatasetMetadata:*"')
+ .example('node $0 get-dataset -i "DATASETID"')
+ .example('node $0 delete-dataset -i "DATASETID"')
.example(
- `node $0 import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"`
+ 'node $0 import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"'
)
.wrap(120)
.recommendCommands()
(Diff for the remaining changed files is not shown here.)
