Skip to content

Commit a8e7c19

Browse files
author
Ace Nassri
authored
Split analyze.js into two separate samples (#360)
1 parent ce94259 commit a8e7c19

File tree

4 files changed

+391
-1
lines changed

4 files changed

+391
-1
lines changed

language/analyze.v1beta1.js

Lines changed: 291 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,291 @@
1+
/**
2+
* Copyright 2017, Google, Inc.
3+
* Licensed under the Apache License, Version 2.0 (the "License");
4+
* you may not use this file except in compliance with the License.
5+
* You may obtain a copy of the License at
6+
*
7+
* http://www.apache.org/licenses/LICENSE-2.0
8+
*
9+
* Unless required by applicable law or agreed to in writing, software
10+
* distributed under the License is distributed on an "AS IS" BASIS,
11+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
* See the License for the specific language governing permissions and
13+
* limitations under the License.
14+
*/
15+
16+
'use strict';
17+
18+
/**
 * Detects and prints the sentiment of a text string using the
 * Cloud Natural Language API (v1beta1 client).
 * @param {string} text The text to analyze, e.g. "Hello, world!"
 */
function analyzeSentimentOfText (text) {
  // [START language_sentiment_string]
  // Imports the Google Cloud client library
  const Language = require('@google-cloud/language');

  // Instantiates a client
  const language = Language();

  // The text to analyze, e.g. "Hello, world!"
  // const text = 'Hello, world!';

  // Instantiates a Document, representing the provided text
  const document = language.document({ content: text });

  // Detects the sentiment of the document
  document.detectSentiment()
    .then((results) => {
      // The raw API response carries both the overall document sentiment
      // and a per-sentence breakdown.
      const { documentSentiment, sentences } = results[1];

      console.log(`Document sentiment:`);
      console.log(` Score: ${documentSentiment.score}`);
      console.log(` Magnitude: ${documentSentiment.magnitude}`);

      for (const sentence of sentences) {
        console.log(`Sentence: ${sentence.text.content}`);
        console.log(` Score: ${sentence.sentiment.score}`);
        console.log(` Magnitude: ${sentence.sentiment.magnitude}`);
      }
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END language_sentiment_string]
}
/**
 * Detects and prints the sentiment of a text file stored in Google Cloud
 * Storage, using the Cloud Natural Language API (v1beta1 client).
 * @param {string} bucketName Name of the bucket where the file resides.
 * @param {string} fileName Name of the file to analyze.
 */
function analyzeSentimentInFile (bucketName, fileName) {
  // [START language_sentiment_file]
  // Imports the Google Cloud client libraries
  const Language = require('@google-cloud/language');
  const Storage = require('@google-cloud/storage');

  // Instantiates the clients
  const language = Language();
  const storage = Storage();

  // The name of the bucket where the file resides, e.g. "my-bucket"
  // const bucketName = 'my-bucket';

  // The name of the file to analyze, e.g. "file.txt"
  // const fileName = 'file.txt';

  // Instantiates a Document, representing a text file in Cloud Storage
  const document = language.document({
    // The Google Cloud Storage file
    content: storage.bucket(bucketName).file(fileName)
  });

  // Detects the sentiment of the document
  document.detectSentiment()
    .then((results) => {
      // The raw API response carries both the overall document sentiment
      // and a per-sentence breakdown.
      const { documentSentiment, sentences } = results[1];

      console.log(`Document sentiment:`);
      console.log(` Score: ${documentSentiment.score}`);
      console.log(` Magnitude: ${documentSentiment.magnitude}`);

      for (const sentence of sentences) {
        console.log(`Sentence: ${sentence.text.content}`);
        console.log(` Score: ${sentence.sentiment.score}`);
        console.log(` Magnitude: ${sentence.sentiment.magnitude}`);
      }
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END language_sentiment_file]
}
/**
 * Detects and prints the entities found in a text string using the
 * Cloud Natural Language API (v1beta1 client).
 * @param {string} text The text to analyze, e.g. "Hello, world!"
 */
function analyzeEntitiesOfText (text) {
  // [START language_entities_string]
  // Imports the Google Cloud client library
  const Language = require('@google-cloud/language');

  // Instantiates a client
  const language = Language();

  // The text to analyze, e.g. "Hello, world!"
  // const text = 'Hello, world!';

  // Instantiates a Document, representing the provided text
  const document = language.document({ content: text });

  // Detects entities in the document
  document.detectEntities()
    .then((results) => {
      // results[1] is the raw API response; its `entities` property is the
      // flat list of entity objects (name, type, salience, metadata).
      const entities = results[1].entities;

      console.log('Entities:');
      entities.forEach((entity) => {
        console.log(entity.name);
        console.log(` - Type: ${entity.type}, Salience: ${entity.salience}`);
        if (entity.metadata && entity.metadata.wikipedia_url) {
          // Fix: the template previously ended with a stray "$" that was
          // printed verbatim after the URL.
          console.log(` - Wikipedia URL: ${entity.metadata.wikipedia_url}`);
        }
      });
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END language_entities_string]
}
/**
 * Detects and prints the entities found in a text file stored in Google
 * Cloud Storage, using the Cloud Natural Language API (v1beta1 client).
 * @param {string} bucketName Name of the bucket where the file resides.
 * @param {string} fileName Name of the file to analyze.
 */
function analyzeEntitiesInFile (bucketName, fileName) {
  // [START language_entities_file]
  // Imports the Google Cloud client libraries
  const Language = require('@google-cloud/language');
  const Storage = require('@google-cloud/storage');

  // Instantiates the clients
  const language = Language();
  const storage = Storage();

  // The name of the bucket where the file resides, e.g. "my-bucket"
  // const bucketName = 'my-bucket';

  // The name of the file to analyze, e.g. "file.txt"
  // const fileName = 'file.txt';

  // Instantiates a Document, representing a text file in Cloud Storage
  const document = language.document({
    // The Google Cloud Storage file
    content: storage.bucket(bucketName).file(fileName)
  });

  // Detects entities in the document
  document.detectEntities()
    .then((results) => {
      // Fix: use the raw API response's flat entity list, matching the
      // sibling analyzeEntitiesOfText sample. The previous `results[0]` is
      // the client's convenience value, which does not carry the per-entity
      // `name`/`type`/`salience` fields accessed below.
      const entities = results[1].entities;

      console.log('Entities:');
      entities.forEach((entity) => {
        console.log(entity.name);
        console.log(` - Type: ${entity.type}, Salience: ${entity.salience}`);
        if (entity.metadata && entity.metadata.wikipedia_url) {
          // Fix: the template previously ended with a stray "$" that was
          // printed verbatim after the URL.
          console.log(` - Wikipedia URL: ${entity.metadata.wikipedia_url}`);
        }
      });
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END language_entities_file]
}
/**
 * Detects and prints the syntax (parts of speech) of a text string using
 * the Cloud Natural Language API (v1beta1 client).
 * @param {string} text The text to analyze, e.g. "Hello, world!"
 */
function analyzeSyntaxOfText (text) {
  // [START language_syntax_string]
  // Imports the Google Cloud client library
  const Language = require('@google-cloud/language');

  // Instantiates a client
  const language = Language();

  // The text to analyze, e.g. "Hello, world!"
  // const text = 'Hello, world!';

  // Instantiates a Document, representing the provided text
  const document = language.document({ content: text });

  // Detects syntax in the document
  document.detectSyntax()
    .then((results) => {
      const tokens = results[0];

      console.log('Parts of speech:');
      for (const token of tokens) {
        console.log(`${token.partOfSpeech.tag}: ${token.text.content}`);
        console.log(`Morphology:`, token.partOfSpeech);
      }
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END language_syntax_string]
}
/**
 * Detects and prints the syntax (parts of speech) of a text file stored in
 * Google Cloud Storage, using the Cloud Natural Language API (v1beta1
 * client).
 * @param {string} bucketName Name of the bucket where the file resides.
 * @param {string} fileName Name of the file to analyze.
 */
function analyzeSyntaxInFile (bucketName, fileName) {
  // [START language_syntax_file]
  // Imports the Google Cloud client libraries
  const Language = require('@google-cloud/language');
  const Storage = require('@google-cloud/storage');

  // Instantiates the clients
  const language = Language();
  const storage = Storage();

  // The name of the bucket where the file resides, e.g. "my-bucket"
  // const bucketName = 'my-bucket';

  // The name of the file to analyze, e.g. "file.txt"
  // const fileName = 'file.txt';

  // Instantiates a Document, representing a text file in Cloud Storage
  const document = language.document({
    // The Google Cloud Storage file
    content: storage.bucket(bucketName).file(fileName)
  });

  // Detects syntax in the document
  document.detectSyntax()
    .then((results) => {
      const tokens = results[0];

      console.log('Parts of speech:');
      for (const token of tokens) {
        console.log(`${token.partOfSpeech.tag}: ${token.text.content}`);
        console.log(`Morphology:`, token.partOfSpeech);
      }
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END language_syntax_file]
}
// Command-line entry point: wires each subcommand to its sample function.
// Commands, descriptions, examples, and parser options are identical to the
// original fluent chain; they are just declared as a table for readability.
const cli = require(`yargs`) // eslint-disable-line
  .demand(1);

[
  [`sentiment-text <text>`, `Detects sentiment of a string.`,
    (opts) => analyzeSentimentOfText(opts.text)],
  [`sentiment-file <bucketName> <fileName>`, `Detects sentiment in a file in Google Cloud Storage.`,
    (opts) => analyzeSentimentInFile(opts.bucketName, opts.fileName)],
  [`entities-text <text>`, `Detects entities in a string.`,
    (opts) => analyzeEntitiesOfText(opts.text)],
  [`entities-file <bucketName> <fileName>`, `Detects entities in a file in Google Cloud Storage.`,
    (opts) => analyzeEntitiesInFile(opts.bucketName, opts.fileName)],
  [`syntax-text <text>`, `Detects syntax of a string.`,
    (opts) => analyzeSyntaxOfText(opts.text)],
  [`syntax-file <bucketName> <fileName>`, `Detects syntax in a file in Google Cloud Storage.`,
    (opts) => analyzeSyntaxInFile(opts.bucketName, opts.fileName)]
].forEach(([command, description, handler]) => {
  cli.command(command, description, {}, handler);
});

cli
  .example(`node $0 sentiment-text "President Obama is speaking at the White House."`)
  .example(`node $0 sentiment-file my-bucket file.txt`, `Detects sentiment in gs://my-bucket/file.txt`)
  .example(`node $0 entities-text "President Obama is speaking at the White House."`)
  .example(`node $0 entities-file my-bucket file.txt`, `Detects entities in gs://my-bucket/file.txt`)
  .example(`node $0 syntax-text "President Obama is speaking at the White House."`)
  .example(`node $0 syntax-file my-bucket file.txt`, `Detects syntax in gs://my-bucket/file.txt`)
  .wrap(120)
  .recommendCommands()
  .epilogue(`For more information, see https://cloud.google.com/natural-language/docs`)
  .help()
  .strict()
  .argv;
File renamed without changes.
Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
/**
2+
* Copyright 2017, Google, Inc.
3+
* Licensed under the Apache License, Version 2.0 (the "License");
4+
* you may not use this file except in compliance with the License.
5+
* You may obtain a copy of the License at
6+
*
7+
* http://www.apache.org/licenses/LICENSE-2.0
8+
*
9+
* Unless required by applicable law or agreed to in writing, software
10+
* distributed under the License is distributed on an "AS IS" BASIS,
11+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
* See the License for the specific language governing permissions and
13+
* limitations under the License.
14+
*/
15+
16+
'use strict';
17+
18+
const path = require(`path`);
const storage = require(`@google-cloud/storage`)();
const test = require(`ava`);
const tools = require(`@google-cloud/nodejs-repo-tools`);
const uuid = require(`uuid`);

// Sample under test, and the working directory it must be run from.
const cmd = `node analyze.v1beta1.js`;
const cwd = path.join(__dirname, `..`);

// Cloud Storage fixture: a uniquely named bucket holding the sample text.
const bucketName = `nodejs-docs-samples-test-${uuid.v4()}`;
const fileName = `text.txt`;
const localFilePath = path.join(__dirname, `../resources/text.txt`);
const text = `President Obama is speaking at the White House.`;

// Provision the bucket and upload the fixture before any test runs.
test.before(async () => {
  tools.checkCredentials();
  const [bucket] = await storage.createBucket(bucketName);
  await bucket.upload(localFilePath);
});

// Tear down the bucket even when tests fail.
test.after.always(async () => {
  const bucket = storage.bucket(bucketName);
  await bucket.deleteFiles({ force: true });
  await bucket.deleteFiles({ force: true }); // Try a second time...
  await bucket.delete();
});

test.beforeEach(tools.stubConsole);
test.afterEach.always(tools.restoreConsole);

// Asserts that every expected snippet appears in the sample's output.
const expectAll = (t, output, snippets) => {
  snippets.forEach((snippet) => t.true(output.includes(snippet)));
};

test(`should analyze sentiment in text`, async (t) => {
  const output = await tools.runAsync(`${cmd} sentiment-text "${text}"`, cwd);
  expectAll(t, output, [
    `Document sentiment:`,
    `Sentence: ${text}`,
    `Score: 0`,
    `Magnitude: 0`
  ]);
});

test(`should analyze sentiment in a file`, async (t) => {
  const output = await tools.runAsync(`${cmd} sentiment-file ${bucketName} ${fileName}`, cwd);
  expectAll(t, output, [
    `Document sentiment:`,
    `Sentence: ${text}`,
    `Score: 0`,
    `Magnitude: 0`
  ]);
});

test(`should analyze entities in text`, async (t) => {
  const output = await tools.runAsync(`${cmd} entities-text "${text}"`, cwd);
  expectAll(t, output, [
    `Obama`,
    `Type: PERSON`,
    `White House`,
    `Type: LOCATION`,
    `/wiki/Barack_Obama`
  ]);
});

test('should analyze entities in a file', async (t) => {
  const output = await tools.runAsync(`${cmd} entities-file ${bucketName} ${fileName}`, cwd);
  expectAll(t, output, [
    `Entities:`,
    `Obama`,
    `Type: PERSON`,
    `White House`,
    `Type: LOCATION`,
    `/wiki/Barack_Obama`
  ]);
});

test(`should analyze syntax in text`, async (t) => {
  const output = await tools.runAsync(`${cmd} syntax-text "${text}"`, cwd);
  expectAll(t, output, [
    `Parts of speech:`,
    `NOUN:`,
    `President`,
    `Obama`,
    `Morphology:`,
    `tag: 'NOUN'`
  ]);
});

test('should analyze syntax in a file', async (t) => {
  const output = await tools.runAsync(`${cmd} syntax-file ${bucketName} ${fileName}`, cwd);
  expectAll(t, output, [
    `NOUN:`,
    `President`,
    `Obama`,
    `Morphology:`,
    `tag: 'NOUN'`
  ]);
});

0 commit comments

Comments
 (0)