Skip to content

Commit

Permalink
Add Cloud Client NL API samples.
Browse files Browse the repository at this point in the history
  • Loading branch information
jmdobry committed Dec 6, 2016
1 parent f122cbc commit 2a15659
Show file tree
Hide file tree
Showing 4 changed files with 251 additions and 2 deletions.
5 changes: 4 additions & 1 deletion language/cloud-client/README.rst.in
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ product:
name: Google Cloud Natural Language API
short_name: Cloud Natural Language API
url: https://cloud.google.com/natural-language/docs/
description: >
description: >
The `Google Cloud Natural Language API`_ provides natural language
understanding technologies to developers, including sentiment analysis,
entity recognition, and syntax analysis. This API is part of the larger
Expand All @@ -17,5 +17,8 @@ setup:
samples:
- name: Quickstart
file: quickstart.py
- name: Snippets
file: snippets.py
show_help: true

cloud_client_library: true
2 changes: 1 addition & 1 deletion language/cloud-client/requirements.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
google-cloud-language==0.21.0
google-cloud-language==0.22.0
172 changes: 172 additions & 0 deletions language/cloud-client/snippets.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
#!/usr/bin/env python

# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This application demonstrates how to perform basic operations with the
Google Cloud Natural Language API
For more information, see the documentation at
https://cloud.google.com/natural-language/docs.
"""

import argparse

from google.cloud import language


def sentiment_text(text):
    """Detects sentiment in the text."""
    client = language.Client()

    # Build an in-memory plain-text document from the given string.
    doc = client.document_from_text(text)

    # Run sentiment analysis. HTML can be analyzed instead by setting
    # document.doc_type == language.Document.HTML
    result = doc.analyze_sentiment()

    print('Score: {}'.format(result.score))
    print('Magnitude: {}'.format(result.magnitude))


def sentiment_file(gcs_uri):
    """Detects sentiment in the file located in Google Cloud Storage."""
    client = language.Client()

    # Reference the document by its Cloud Storage URI (gs://bucket/object).
    doc = client.document_from_url(gcs_uri)

    # Run sentiment analysis. HTML can be analyzed instead by setting
    # document.doc_type == language.Document.HTML
    result = doc.analyze_sentiment()

    print('Score: {}'.format(result.score))
    print('Magnitude: {}'.format(result.magnitude))


def entities_text(text):
    """Detects entities in the text."""
    client = language.Client()

    # Build an in-memory plain-text document from the given string.
    doc = client.document_from_text(text)

    # Run entity extraction. HTML can be analyzed instead by setting
    # document.doc_type == language.Document.HTML
    for entity in doc.analyze_entities():
        print('=' * 20)
        # Print each field on its own left-aligned line.
        fields = (
            ('name', entity.name),
            ('type', entity.entity_type),
            ('wikipedia_url', entity.wikipedia_url),
            ('metadata', entity.metadata),
            ('salience', entity.salience),
        )
        for label, value in fields:
            print('{:<16}: {}'.format(label, value))


def entities_file(gcs_uri):
    """Detects entities in the file located in Google Cloud Storage."""
    client = language.Client()

    # Reference the document by its Cloud Storage URI (gs://bucket/object).
    doc = client.document_from_url(gcs_uri)

    # Run entity extraction. HTML can be analyzed instead by setting
    # document.doc_type == language.Document.HTML
    for entity in doc.analyze_entities():
        print('=' * 20)
        # Print each field on its own left-aligned line.
        fields = (
            ('name', entity.name),
            ('type', entity.entity_type),
            ('wikipedia_url', entity.wikipedia_url),
            ('metadata', entity.metadata),
            ('salience', entity.salience),
        )
        for label, value in fields:
            print('{:<16}: {}'.format(label, value))


def syntax_text(text):
    """Detects syntax in the text."""
    client = language.Client()

    # Build an in-memory plain-text document from the given string.
    doc = client.document_from_text(text)

    # Run syntax analysis. HTML can be analyzed instead by setting
    # document.doc_type == language.Document.HTML
    for token in doc.analyze_syntax():
        print('{}: {}'.format(token.part_of_speech, token.text_content))


def syntax_file(gcs_uri):
    """Detects syntax in the file located in Google Cloud Storage."""
    client = language.Client()

    # Reference the document by its Cloud Storage URI (gs://bucket/object).
    doc = client.document_from_url(gcs_uri)

    # Run syntax analysis. HTML can be analyzed instead by setting
    # document.doc_type == language.Document.HTML
    for token in doc.analyze_syntax():
        print('{}: {}'.format(token.part_of_speech, token.text_content))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subparsers = parser.add_subparsers(dest='command')

    # Table of (subcommand name, handler function, positional argument name).
    # Each handler's docstring doubles as the subcommand help text.
    commands = (
        ('sentiment-text', sentiment_text, 'text'),
        ('sentiment-file', sentiment_file, 'gcs_uri'),
        ('entities-text', entities_text, 'text'),
        ('entities-file', entities_file, 'gcs_uri'),
        ('syntax-text', syntax_text, 'text'),
        ('syntax-file', syntax_file, 'gcs_uri'),
    )
    for command_name, handler, arg_name in commands:
        subparser = subparsers.add_parser(command_name, help=handler.__doc__)
        subparser.add_argument(arg_name)

    args = parser.parse_args()

    # Dispatch to the matching handler; no-op when no subcommand was given,
    # matching the original if/elif chain's behavior.
    dispatch = {name: (fn, arg) for name, fn, arg in commands}
    if args.command in dispatch:
        handler, arg_name = dispatch[args.command]
        handler(getattr(args, arg_name))
74 changes: 74 additions & 0 deletions language/cloud-client/snippets_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import snippets


def test_sentiment_text(cloud_config, capsys):
    """Checks that inline-text sentiment analysis prints a positive score."""
    sample = 'President Obama is speaking at the White House.'
    snippets.sentiment_text(sample)
    captured, _ = capsys.readouterr()
    assert 'Score: 1' in captured


def test_sentiment_file(cloud_config, capsys):
    """Checks that GCS-file sentiment analysis prints a positive score."""
    gcs_uri = 'gs://{}/test.txt'.format(cloud_config.storage_bucket)
    snippets.sentiment_file(gcs_uri)
    captured, _ = capsys.readouterr()
    assert 'Score: 1' in captured


def test_entities_text(cloud_config, capsys):
    """Checks that inline-text entity extraction finds the 'Obama' entity."""
    sample = 'President Obama is speaking at the White House.'
    snippets.entities_text(sample)
    captured, _ = capsys.readouterr()
    assert 'name: Obama' in captured


def test_entities_file(cloud_config, capsys):
    """Checks that GCS-file entity extraction finds the 'Obama' entity."""
    gcs_uri = 'gs://{}/test.txt'.format(cloud_config.storage_bucket)
    snippets.entities_file(gcs_uri)
    captured, _ = capsys.readouterr()
    assert 'name: Obama' in captured


def test_syntax_text(cloud_config, capsys):
    """Verifies the part-of-speech tag printed for each token in the sample.

    The original block was a SyntaxError: the string literal spanned
    multiple lines without parentheses or continuations. Rewritten as one
    assertion per token so a failure pinpoints the offending token.
    """
    snippets.syntax_text('President Obama is speaking at the White House.')
    out, _ = capsys.readouterr()
    assert 'NOUN: President' in out
    assert 'NOUN: Obama' in out
    assert 'VERB: is' in out
    assert 'VERB: speaking' in out
    assert 'ADP: at' in out
    assert 'DET: the' in out
    assert 'NOUN: White' in out
    assert 'NOUN: House' in out
    assert 'PUNCT: .' in out


def test_syntax_file(cloud_config, capsys):
    """Verifies the part-of-speech tag printed for each token of the GCS file.

    The original block was a SyntaxError: the string literal spanned
    multiple lines without parentheses or continuations. Rewritten as one
    assertion per token so a failure pinpoints the offending token.
    """
    cloud_storage_input_uri = 'gs://{}/test.txt'.format(
        cloud_config.storage_bucket)
    snippets.syntax_file(cloud_storage_input_uri)
    out, _ = capsys.readouterr()
    assert 'NOUN: President' in out
    assert 'NOUN: Obama' in out
    assert 'VERB: is' in out
    assert 'VERB: speaking' in out
    assert 'ADP: at' in out
    assert 'DET: the' in out
    assert 'NOUN: White' in out
    assert 'NOUN: House' in out
    assert 'PUNCT: .' in out

0 comments on commit 2a15659

Please sign in to comment.