Add Cloud Client NL API samples. [(#668)](GoogleCloudPlatform/python-…
jmdobry authored and Jon Wayne Parrott committed Dec 6, 2016
1 parent 7fb5eb5 commit 714cc10
Showing 6 changed files with 239 additions and 3 deletions.
5 changes: 4 additions & 1 deletion samples/snippets/cloud-client/README.rst.in
@@ -4,7 +4,7 @@ product:
  name: Google Cloud Natural Language API
  short_name: Cloud Natural Language API
  url: https://cloud.google.com/natural-language/docs/
  description: >
    The `Google Cloud Natural Language API`_ provides natural language
    understanding technologies to developers, including sentiment analysis,
    entity recognition, and syntax analysis. This API is part of the larger
@@ -17,5 +17,8 @@ setup:
samples:
- name: Quickstart
  file: quickstart.py
- name: Snippets
  file: snippets.py
  show_help: true

cloud_client_library: true
2 changes: 1 addition & 1 deletion samples/snippets/cloud-client/quickstart.py
@@ -31,7 +31,7 @@ def run_quickstart():
    sentiment = document.analyze_sentiment()

    print('Text: {}'.format(text))
    print('Sentiment: {}, {}'.format(sentiment.polarity, sentiment.magnitude))
    print('Sentiment: {}, {}'.format(sentiment.score, sentiment.magnitude))
    # [END language_quickstart]


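For context, the changed print statement is the only functional edit in quickstart.py; the rest of run_quickstart() is unchanged. A minimal sketch of the full flow is shown below, assuming the 0.22.0 client surface (which reports Sentiment.score and Sentiment.magnitude in place of the earlier polarity attribute); the sample text and comments here are illustrative, not part of the diff.

from google.cloud import language


def run_quickstart():
    # Instantiates a client for the Natural Language API.
    language_client = language.Client()

    # Wraps the text to analyze in a plain-text document.
    text = 'Hello, world!'
    document = language_client.document_from_text(text)

    # Detects sentiment; the 0.22.0 client reports score and magnitude.
    sentiment = document.analyze_sentiment()

    print('Text: {}'.format(text))
    print('Sentiment: {}, {}'.format(sentiment.score, sentiment.magnitude))


if __name__ == '__main__':
    run_quickstart()
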
2 changes: 1 addition & 1 deletion samples/snippets/cloud-client/requirements.txt
@@ -1 +1 @@
google-cloud-language==0.21.0
google-cloud-language==0.22.0
1 change: 1 addition & 0 deletions samples/snippets/cloud-client/resources/text.txt
@@ -0,0 +1 @@
President Obama is speaking at the White House.
172 changes: 172 additions & 0 deletions samples/snippets/cloud-client/snippets.py
@@ -0,0 +1,172 @@
#!/usr/bin/env python

# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This application demonstrates how to perform basic operations with the
Google Cloud Natural Language API
For more information, the documentation at
https://cloud.google.com/natural-language/docs.
"""

import argparse

from google.cloud import language


def sentiment_text(text):
    """Detects sentiment in the text."""
    language_client = language.Client()

    # Instantiates a plain text document.
    document = language_client.document_from_text(text)

    # Detects sentiment in the document. You can also analyze HTML with:
    # document.doc_type == language.Document.HTML
    sentiment = document.analyze_sentiment()

    print('Score: {}'.format(sentiment.score))
    print('Magnitude: {}'.format(sentiment.magnitude))


def sentiment_file(gcs_uri):
    """Detects sentiment in the file located in Google Cloud Storage."""
    language_client = language.Client()

    # Instantiates a plain text document.
    document = language_client.document_from_url(gcs_uri)

    # Detects sentiment in the document. You can also analyze HTML with:
    # document.doc_type == language.Document.HTML
    sentiment = document.analyze_sentiment()

    print('Score: {}'.format(sentiment.score))
    print('Magnitude: {}'.format(sentiment.magnitude))


def entities_text(text):
    """Detects entities in the text."""
    language_client = language.Client()

    # Instantiates a plain text document.
    document = language_client.document_from_text(text)

    # Detects entities in the document. You can also analyze HTML with:
    # document.doc_type == language.Document.HTML
    entities = document.analyze_entities()

    for entity in entities:
        print('=' * 20)
        print('{:<16}: {}'.format('name', entity.name))
        print('{:<16}: {}'.format('type', entity.entity_type))
        print('{:<16}: {}'.format('wikipedia_url', entity.wikipedia_url))
        print('{:<16}: {}'.format('metadata', entity.metadata))
        print('{:<16}: {}'.format('salience', entity.salience))


def entities_file(gcs_uri):
    """Detects entities in the file located in Google Cloud Storage."""
    language_client = language.Client()

    # Instantiates a plain text document.
    document = language_client.document_from_url(gcs_uri)

    # Detects entities in the document. You can also analyze HTML with:
    # document.doc_type == language.Document.HTML
    entities = document.analyze_entities()

    for entity in entities:
        print('=' * 20)
        print('{:<16}: {}'.format('name', entity.name))
        print('{:<16}: {}'.format('type', entity.entity_type))
        print('{:<16}: {}'.format('wikipedia_url', entity.wikipedia_url))
        print('{:<16}: {}'.format('metadata', entity.metadata))
        print('{:<16}: {}'.format('salience', entity.salience))


def syntax_text(text):
    """Detects syntax in the text."""
    language_client = language.Client()

    # Instantiates a plain text document.
    document = language_client.document_from_text(text)

    # Detects syntax in the document. You can also analyze HTML with:
    # document.doc_type == language.Document.HTML
    tokens = document.analyze_syntax()

    for token in tokens:
        print('{}: {}'.format(token.part_of_speech, token.text_content))


def syntax_file(gcs_uri):
    """Detects syntax in the file located in Google Cloud Storage."""
    language_client = language.Client()

    # Instantiates a plain text document.
    document = language_client.document_from_url(gcs_uri)

    # Detects syntax in the document. You can also analyze HTML with:
    # document.doc_type == language.Document.HTML
    tokens = document.analyze_syntax()

    for token in tokens:
        print('{}: {}'.format(token.part_of_speech, token.text_content))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subparsers = parser.add_subparsers(dest='command')

    sentiment_text_parser = subparsers.add_parser(
        'sentiment-text', help=sentiment_text.__doc__)
    sentiment_text_parser.add_argument('text')

    sentiment_file_parser = subparsers.add_parser(
        'sentiment-file', help=sentiment_file.__doc__)
    sentiment_file_parser.add_argument('gcs_uri')

    entities_text_parser = subparsers.add_parser(
        'entities-text', help=entities_text.__doc__)
    entities_text_parser.add_argument('text')

    entities_file_parser = subparsers.add_parser(
        'entities-file', help=entities_file.__doc__)
    entities_file_parser.add_argument('gcs_uri')

    syntax_text_parser = subparsers.add_parser(
        'syntax-text', help=syntax_text.__doc__)
    syntax_text_parser.add_argument('text')

    syntax_file_parser = subparsers.add_parser(
        'syntax-file', help=syntax_file.__doc__)
    syntax_file_parser.add_argument('gcs_uri')

    args = parser.parse_args()

    if args.command == 'sentiment-text':
        sentiment_text(args.text)
    elif args.command == 'sentiment-file':
        sentiment_file(args.gcs_uri)
    elif args.command == 'entities-text':
        entities_text(args.text)
    elif args.command == 'entities-file':
        entities_file(args.gcs_uri)
    elif args.command == 'syntax-text':
        syntax_text(args.text)
    elif args.command == 'syntax-file':
        syntax_file(args.gcs_uri)
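
snippets.py wires each operation to an argparse sub-command, but the functions can also be imported and called directly. A minimal usage sketch, assuming the environment the samples already require (Application Default Credentials and google-cloud-language==0.22.0), with the Cloud Storage bucket name left as a placeholder:

# Direct use of the snippet functions, bypassing the argparse CLI.
import snippets

text = 'President Obama is speaking at the White House.'

snippets.sentiment_text(text)   # prints Score and Magnitude
snippets.entities_text(text)    # prints a block of fields per entity
snippets.syntax_text(text)      # prints a part-of-speech tag per token

# Equivalent command-line invocations:
#   python snippets.py sentiment-text "President Obama is speaking at the White House."
#   python snippets.py entities-file gs://<your-bucket>/text.txt
#   python snippets.py syntax-file gs://<your-bucket>/text.txt
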
60 changes: 60 additions & 0 deletions samples/snippets/cloud-client/snippets_test.py
@@ -0,0 +1,60 @@
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import snippets


def test_sentiment_text(cloud_config, capsys):
    snippets.sentiment_text('President Obama is speaking at the White House.')
    out, _ = capsys.readouterr()
    assert 'Score: 0.2' in out


def test_sentiment_file(cloud_config, capsys):
    cloud_storage_input_uri = 'gs://{}/text.txt'.format(
        cloud_config.storage_bucket)
    snippets.sentiment_file(cloud_storage_input_uri)
    out, _ = capsys.readouterr()
    assert 'Score: 0.2' in out


def test_entities_text(cloud_config, capsys):
    snippets.entities_text('President Obama is speaking at the White House.')
    out, _ = capsys.readouterr()
    assert 'name' in out
    assert ': Obama' in out


def test_entities_file(cloud_config, capsys):
    cloud_storage_input_uri = 'gs://{}/text.txt'.format(
        cloud_config.storage_bucket)
    snippets.entities_file(cloud_storage_input_uri)
    out, _ = capsys.readouterr()
    assert 'name' in out
    assert ': Obama' in out


def test_syntax_text(cloud_config, capsys):
    snippets.syntax_text('President Obama is speaking at the White House.')
    out, _ = capsys.readouterr()
    assert 'NOUN: President' in out


def test_syntax_file(cloud_config, capsys):
    cloud_storage_input_uri = 'gs://{}/text.txt'.format(
        cloud_config.storage_bucket)
    snippets.syntax_file(cloud_storage_input_uri)
    out, _ = capsys.readouterr()
    assert 'NOUN: President' in out
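
The tests above rely on a cloud_config pytest fixture with a storage_bucket attribute, which comes from the repository's shared conftest.py rather than from this commit, and they expect resources/text.txt to have been uploaded to that bucket as text.txt. A hypothetical, minimal stand-in for running the tests in isolation could look like the following (the CLOUD_STORAGE_BUCKET environment variable name is an assumption, not something this change defines):

# conftest.py -- hypothetical minimal fixture; the real repository supplies a
# richer shared version that these tests actually use.
import collections
import os

import pytest

CloudConfig = collections.namedtuple('CloudConfig', ['storage_bucket'])


@pytest.fixture
def cloud_config():
    # Assumes the target bucket name is exported as CLOUD_STORAGE_BUCKET and
    # that resources/text.txt has already been copied there as text.txt.
    return CloudConfig(storage_bucket=os.environ['CLOUD_STORAGE_BUCKET'])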
