Moved the imports and region tags inside the functions #1891

Merged · 12 commits · Dec 4, 2018
174 changes: 107 additions & 67 deletions language/cloud-client/v1/snippets.py
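
In short, this PR makes each snippet self-contained: the imports, a hardcoded sample input, and the [START]/[END] region tags move from module level into each function body, and the functions stop taking a text or gcs_uri parameter. A minimal sketch of the resulting pattern, pieced together from the diff below; the middle of the function is reconstructed from the standard google-cloud-language v1 calls used elsewhere in this file, so treat it as illustrative rather than an exact copy of the new snippets.py:

def sentiment_text():
    # [START language_sentiment_text]
    import six
    from google.cloud import language
    from google.cloud.language import enums
    from google.cloud.language import types

    text = 'Hello, world!'

    client = language.LanguageServiceClient()

    if isinstance(text, six.binary_type):
        text = text.decode('utf-8')

    # Wrap the sample text in a plain-text Document and request
    # document-level sentiment from the API.
    document = types.Document(
        content=text,
        type=enums.Document.Type.PLAIN_TEXT)

    sentiment = client.analyze_sentiment(document=document).document_sentiment

    print('Score: {}'.format(sentiment.score))
    print('Magnitude: {}'.format(sentiment.magnitude))
    # [END language_sentiment_text]

The point of the move is presumably that the region-tagged block published in the docs now carries its own imports and sample input.
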
@@ -1,6 +1,6 @@
#!/usr/bin/env python

# Copyright 2016 Google, Inc.
# Copyright 2018 Google, LLC.
Contributor: For the future, I've been told we only need to update this for new files. It's just LLC, no trailing period.
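(Presumably the corrected header would then read: # Copyright 2018 Google LLC; the comment only calls out the trailing period explicitly, so take the exact wording as an assumption.)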

#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,15 +24,16 @@
import argparse
import sys

from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types
import six

def sentiment_text():
# [START language_sentiment_text]
import six
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

text = 'Hello, world!'

# [START language_sentiment_text]
def sentiment_text(text):
"""Detects sentiment in the text."""
client = language.LanguageServiceClient()

if isinstance(text, six.binary_type):
@@ -51,12 +52,17 @@ def sentiment_text(text):
print('Score: {}'.format(sentiment.score))
print('Magnitude: {}'.format(sentiment.magnitude))
# [END language_python_migration_sentiment_text]
# [END language_sentiment_text]
# [END language_sentiment_text]


def sentiment_file():
# [START language_sentiment_gcs]
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

gcs_uri = 'gs://cloud-samples-data/language/hello.txt'

# [START language_sentiment_gcs]
def sentiment_file(gcs_uri):
"""Detects sentiment in the file located in Google Cloud Storage."""
client = language.LanguageServiceClient()

# Instantiates a plain text document.
@@ -72,12 +78,18 @@ def sentiment_file(gcs_uri):

print('Score: {}'.format(sentiment.score))
print('Magnitude: {}'.format(sentiment.magnitude))
# [END language_sentiment_gcs]
# [END language_sentiment_gcs]


def entities_text():
# [START language_entities_text]
import six
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

text = 'President Kennedy spoke at the White House.'

# [START language_entities_text]
def entities_text(text):
"""Detects entities in the text."""
client = language.LanguageServiceClient()

if isinstance(text, six.binary_type):
@@ -105,12 +117,12 @@ def entities_text(text):
print(u'{:<16}: {}'.format('wikipedia_url',
entity.metadata.get('wikipedia_url', '-')))
# [END language_python_migration_entities_text]
# [END language_entities_text]
# [END language_entities_text]


# [START language_entities_gcs]
def entities_file(gcs_uri):
"""Detects entities in the file located in Google Cloud Storage."""
def entities_file():
# [START language_entities_gcs]
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

gcs_uri = 'gs://cloud-samples-data/language/president.txt'

client = language.LanguageServiceClient()

# Instantiates a plain text document.
@@ -131,12 +148,18 @@ def entities_file(gcs_uri):
print(u'{:<16}: {}'.format('salience', entity.salience))
print(u'{:<16}: {}'.format('wikipedia_url',
entity.metadata.get('wikipedia_url', '-')))
# [END language_entities_gcs]
# [END language_entities_gcs]


def syntax_text():
# [START language_syntax_text]
import six
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

text = 'President Kennedy spoke at the White House.'

# [START language_syntax_text]
def syntax_text(text):
"""Detects syntax in the text."""
client = language.LanguageServiceClient()

if isinstance(text, six.binary_type):
@@ -157,12 +180,17 @@ def syntax_text(text):
print(u'{}: {}'.format(part_of_speech_tag.name,
token.text.content))
# [END language_python_migration_syntax_text]
# [END language_syntax_text]
# [END language_syntax_text]


def syntax_file():
# [START language_syntax_gcs]
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

gcs_uri = 'gs://cloud-samples-data/language/president.txt'

# [START language_syntax_gcs]
def syntax_file(gcs_uri):
"""Detects syntax in the file located in Google Cloud Storage."""
client = language.LanguageServiceClient()

# Instantiates a plain text document.
@@ -178,12 +206,18 @@ def syntax_file(gcs_uri):
part_of_speech_tag = enums.PartOfSpeech.Tag(token.part_of_speech.tag)
print(u'{}: {}'.format(part_of_speech_tag.name,
token.text.content))
# [END language_syntax_gcs]
# [END language_syntax_gcs]


def entity_sentiment_text():
# [START language_entity_sentiment_text]
import six
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

text = 'President Kennedy spoke at the White House.'

# [START language_entity_sentiment_text]
def entity_sentiment_text(text):
"""Detects entity sentiment in the provided text."""
client = language.LanguageServiceClient()

if isinstance(text, six.binary_type):
@@ -211,12 +245,17 @@ def entity_sentiment_text(text):
print(u' Type : {}'.format(mention.type))
print(u'Salience: {}'.format(entity.salience))
print(u'Sentiment: {}\n'.format(entity.sentiment))
# [END language_entity_sentiment_text]
# [END language_entity_sentiment_text]


def entity_sentiment_file():
# [START language_entity_sentiment_gcs]
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

gcs_uri = 'gs://cloud-samples-data/language/president.txt'

# [START language_entity_sentiment_gcs]
def entity_sentiment_file(gcs_uri):
"""Detects entity sentiment in a Google Cloud Storage file."""
client = language.LanguageServiceClient()

document = types.Document(
@@ -240,12 +279,20 @@ def entity_sentiment_file(gcs_uri):
print(u' Type : {}'.format(mention.type))
print(u'Salience: {}'.format(entity.salience))
print(u'Sentiment: {}\n'.format(entity.sentiment))
# [END language_entity_sentiment_gcs]
# [END language_entity_sentiment_gcs]


# [START language_classify_text]
def classify_text(text):
"""Classifies content categories of the provided text."""
def classify_text():
# [START language_classify_text]
import six
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

text = 'Android is a mobile operating system developed by Google, ' + \
Contributor: The + signs are not necessary here. The string will continue because of the \.

Contributor (author): Very interesting. I did not know this little trick. Thanks. :)

(A short standalone example of this point follows the classify_text function below.)

'based on the Linux kernel and designed primarily for ' + \
'touchscreen mobile devices such as smartphones and tablets.'

client = language.LanguageServiceClient()

if isinstance(text, six.binary_type):
@@ -261,14 +308,17 @@ def classify_text(text):
print(u'=' * 20)
print(u'{:<16}: {}'.format('name', category.name))
print(u'{:<16}: {}'.format('confidence', category.confidence))
# [END language_classify_text]
# [END language_classify_text]
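
As flagged in the review thread on classify_text above, the + operators and the backslashes are doing overlapping work here. A minimal standalone illustration in plain Python, with no Cloud dependencies: adjacent string literals are concatenated by the parser, and the trailing backslash only continues the physical line, so the + can be dropped.

# Adjacent string literals are joined into a single string at compile time;
# the trailing backslash merely continues the logical line across physical lines.
text = 'Android is a mobile operating system developed by Google, ' \
       'based on the Linux kernel and designed primarily for ' \
       'touchscreen mobile devices such as smartphones and tablets.'

assert 'Linux kernel' in text
print(text)
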


def classify_file():
# [START language_classify_gcs]
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types

gcs_uri = 'gs://cloud-samples-data/language/android.txt'

# [START language_classify_gcs]
def classify_file(gcs_uri):
"""Classifies content categories of the text in a Google Cloud Storage
file.
"""
client = language.LanguageServiceClient()

document = types.Document(
@@ -281,7 +331,7 @@ def classify_file(gcs_uri):
print(u'=' * 20)
print(u'{:<16}: {}'.format('name', category.name))
print(u'{:<16}: {}'.format('confidence', category.confidence))
# [END language_classify_gcs]
# [END language_classify_gcs]


if __name__ == '__main__':
@@ -292,63 +342,53 @@ def classify_file(gcs_uri):

classify_text_parser = subparsers.add_parser(
'classify-text', help=classify_text.__doc__)
classify_text_parser.add_argument('text')

classify_text_parser = subparsers.add_parser(
'classify-file', help=classify_file.__doc__)
classify_text_parser.add_argument('gcs_uri')

sentiment_entities_text_parser = subparsers.add_parser(
'sentiment-entities-text', help=entity_sentiment_text.__doc__)
sentiment_entities_text_parser.add_argument('text')

sentiment_entities_file_parser = subparsers.add_parser(
'sentiment-entities-file', help=entity_sentiment_file.__doc__)
sentiment_entities_file_parser.add_argument('gcs_uri')

sentiment_text_parser = subparsers.add_parser(
'sentiment-text', help=sentiment_text.__doc__)
sentiment_text_parser.add_argument('text')

sentiment_file_parser = subparsers.add_parser(
'sentiment-file', help=sentiment_file.__doc__)
sentiment_file_parser.add_argument('gcs_uri')

entities_text_parser = subparsers.add_parser(
'entities-text', help=entities_text.__doc__)
entities_text_parser.add_argument('text')

entities_file_parser = subparsers.add_parser(
'entities-file', help=entities_file.__doc__)
entities_file_parser.add_argument('gcs_uri')

syntax_text_parser = subparsers.add_parser(
'syntax-text', help=syntax_text.__doc__)
syntax_text_parser.add_argument('text')

syntax_file_parser = subparsers.add_parser(
'syntax-file', help=syntax_file.__doc__)
syntax_file_parser.add_argument('gcs_uri')

args = parser.parse_args()

if args.command == 'sentiment-text':
sentiment_text(args.text)
sentiment_text()
elif args.command == 'sentiment-file':
sentiment_file(args.gcs_uri)
sentiment_file()
elif args.command == 'entities-text':
entities_text(args.text)
entities_text()
elif args.command == 'entities-file':
entities_file(args.gcs_uri)
entities_file()
elif args.command == 'syntax-text':
syntax_text(args.text)
syntax_text()
elif args.command == 'syntax-file':
syntax_file(args.gcs_uri)
syntax_file()
elif args.command == 'sentiment-entities-text':
entity_sentiment_text(args.text)
entity_sentiment_text()
elif args.command == 'sentiment-entities-file':
entity_sentiment_file(args.gcs_uri)
entity_sentiment_file()
elif args.command == 'classify-text':
classify_text(args.text)
classify_text()
elif args.command == 'classify-file':
classify_file(args.gcs_uri)
classify_file()
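
Note on usage after this change: since the subcommands no longer take a positional text or gcs_uri argument and each function uses its own hardcoded sample input, the script would presumably be invoked with just a subcommand name, for example python snippets.py sentiment-text (assuming the file keeps its snippets.py name).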