import json
from watson_developer_cloud import NaturalLanguageUnderstandingV1
from watson_developer_cloud.natural_language_understanding_v1 \
    import Features, EntitiesOptions, KeywordsOptions
import time
from datetime import timedelta
import sys
import os
import argparse
# Read the Watson API credentials for authentication from environment variables set locally
NLP_USER_WATSON = os.environ.get("NLP_USER_WATSON")
NLP_PASS_WATSON = os.environ.get("NLP_PASS_WATSON")
NLP_VER_WATSON = os.environ.get("NLP_VER_WATSON")
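# The variables above assume the credentials were exported in the shell before
# running the script, e.g. (the values below are placeholders, not from the gist;
# the version is a dated string such as 2017-02-27):
#   export NLP_USER_WATSON="<service-username>"
#   export NLP_PASS_WATSON="<service-password>"
#   export NLP_VER_WATSON="2017-02-27"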
# Redirect all console output to a text file (appended on each run)
sys.stdout = open('nlp_api_output.txt', 'a')
start_time = time.monotonic()
def input_file(text_file_path):
    global text
    if os.path.isfile(text_file_path):
        with open(text_file_path, 'r') as text_file:
            text = text_file.read()
    else:
        print("File doesn't exist in the directory!")
        sys.exit(1)
def analyze_text():
    # Initialize the NaturalLanguageUnderstanding client using the API credentials
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username=NLP_USER_WATSON,
        password=NLP_PASS_WATSON,
        version=NLP_VER_WATSON)
    response = natural_language_understanding.analyze(
        text=text,
        features=Features(
            entities=EntitiesOptions(
                emotion=True,
                sentiment=True),
            keywords=KeywordsOptions(
                emotion=True,
                sentiment=True)))
    print(json.dumps(response, indent=2))  # JSON output of the text analysis
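# A possible follow-up (not in the original gist): since the response is a plain
# dict, individual keywords could be summarised instead of dumping everything,
# e.g. inside analyze_text():
#     for kw in response.get("keywords", []):
#         print(kw["text"], kw.get("relevance"), kw.get("sentiment", {}).get("score"))
# The field names follow the Watson NLU response format and are an assumption here.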
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        'text_file_path',
        help='The complete file path of the text file you want to analyze.')
    args = parser.parse_args()
    input_file(args.text_file_path)
    analyze_text()
    end_time = time.monotonic()
    print("Execution_Time:", timedelta(seconds=end_time - start_time))
    print('\n')
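# Example run (a sketch; the script filename and the input file path are assumptions):
#   python watson_nlu_analysis.py /path/to/article.txt
# Because stdout is redirected above, the analysis JSON and the execution time are
# appended to nlp_api_output.txt rather than printed to the console.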