Text Analytics Preview 1 APIs for Java (Draft)

TextAnalytics

TextAnalyticsClientBuilder

public final class TextAnalyticsClientBuilder {

    public TextAnalyticsClientBuilder() {}

    public TextAnalyticsClient buildClient() {}

    public TextAnalyticsAsyncClient buildAsyncClient() {}

    public TextAnalyticsClientBuilder endpoint(String endpoint) {}
    public TextAnalyticsClientBuilder subscriptionKey(String subscriptionKey) {}
    public TextAnalyticsClientBuilder credential(TokenCredential tokenCredential) {}
    public TextAnalyticsClientBuilder httpLogOptions(HttpLogOptions logOptions) {}
    public TextAnalyticsClientBuilder addPolicy(HttpPipelinePolicy policy) {}
    public TextAnalyticsClientBuilder httpClient(HttpClient client) {}
    public TextAnalyticsClientBuilder pipeline(HttpPipeline pipeline) {}
    public TextAnalyticsClientBuilder configuration(Configuration configuration) {}
    public TextAnalyticsClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) {}
    public TextAnalyticsClientBuilder serviceVersion(TextAnalyticsServiceVersion version) {}
    public TextAnalyticsClientBuilder clientOptions(TextAnalyticsClientOptions clientOptions) {}
}
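
For example, the builder can produce an asynchronous client authenticated with a subscription key; a minimal sketch (the endpoint and key values below are placeholders, not real credentials):

// A sketch of building an asynchronous client; endpoint and key are placeholders.
TextAnalyticsAsyncClient asyncClient = new TextAnalyticsClientBuilder()
    .endpoint("https://<resource-name>.cognitiveservices.azure.com/") // placeholder
    .subscriptionKey("<subscription-key>")                            // placeholder
    .serviceVersion(TextAnalyticsServiceVersion.V1_0)
    .buildAsyncClient();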

TextAnalyticsAsyncClient

public final class TextAnalyticsAsyncClient {
    // (1) language
    public Mono<DetectLanguageResult> detectLanguage(String text) {}
    public Mono<DetectLanguageResult> detectLanguage(String text, String countryHint) {}
    public Mono<Response<DetectLanguageResult>> detectLanguageWithResponse(String text, String countryHint) {}
    
    public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguages(List<String> inputs) {}
    public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguages(List<String> inputs, String countryHint) {}
    
    public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguages(List<DetectLanguageInput> inputs) {}
    public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguages(
        List<DetectLanguageInput> inputs, TextAnalyticsRequestOptions options) {}
    public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguagesWithResponse(
        List<DetectLanguageInput> inputs, TextAnalyticsRequestOptions options) {}
    
    // (2) entities
    public PagedFlux<NamedEntityResult> recognizeEntities(String text) {}
    public PagedFlux<NamedEntityResult> recognizeEntities(String text, String language) {}
    
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizeEntities(List<String> inputs) {}
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizeEntities(List<String> inputs, String language) {}

    public Mono<DocumentResultCollection<NamedEntityResult>> recognizeEntities(List<TextDocumentInput> inputs) {}
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizeEntities(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
    public Mono<Response<DocumentResultCollection<NamedEntityResult>>> recognizeEntitiesWithResponse(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}

    // (3) PII entities
    public PagedFlux<NamedEntityResult> recognizePiiEntities(String text) {}
    public PagedFlux<NamedEntityResult> recognizePiiEntities(String text, String language) {}
    
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizePiiEntities(List<String> inputs) {}
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizePiiEntities(List<String> inputs, String language) {}
    
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizePiiEntities(List<TextDocumentInput> inputs) {}
    public Mono<DocumentResultCollection<NamedEntityResult>> recognizePiiEntities(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
    public Mono<Response<DocumentResultCollection<NamedEntityResult>>> recognizePiiEntitiesWithResponse(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
        
    // (4) Link entities
    public PagedFlux<LinkedEntityResult> recognizeLinkedEntities(String text) {}
    public PagedFlux<LinkedEntityResult> recognizeLinkedEntities(String text, String language) {}
    
    public Mono<DocumentResultCollection<LinkedEntityResult>> recognizeLinkedEntities(List<String> inputs) {}
    public Mono<DocumentResultCollection<LinkedEntityResult>> recognizeLinkedEntities(List<String> inputs, String language) {}
    
    public Mono<DocumentResultCollection<LinkedEntityResult>> recognizeLinkedEntities(List<TextDocumentInput> inputs) {}
    public Mono<DocumentResultCollection<LinkedEntityResult>> recognizeLinkedEntities(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
    public Mono<Response<DocumentResultCollection<LinkedEntityResult>>> recognizeLinkedEntitiesWithResponse(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}

    // (5) key phrase
    public Mono<KeyPhraseResult> extractKeyPhrases(String text) {}
    public Mono<KeyPhraseResult> extractKeyPhrases(String text, String language) {}

    public Mono<DocumentResultCollection<KeyPhraseResult>> extractKeyPhrases(List<String> inputs) {}
    public Mono<DocumentResultCollection<KeyPhraseResult>> extractKeyPhrases(List<String> inputs, String language) {}

    public Mono<DocumentResultCollection<KeyPhraseResult>> extractKeyPhrases(List<TextDocumentInput> inputs) {}
    public Mono<DocumentResultCollection<KeyPhraseResult>> extractKeyPhrases(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
    public Mono<Response<DocumentResultCollection<KeyPhraseResult>>> extractKeyPhrasesWithResponse(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
        
    // (6) sentiment
    public Mono<TextSentimentResult> analyzeSentiment(String input) {}
    public Mono<TextSentimentResult> analyzeSentiment(String input, String language) {}
    public Mono<Response<TextSentimentResult>> analyzeSentimentWithResponse(String input, String language) {}
    
    public Mono<DocumentResultCollection<TextSentimentResult>> analyzeSentiment(List<String> inputs) {}
    public Mono<DocumentResultCollection<TextSentimentResult>> analyzeSentiment(List<String> inputs, String language) {}

    public Mono<DocumentResultCollection<TextSentimentResult>> analyzeSentiment(List<TextDocumentInput> inputs) {}
    public Mono<DocumentResultCollection<TextSentimentResult>> analyzeSentiment(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
    public Mono<Response<DocumentResultCollection<TextSentimentResult>>> analyzeSentimentWithResponse(
        List<TextDocumentInput> inputs, TextAnalyticsRequestOptions options) {}
}
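
A minimal usage sketch of the asynchronous client (assuming an asyncClient built as shown above); Mono results are consumed by subscribing:

String text = "hello world";
asyncClient.detectLanguage(text).subscribe(detectLanguageResult -> {
    DetectLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
    System.out.printf("Detected Language: %s, ISO 6391 Name: %s, Score: %s%n",
        detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
});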

TextAnalyticsClient // TODO: needs to be updated with the async API changes

public final class TextAnalyticsClient {
    // (1) language
    public DetectLanguage detectLanguage(String text) {}
    public DetectLanguage detectLanguage(String text, String countryHint) {}
    public Response<DetectLanguage> detectLanguageWithResponse(String text, String countryHint, Context context){}
                                
    public DocumentResultCollection<DetectLanguage> detectLanguages(List<String> inputs) {}
    public DocumentResultCollection<DetectLanguage> detectLanguages(List<String> inputs, String countryHint) {}
    
    public DocumentResultCollection<DetectLanguage> detectLanguages(List<DetectLanguageInput> documents) {}
    public DocumentResultCollection<DetectLanguage> detectLanguages(
        List<DetectLanguageInput> documents, TextAnalyticsRequestOptions options) {}
    public Response<DocumentResultCollection<DetectLanguage>> detectLanguagesWithResponse(
        List<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {}

    // (2) entities
    public PagedIterable<NamedEntity> recognizeEntities(String text) {}
    public PagedIterable<NamedEntity> recognizeEntities(String text, String language) {}
      
    public DocumentResultCollection<NamedEntity> recognizeEntities(List<String> inputs) {}
    public DocumentResultCollection<NamedEntity> recognizeEntities(List<String> inputs, String language) {}    
        
    public DocumentResultCollection<NamedEntity> recognizeEntities(List<TextDocumentInput> documents) {}    
    public DocumentResultCollection<NamedEntity> recognizeEntities(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options){}
    public Response<DocumentResultCollection<NamedEntity>> recognizeEntitiesWithResponse(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {}

    // (3) PII entities
    public PagedIterable<NamedEntity> recognizePiiEntities(String text) {}
    public PagedIterable<NamedEntity> recognizePiiEntities(String text, String language) {}    
    
    public DocumentResultCollection<NamedEntity> recognizePiiEntities(List<String> inputs) {}
    public DocumentResultCollection<NamedEntity> recognizePiiEntities(List<String> inputs, String language) {}
    
    public DocumentResultCollection<NamedEntity> recognizePiiEntities(List<TextDocumentInput> documents) {}
    public DocumentResultCollection<NamedEntity> recognizePiiEntities(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options) {}
    public Response<DocumentResultCollection<NamedEntity>> recognizePiiEntitiesWithResponse(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {}
        
    // (4) Link entities
    public PagedIterable<LinkedEntity> recognizeLinkedEntities(String text) {}
    public PagedIterable<LinkedEntity> recognizeLinkedEntities(String text, String language) {}
            
    public DocumentResultCollection<LinkedEntity> recognizeLinkedEntities(List<String> inputs) {}
    public DocumentResultCollection<LinkedEntity> recognizeLinkedEntities(List<String> inputs, String language) {}        
         
    public DocumentResultCollection<LinkedEntity> recognizeLinkedEntities(List<TextDocumentInput> documents) {}
    public DocumentResultCollection<LinkedEntity> recognizeLinkedEntities(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options) {}
    public Response<DocumentResultCollection<LinkedEntity>> recognizeLinkedEntitiesWithResponse(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {}

    // (5) key phrase
    public PagedIterable<String> extractKeyPhrases(String text) {} 
    public PagedIterable<String> extractKeyPhrases(String text, String language) {}
        
    public DocumentResultCollection<String> extractKeyPhrases(List<String> inputs) {}
    public DocumentResultCollection<String> extractKeyPhrases(List<String> inputs, String language) {}    
        
    public DocumentResultCollection<String> extractKeyPhrases(List<TextDocumentInput> documents) {}    
    public DocumentResultCollection<String> extractKeyPhrases(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options) {}
    public Response<DocumentResultCollection<String>> extractKeyPhrasesWithResponse(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {}
        
    // (6) sentiment
    public TextSentiment analyzeSentenceSentiment(String sentence) {}
    public TextSentiment analyzeSentenceSentiment(String text, String language) {}
    public Response<TextSentiment> analyzeSentenceSentimentWithResponse(
        String text, String language, Context context) {}
            
    public DocumentResultCollection<TextSentiment> analyzeDocumentSentiment(List<String> inputs) {}
    public DocumentResultCollection<TextSentiment> analyzeDocumentSentiment(List<String> inputs, String language) {}
            
    public DocumentResultCollection<TextSentimentResult> analyzeDocumentSentiment(List<TextDocumentInput> documents) {}
    public DocumentResultCollection<TextSentimentResult> analyzeDocumentSentiment(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options) {}
    public Response<DocumentResultCollection<TextSentimentResult>> analyzeDocumentSentimentWithResponse(
        List<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {}
}

Input Models

DetectLanguageInput

public final class DetectLanguageInput {
    public DetectLanguageInput(String id, String text) {}
    public String getId() {}
    public String getText() {}
    public String getCountryHint() {}
    public DetectLanguageInput setCountryHint(String countryHint) {}
}

TextDocumentInput

public final class TextDocumentInput {
    public TextDocumentInput(String id, String text) {}
    public String getId() {}
    public String getText() {}
    public String getLanguage() {}
    public TextDocumentInput setLanguage(String language) {}
}

TextAnalyticsRequestOptions

public final class TextAnalyticsRequestOptions {
    public String getModelVersion() {}
    public TextAnalyticsRequestOptions setModelVersion(String modelVersion) {}
    public boolean isShowStatistics() {}
    public TextAnalyticsRequestOptions setShowStatistics(boolean showStatistics) {}
}
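
A minimal sketch of passing request options to a batch call on the async client (assumes a public no-arg constructor; the "latest" model version string is illustrative and not confirmed by this draft):

TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions()
    .setModelVersion("latest")   // illustrative value
    .setShowStatistics(true);

// inputs: List<DetectLanguageInput>, asyncClient: TextAnalyticsAsyncClient (see above)
asyncClient.detectLanguages(inputs, options).subscribe(resultCollection ->
    System.out.printf("Model version: %s, transaction count: %s%n",
        resultCollection.getModelVersion(),
        resultCollection.getStatistics().getTransactionCount()));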

TextAnalyticsClientOptions

public final class TextAnalyticsClientOptions {
    public String getDefaultLanguage() {}
    public TextAnalyticsClientOptions setDefaultLanguage(String defaultLanguage) {}
    public String getDefaultCountryHint() {}
    public TextAnalyticsClientOptions setDefaultCountryHint(String defaultCountryHint) {}
}
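
A minimal sketch of setting per-client defaults through the builder (assumes a public no-arg constructor; endpoint and credential are defined by the user):

TextAnalyticsClientOptions clientOptions = new TextAnalyticsClientOptions()
    .setDefaultLanguage("en")
    .setDefaultCountryHint("US");

TextAnalyticsClient client = new TextAnalyticsClientBuilder()
    .endpoint(endpoint)           // defined by user
    .credential(tokenCredential)  // defined by user
    .clientOptions(clientOptions)
    .buildClient();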

Output Models

DocumentResult

public class DocumentResult {
    public String getId() {}
    public TextDocumentStatistics getStatistics() {}
    public DocumentError getError() {}
}

DocumentResultCollection

public final class DocumentResultCollection<T> extends IterableStream<T> {
    public String getModelVersion() {}
    public TextBatchStatistics getStatistics() {}
}

DetectLanguageResult

public final class DetectLanguageResult extends DocumentResult {
    public DetectLanguage getPrimaryLanguage() {}
    public List<DetectLanguage> getItems() {}
}

TextSentimentResult

public final class TextSentimentResult extends DocumentResult {
    public TextSentiment getDocumentSentiment() {}
    public List<TextSentiment> getItems() {}
}

LinkedEntityResult

public final class LinkedEntityResult extends DocumentResult {
    public List<LinkedEntity> getItems() {}
}

KeyPhraseResult

public final class KeyPhraseResult extends DocumentResult {
    public List<String> getItems() {}
}

NamedEntityResult

public final class NamedEntityResult extends DocumentResult {
    public List<NamedEntity> getItems() {}
}

TextDocumentStatistics

public final class TextDocumentStatistics {
    public int getCharacterCount() {}
    public int getTransactionCount() {}
}

TextBatchStatistics

public final class TextBatchStatistics {
    public int getDocumentCount() {}
    public int getValidDocumentCount() {}
    public int getErroneousDocumentCount() {}
    public long getTransactionCount() {}
}

DocumentError

public final class DocumentError {
    public String getId() {}
    public Object getError() {}
}

DetectLanguage

public final class DetectLanguage {
    public String getName() {}
    public String getIso6391Name() {}
    public double getScore() {}
}

NamedEntity

public final class NamedEntity {
    public String getText() {}
    public String getSubType() {}
    public String getType() {}
    public int getOffset() {}
    public int getLength() {}
    public double getScore() {}
}

LinkedEntity

public final class LinkedEntity {
    public String getName() {}
    public List<LinkedEntityMatch> getMatches() {}
    public String getLanguage() {}
    public String getId() {}
    public String getUrl() {}
    public String getDataSource() {}
}

LinkedEntityMatch

public final class LinkedEntityMatch {
    public double getScore() {}
    public String getText() {}
    public int getOffset() {}
    public int getLength() {}
}

TextSentiment

public final class TextSentiment {
    public int getLength() {}
    public double getNegativeScore() {}
    public double getNeutralScore() {}
    public double getPositiveScore() {}
    public int getOffset() {}
    public TextSentimentClass getSentimentClass() {}
}

TextSentimentClass

public enum TextSentimentClass {
    POSITIVE,
    NEGATIVE,
    NEUTRAL,
    MIXED;
}
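
A minimal sketch of branching on the sentiment class (assumes the synchronous client shown below in the Samples section):

TextSentiment sentiment = client.analyzeSentenceSentiment("The hotel was dark and unclean.");
switch (sentiment.getSentimentClass()) {
    case POSITIVE: System.out.println("Positive feedback"); break;
    case NEGATIVE: System.out.println("Negative feedback"); break;
    case NEUTRAL:  System.out.println("Neutral feedback"); break;
    case MIXED:    System.out.println("Mixed feedback"); break;
}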

TextAnalyticsServiceVersion

public enum TextAnalyticsServiceVersion implements ServiceVersion {
    V1_0("1.0");
    public String getVersion() {}
    public static TextAnalyticsServiceVersion getLatest() {}
}

Samples

Create a synchronous client

// Instantiate a client that will be used to call the service.
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
    .endpoint(endpoint) // defined by user
    .credential(tokenCredential) // defined by user
    .buildClient();

Single Text

1. Detect language in text.

// The text to be analyzed.
String text = "hello world";

DetectLanguage detectedLanguage = client.detectLanguage(text);
System.out.printf("Detected Language: %s, ISO 6391 Name: %s, Score: %s",
    detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());

2. Recognize entities in text.

// The text to be analyzed.
String text = "Satya Nadella is the CEO of Microsoft";

client.recognizeEntities(text).stream().forEach(
    namedEntity -> System.out.printf(
        "Recognized Entity: %s, Entity Type: %s, Entity Subtype: %s, Offset: %s, Length: %s, Score: %s",
        namedEntity.getText(),
        namedEntity.getType(),
        namedEntity.getSubType(),
        namedEntity.getOffset(),
        namedEntity.getLength(),
        namedEntity.getScore()));

3. Recognize personally identifiable information in text.

// The text to be analyzed.
String text = "My SSN is 555-55-5555";

client.recognizePiiEntities(text).stream().forEach(
    namedEntity -> System.out.printf(
        "Recognized PII Entity: %s, Entity Type: %s, Entity Subtype: %s, Offset: %s, Length: %s, Score: %s%n",
        namedEntity.getText(),
        namedEntity.getType(),
        namedEntity.getSubType(),
        namedEntity.getOffset(),
        namedEntity.getLength(),
        namedEntity.getScore()));

4. Recognize linked entities in text.

// The text to be analyzed.
String text = "Old Faithful is a geyser at Yellowstone Park";

client.recognizeLinkedEntities(text).stream().forEach(
    linkedEntity -> System.out.printf("Recognized Linked Entity: %s, URL: %s, Data Source: %s%n",
        linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource()));

5. Recognize key phrases in text.

String text = "My cat might need to see a veterinarian";
client.extractKeyPhrases(text).stream().forEach(
    phrase -> System.out.printf("Recognized Phrase: %s%n", phrase));

6. Analyze sentiment in text.

// The text to be analyzed.
String text = "The hotel was dark and unclean.";

TextSentiment sentenceSentiment = client.analyzeSentenceSentiment(text);

System.out.printf("Recognized Sentiment: %s, Positive Score: %s, Neutral Score: %s, Negative Score: %s.%n",
    sentenceSentiment.getSentimentClass(),
    sentenceSentiment.getPositiveScore(),
    sentenceSentiment.getNeutralScore(),
    sentenceSentiment.getNegativeScore()
);

Batch of Documents

1. Detect language per document in a batch of documents.

// The texts to be analyzed.
List<DetectLanguageInput> inputs = Arrays.asList(
    new DetectLanguageInput("1", "This is written in English"),
    new DetectLanguageInput("2", "Este es un documento escrito en Español.").setCountryHint("es")
);

DocumentResultCollection<DetectLanguageResult> detectedLanguagesCollection = client.detectLanguages(inputs);
// batch level stats
System.out.printf("A batch of document statistics, document count: %s%n", detectedLanguagesCollection.getStatistics().getDocumentCount());

for (DetectLanguageResult documentResult : detectedLanguagesCollection) {
    if (documentResult.getError() == null) {
        for (DetectLanguage detectedLanguage : documentResult.getItems()) {
            System.out.printf("Detected Language: %s, ISO 6391 Name: %s, Score: %s%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
    // doc level stats
    System.out.printf("Character count per document: %s%n", documentResult.getStatistics().getCharacterCount());
}

OR with IterableStream

Stream<DetectLanguage> detectedLanguageItems = detectedLanguagesCollection.stream()
    .flatMap(detectedLanguageResult -> detectedLanguageResult.getItems().stream());

// Detecting the languages for each document in the batch
detectedLanguageItems.forEach(detectedLanguage ->
    System.out.printf("Detected Language: %s, ISO 6391 Name: %s, Score: %s%n",
        detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()));
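
The *WithResponse overloads additionally expose the HTTP response details; a minimal sketch against the same batch (the Result-typed element follows the adjacent sample rather than the not-yet-updated synchronous signatures, and Context.NONE plus the options value are illustrative assumptions):

Response<DocumentResultCollection<DetectLanguageResult>> response = client.detectLanguagesWithResponse(
    inputs, new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE);

System.out.printf("Status code: %s, batch transaction count: %s%n",
    response.getStatusCode(),
    response.getValue().getStatistics().getTransactionCount());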

2. Recognize entities per document in a batch of documents.

// The texts to be analyzed.
List<TextDocumentInput> inputs = Arrays.asList(
    new TextDocumentInput("1", "Satya Nadella is the CEO of Microsoft").setLanguage("en"),
    new TextDocumentInput("2", "Elon Musk is the CEO of SpaceX and Tesla.").setLanguage("en")
);

DocumentResultCollection<NamedEntityResult> detectedBatchResult = client.recognizeEntities(inputs);

detectedBatchResult.stream().forEach(detectedEntityResult ->
    detectedEntityResult.getItems().stream().forEach(entity ->
        System.out.printf(
            "Recognized NamedEntity: %s, NamedEntity Type: %s, NamedEntity Subtype: %s, Offset: %s, Length: %s, Score: %s%n",
            entity.getText(),
            entity.getType(),
            entity.getSubType(),
            entity.getOffset(),
            entity.getLength(),
            entity.getScore())));

3. Recognize personally identifiable information per document in a batch of documents.

// The texts to be analyzed.
List<TextDocumentInput> inputs = Arrays.asList(
    new TextDocumentInput("1", "My SSN is 555-55-5555").setLanguage("en"),
    new TextDocumentInput("2", "Visa card 4147999933330000").setLanguage("en")
);

DocumentResultCollection<NamedEntityResult> detectedBatchResult = client.recognizePiiEntities(inputs);

detectedBatchResult.stream().forEach(piiEntityDocumentResult ->
    piiEntityDocumentResult.getItems().stream().forEach(entity ->
        System.out.printf(
            "Recognized Personally Identifiable Info NamedEntity: %s, NamedEntity Type: %s, NamedEntity Subtype: %s, Score: %s%n",
            entity.getText(),
            entity.getType(),
            entity.getSubType(),
            entity.getScore())));

4. Recognize linked entities per document in a batch of documents.

// The texts to be analyzed.
List<TextDocumentInput> inputs = Arrays.asList(
    new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
    new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
);

DocumentResultCollection<LinkedEntityResult> detectedLinkedEntitiesResult = client.recognizeLinkedEntities(inputs);

// Recognizing linked entities for each document in the batch
detectedLinkedEntitiesResult.stream().forEach(linkedEntityDocumentResult ->
    linkedEntityDocumentResult.getItems().stream().forEach(linkedEntity ->
        System.out.printf("Recognized Linked NamedEntity: %s, URL: %s, Data Source: %s%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource())));

5. Recognize key phrases per document in a batch of documents.

// The texts to be analyzed.
List<TextDocumentInput> inputs = Arrays.asList(
    new TextDocumentInput("1", "My cat might need to see a veterinarian").setLanguage("en"),
    new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
);

DocumentResultCollection<KeyPhraseResult> detectedBatchResult = client.extractKeyPhrases(inputs);

// Extracting key phrases for each document in the batch
detectedBatchResult.stream().forEach(keyPhraseResult ->
    keyPhraseResult.getItems().stream().forEach(keyPhrase ->
        System.out.printf("Recognized Phrase: %s%n", keyPhrase)));

6. Analyze sentiment per document in a batch of documents.

List<TextDocumentInput> inputs = Arrays.asList(
    new TextDocumentInput("1", "The hotel was dark and unclean.").setLanguage("US"),
    new TextDocumentInput("2", "The restaurant had amazing gnocci.").setLanguage("US")
);

DocumentResultCollection<TextSentimentResult> batchResult = client.analyzeDocumentSentiment(inputs);

Stream<TextSentiment> documentSentiments = batchResult.stream()
    .flatMap(batchResultItem -> batchResultItem.getItems().stream());

// Analyzing sentiment for each document in the batch
documentSentiments.forEach(sentenceSentiment ->
    System.out.printf(
        "Recognized sentence sentiment: %s, Positive Score: %s, Neutral Score: %s, Negative Score: %s, Length of sentence: %s, Offset of sentence: %s%n",
        sentenceSentiment.getSentimentClass(),
        sentenceSentiment.getPositiveScore(),
        sentenceSentiment.getNeutralScore(),
        sentenceSentiment.getNegativeScore(),
        sentenceSentiment.getLength(),
        sentenceSentiment.getOffset()));

// Why should we expose getPositiveScore(), getNegativeScore(), and all of the scores?
// If it has a positive SentimentClass, should we only expose that score?