@nidhinkumar06
Created March 31, 2019 07:21
Google Assistant Story Telling Action - Index
'use strict';

// Import the Dialogflow module and response creation dependencies
// from the Actions on Google client library.
const _ = require('lodash');
const {
  dialogflow,
  MediaObject,
  Suggestions,
  Image,
  Carousel,
  SimpleResponse,
} = require('actions-on-google');

// Import the firebase-functions package for deployment.
const functions = require('firebase-functions');

// Local data modules: carousel entries and playable audio metadata.
const storiesData = require('./stories');
const audioData = require('./storyAudio');

// Instantiate the Dialogflow client.
const app = dialogflow({debug: true});

// Handle the Dialogflow intent named 'Default Welcome Intent'.
app.intent('Default Welcome Intent', (conv) => {
  const title = `It's story time... Which story would you like to hear today?`;
  getStories(conv, title);
});

// Handle the story selected from the carousel (actions_intent_OPTION).
app.intent('actions_intent_OPTION', (conv, params, option) => {
  const selectedStory = audioData.find((o) => o.key === option);
  // let selectedStory = audioData.find((o) => o.synonyms.includes(option.toLowerCase()));
  if (selectedStory) {
    if (!conv.surface.capabilities.has('actions.capability.MEDIA_RESPONSE_AUDIO')) {
      conv.ask('Sorry, this device does not support audio playback.');
      return;
    }
    conv.ask(new SimpleResponse(`Here is ${selectedStory.title}`));
    conv.ask(new MediaObject({
      name: `${selectedStory.title}`,
      url: `${selectedStory.audioUrl}`,
      description: `${selectedStory.description}`,
      icon: new Image({
        url: `${selectedStory.imageUrl}`,
        alt: `${selectedStory.imageAlt}`,
      }),
    }));
    conv.ask(new Suggestions(['More Stories', 'Cancel']));
  } else {
    // No matching story was found, so fall back to the story list.
    const title = `Sorry I forgot ${option}. Do you want to try this?`;
    getStories(conv, title);
  }
});

// Handle a story requested by name through the 'stories intent'.
app.intent('stories intent', (conv, {story}) => {
  // conv.ask(`You have selected ${story}`);
  const selectedStory = audioData.find((o) => o.key === story);
  console.log('story is', story);
  console.log('selectedStory is', selectedStory);
  if (selectedStory) {
    if (!conv.surface.capabilities.has('actions.capability.MEDIA_RESPONSE_AUDIO')) {
      conv.ask('Sorry, this device does not support audio playback.');
      return;
    }
    conv.ask(new SimpleResponse(`Here is ${selectedStory.title}`));
    conv.ask(new MediaObject({
      name: `${selectedStory.title}`,
      url: `${selectedStory.audioUrl}`,
      description: `${selectedStory.description}`,
      icon: new Image({
        url: `${selectedStory.imageUrl}`,
        alt: `${selectedStory.imageAlt}`,
      }),
    }));
    conv.ask(new Suggestions(['More Stories', 'Cancel']));
  } else {
    // Unknown story name: fall back to the story list.
    const title = `Sorry I forgot ${story}. Do you want to try this?`;
    getStories(conv, title);
  }
});

// Check the media playback status once the audio finishes.
app.intent('actions_intent_MEDIA_STATUS', (conv) => {
  const mediaStatus = conv.arguments.get('MEDIA_STATUS');
  let response = 'Unknown media status received.';
  if (mediaStatus && mediaStatus.status === 'FINISHED') {
    response = 'Hope you enjoyed the story!';
  }
  conv.ask(response);
  conv.ask('Do you want to hear another story?');
  conv.ask(new Suggestions(['Yes', 'No']));
});

// Shuffle the stories and build a carousel from the first four of them.
const storyCarousel = () => {
  const shuffledStories = _.shuffle(storiesData);
  const filteredStories = {};
  for (let i = 0; i <= 3; i++) {
    filteredStories[shuffledStories[i].key] = shuffledStories[i];
  }
  const carousel = new Carousel({
    items: filteredStories,
  });
  return carousel;
};

// Handle the Dialogflow follow-up intents that ask for more stories.
app.intent([
  'actions_intent_MEDIA_STATUS - yes',
  'actions_intent_OPTION - more-stories',
  'stories intent - more-stories',
  'Default Welcome Intent - more stories',
], (conv) => {
  conv.ask(`Which story would you like to hear?`);
  if (!conv.screen) {
    // Speaker-only surface: read out three shuffled story titles.
    const shuffledStories = _.shuffle(storiesData);
    const filteredStories = [];
    for (let i = 0; i <= 3; i++) {
      filteredStories.push(shuffledStories[i]);
    }
    conv.ask(`${filteredStories[0].title}, ${filteredStories[1].title}, ${filteredStories[2].title}`);
  } else {
    // Screen surface: show the carousel with suggestion chips.
    conv.ask(new Suggestions(['More Stories', 'Cancel']));
    return conv.ask(storyCarousel());
  }
});

// Reprompt when there is no input from the user on a speaker.
app.intent('actions_intent_NO_INPUT', (conv) => {
  // Use the number of reprompts to vary the response.
  const repromptCount = parseInt(conv.arguments.get('REPROMPT_COUNT'), 10);
  if (repromptCount === 0) {
    conv.ask('Which story would you like to hear?');
  } else if (repromptCount === 1) {
    conv.ask(`Please say the story name.`);
  } else if (conv.arguments.get('IS_FINAL_REPROMPT')) {
    conv.close(`Sorry, we're having trouble. Let's ` +
      `try this again later. Goodbye.`);
  }
});

// Present the story choices on both speaker-only and screen surfaces.
const getStories = (conv, title) => {
  conv.ask(title);
  if (!conv.screen) {
    // Speaker-only surface: read out three shuffled story titles.
    const shuffledStories = _.shuffle(storiesData);
    const filteredStories = [];
    for (let i = 0; i <= 3; i++) {
      filteredStories.push(shuffledStories[i]);
    }
    conv.ask(`${filteredStories[0].title}, ${filteredStories[1].title}, ${filteredStories[2].title}`);
  } else {
    // Screen surface: show the carousel with suggestion chips.
    conv.ask(new Suggestions(['More Stories', 'Cancel']));
    return conv.ask(storyCarousel());
  }
};

// Export the fulfillment as an HTTPS Cloud Function for deployment.
exports.dialogflowFirebaseFulfillment = functions.https.onRequest(app);
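
Note: the handlers above read their data from two local modules, ./stories (carousel entries) and ./storyAudio (playable audio metadata), which are not included in this gist. Below is a minimal, hypothetical sketch of the shape those modules appear to expect; every field name is inferred from the lookups in the code, and all keys, titles, and URLs are invented placeholders.

// storyAudio.js (hypothetical sketch): one entry per playable story.
// Field names mirror the lookups in the intent handlers above.
module.exports = [
  {
    key: 'the_thirsty_crow',              // must match a carousel option key
    synonyms: ['thirsty crow', 'crow'],   // used only by the commented-out lookup
    title: 'The Thirsty Crow',
    description: 'A short moral story about a clever crow.',
    audioUrl: 'https://example.com/audio/the-thirsty-crow.mp3',
    imageUrl: 'https://example.com/images/the-thirsty-crow.png',
    imageAlt: 'A crow dropping pebbles into a pot of water',
  },
  // ...more stories
];

// stories.js (hypothetical sketch): carousel entries keyed by the same `key`,
// carrying the {title, description, image} fields the Carousel items use.
const {Image} = require('actions-on-google');
module.exports = [
  {
    key: 'the_thirsty_crow',
    title: 'The Thirsty Crow',
    description: 'A short moral story about a clever crow.',
    image: new Image({
      url: 'https://example.com/images/the-thirsty-crow.png',
      alt: 'A crow dropping pebbles into a pot of water',
    }),
  },
  // ...more stories
];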