nahidalam / sampleRasaAction.py
Created September 30, 2018 05:12
How to implement actions in Rasa
# assumes the rasa_core_sdk package (Rasa custom action server)
from rasa_core_sdk import Action

class ActionOrderProduct(Action):
    def name(self):
        return 'action_order_product'

    def run(self, dispatcher, tracker, domain):
        router = tracker.get_slot('router')
        confirmationNumber = 123456  # later generate through some process
        response = """Your product {} is ordered for you. It will be shipped to your address. Your confirmation number is {}""".format(router, confirmationNumber)
        dispatcher.utter_message(response)
        return []
nahidalam / train_nlu.py
Created September 30, 2018 05:17
Training the NLU model with Rasa
from rasa_nlu.training_data import load_data
from rasa_nlu import config
from rasa_nlu.model import Trainer

def train_nlu(data, configuration, model_dir):
    training_data = load_data(data)
    trainer = Trainer(config.load(configuration))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='customernlu')
    return model_directory
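A quick way to call this (the file paths below are placeholders, not taken from the original gist):

if __name__ == '__main__':
    model_directory = train_nlu('./data/nlu_data.md', 'nlu_config.yml', './models/nlu')
    print('NLU model persisted to', model_directory)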
nahidalam / dialog.py
Created September 30, 2018 05:19
Training and running the dialogue model
from rasa_core.agent import Agent
from rasa_core.policies.memoization import MemoizationPolicy
from rasa_core.policies.keras_policy import KerasPolicy

def train_dialogue(domain_file='customer_domain.yml',
                   model_path='./models/dialogue',
                   training_data_file='./data/stories.md'):
    agent = Agent(domain_file, policies=[MemoizationPolicy(), KerasPolicy()])
    agent.train(
        training_data_file,
        epochs=300,
        batch_size=50)
    # persist the trained dialogue model (completing the truncated snippet)
    agent.persist(model_path)
    return agent
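A minimal sketch of loading and talking to the trained bot, assuming the rasa_core / rasa_nlu APIs used above; the NLU model path is an assumption based on where train_nlu persists the model:

from rasa_core.agent import Agent
from rasa_core.interpreter import RasaNLUInterpreter

def run_bot():
    interpreter = RasaNLUInterpreter('./models/nlu/default/customernlu')
    agent = Agent.load('./models/dialogue', interpreter=interpreter)
    # handle_text returns the bot's responses to a single message
    print(agent.handle_text('Hi, I want to order a router'))
    return agent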

from tensorflow import keras
from tensorflow.keras import layers

def EmbeddingRec(EMBEDDING_SIZE, NUM_MOVIES, NUM_USERS, ROW_COUNT):
    # movie branch: embed the movie id and flatten it into a single vector
    movie_input = keras.Input(shape=(1,), name='movie_id')
    movie_emb = layers.Embedding(output_dim=EMBEDDING_SIZE, input_dim=NUM_MOVIES, input_length=ROW_COUNT, name='movie_emb')(movie_input)
    movie_vec = layers.Flatten(name='FlattenMovie')(movie_emb)
    movie_model = keras.Model(inputs=movie_input, outputs=movie_vec)

    user_input = keras.Input(shape=(1,), name='user_id')
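    # Assumed completion (not in the truncated snippet above): the user branch mirrors
    # the movie branch, and the dot product of the two vectors approximates the rating.
    user_emb = layers.Embedding(output_dim=EMBEDDING_SIZE, input_dim=NUM_USERS, input_length=ROW_COUNT, name='user_emb')(user_input)
    user_vec = layers.Flatten(name='FlattenUser')(user_emb)
    user_model = keras.Model(inputs=user_input, outputs=user_vec)

    merged = layers.Dot(name='dot_product', axes=1)([movie_vec, user_vec])
    model = keras.Model(inputs=[movie_input, user_input], outputs=merged)
    model.compile(optimizer='adam', loss='mean_squared_error')
    return model, movie_model, user_model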

from sklearn.neighbors import KNeighborsClassifier

# let's assume a user ID of 200
TEST_USER_ID = 200

# get the embedding of this user
user_embedding = user_model.predict([TEST_USER_ID]).reshape(1, -1)[0]

# create the KNN model over the precomputed movie embeddings
clf = KNeighborsClassifier(n_neighbors=11)
clf.fit(MOVIE_EMBEDDING_LIST, knn_train_label)
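To turn the fitted model into recommendations, a sketch (assuming the rows of MOVIE_EMBEDDING_LIST are indexed by movie id, as above):

# find the movies whose embeddings are closest to this user's embedding
distances, indices = clf.kneighbors(user_embedding.reshape(1, -1), n_neighbors=10)
recommended_movie_ids = indices[0]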

contraction_map = {"ain't": "is not", "aren't": "are not", "can't": "cannot", "'cause": "because", "could've": "could have", "couldn't": "could not",
                   "didn't": "did not", "doesn't": "does not", "don't": "do not", "hadn't": "had not", "hasn't": "has not", "haven't": "have not",
                   "he'd": "he would", "he'll": "he will", "he's": "he is", "how'd": "how did", "how'd'y": "how do you", "how'll": "how will", "how's": "how is",
                   "I'd": "I would", "I'd've": "I would have", "I'll": "I will", "I'll've": "I will have", "I'm": "I am", "I've": "I have", "i'd": "i would",
                   "i'd've": "i would have", "i'll": "i will", "i'll've": "i will have", "i'm": "i am", "i've": "i have", "isn't": "is not", "it'd": "it would",
                   # ... the original map continues with more contractions ...
                   }

from __future__ import unicode_literals, print_function, division

import pandas as pd
import numpy as np
import json
import os
import glob
from io import open
import unicodedata
import string
import re

'''
Reusable set of functions to convert a tuple of strings (pair) to tensors
Reference: https://pytorch.org/tutorials/intermediate/seq2seq_translation_tutorial.html
'''
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


def indexesFromSentence(lang, sentence):
    # map each word of the sentence to its index in the language's vocabulary
    return [lang.word2index[word] for word in sentence.split(' ')]
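The remaining helpers from the referenced tutorial, which the truncated snippet leads into (EOS_token, input_lang and output_lang are assumed to be defined as in that tutorial):

def tensorFromSentence(lang, sentence):
    # convert a sentence to a column tensor of word indices, terminated by EOS
    indexes = indexesFromSentence(lang, sentence)
    indexes.append(EOS_token)
    return torch.tensor(indexes, dtype=torch.long, device=device).view(-1, 1)

def tensorsFromPair(pair):
    # convert a (source, target) string pair to a pair of tensors
    input_tensor = tensorFromSentence(input_lang, pair[0])
    target_tensor = tensorFromSentence(output_lang, pair[1])
    return (input_tensor, target_tensor)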

from rouge_score import rouge_scorer


def read_input(filename='evaluation_input.txt'):
    input_pair = []
    # read evaluation_input.txt and append each (stripped) line to input_pair
    with open(filename, encoding='utf-8') as f:
        for line in f:
            input_pair.append(line.strip())
    return input_pair
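The rouge_scorer import above suggests the collected lines are then scored; a sketch of how that library is typically used (the reference/candidate strings here are made-up examples):

scorer = rouge_scorer.RougeScorer(['rouge1', 'rougeL'], use_stemmer=True)
reference = 'the cat sat on the mat'
candidate = 'a cat was sitting on the mat'
scores = scorer.score(reference, candidate)  # dict of Score(precision, recall, fmeasure) per rouge type
print(scores['rougeL'].fmeasure)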

import tensorflow as tf


class Encoder(tf.keras.Model):
    def __init__(self, vocab_size, embedding_dim, enc_units, batch_sz):
        '''
        vocab_size: number of unique words
        embedding_dim: dimension of your embedding output
        enc_units: how many units of RNN cell
        batch_sz: batch of data passed to the training in each epoch
        '''
        super(Encoder, self).__init__()
        self.batch_sz = batch_sz
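        self.enc_units = enc_units
        # Assumed completion of the truncated snippet: a typical tf.keras encoder pairs an
        # embedding layer with a GRU, as in the TensorFlow seq2seq/NMT tutorials.
        self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim)
        self.gru = tf.keras.layers.GRU(self.enc_units,
                                       return_sequences=True,
                                       return_state=True,
                                       recurrent_initializer='glorot_uniform')

    def call(self, x, hidden):
        # embed the token ids, run the GRU, and return per-step outputs plus the final state
        x = self.embedding(x)
        output, state = self.gru(x, initial_state=hidden)
        return output, state

    def initialize_hidden_state(self):
        # zero initial hidden state, one row per sequence in the batch
        return tf.zeros((self.batch_sz, self.enc_units))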