This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Basic 3-layer feed-forward neural network with back-propagation, from scratch.
# This section sets up dependencies and the sample training data.
import numpy as np
from scipy import optimize
import matplotlib.pyplot as plt

# Sample input data: each row of x is one training example with 2 features;
# y holds the corresponding scalar targets (one per example).
# NOTE(review): targets look like percentage scores (75, 82, 93) — presumably
# normalized later before training; confirm downstream.
x = np.array(([3, 5], [5, 1], [10, 2]), dtype=float)
y = np.array(([75], [82], [93]), dtype=float)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Step 1: training data and preprocessing.
#   a) Tokenize the input text (sentences into words).
#   b) Build the vocabulary and drop infrequent words.
#   c) Add "start" and "end" tokens to each sentence.

# Maximum vocabulary size: only the 8000 most frequent words are kept.
vocab_size = 8000
# Placeholder token substituted for any word outside the vocabulary.
unknown_token = 'Unknown_Token'
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Recurrent neural network based language model.
# Aim: train the network to generate random text using words from the
# vocabulary that is grammatically plausible and makes some sense.
# The vocabulary is built from the raw input text data.

# Import dependencies (stdlib only in this section).
import csv
import itertools
import os
import sys