GitHub Gists by Shagun Kala (Shagun-25)
# Visualising the anomalies
import matplotlib.pyplot as plt

plt.figure(figsize=(16, 6))
plt.plot(test_data[61:]['date'], test_data[61:]['price'].values, color='orange', label='Test Data')
plt.scatter(anamolies['date'], anamolies['price'], marker='*', s=200, color='black', label='Anomaly')
plt.title('Anomaly Detection in Stock Prices')
plt.xlabel('Years')
plt.ylabel('NIFTY Index Prices')
plt.legend()
plt.show()
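The `anamolies` frame plotted above is built earlier in the project. As a point of reference, here is a minimal sketch of one common way such points are flagged, thresholding the absolute error between actual test prices and model predictions; the `predictions` array, the 3-sigma cut-off, and the column names are assumptions, not taken from the gist.

import numpy as np

# Hypothetical sketch: flag test days whose prediction error is unusually large
actual = test_data[61:]['price'].values
errors = np.abs(actual - predictions.flatten())   # `predictions` assumed to align with test_data[61:]
threshold = errors.mean() + 3 * errors.std()      # assumed 3-sigma cut-off
anamolies = test_data[61:][errors > threshold]    # same variable name as in the plot above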
# Visualising the sentiment scores over time
plt.figure(figsize=(20, 5))
plt.plot(tweet_news['score'])
plt.xlabel('Years')
plt.ylabel('Sentiment Scores')
plt.show()
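The `score` column plotted here is produced elsewhere in the project. Purely as an illustration of how a per-row compound sentiment score could be computed, here is a sketch using NLTK's VADER analyser; the library choice and the `text` column name are assumptions, not taken from the gist.

import nltk
from nltk.sentiment.vader import SentimentIntensityAnalyzer

nltk.download('vader_lexicon')   # one-time lexicon download
sia = SentimentIntensityAnalyzer()

# Hypothetical: compound score in [-1, 1] for every tweet/news item
tweet_news['score'] = tweet_news['text'].apply(lambda t: sia.polarity_scores(str(t))['compound'])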
from wordcloud import WordCloud, STOPWORDS
import matplotlib.pyplot as plt

stopwords = set(STOPWORDS)

# Function to plot a word cloud of the given text data
def show_wordcloud(data, title=None):
    wordcloud = WordCloud(
        background_color='white',
        stopwords=stopwords,
        max_words=200000,
        max_font_size=40).generate(str(data))   # generate-and-plot steps below are standard usage (gist preview is truncated)
    plt.figure(figsize=(12, 12))
    if title:
        plt.title(title, fontsize=20)
    plt.imshow(wordcloud)
    plt.axis('off')
    plt.show()
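A hypothetical call, assuming the combined tweet and news text lives in a `text` column of `tweet_news` (column name not confirmed by the gist):

show_wordcloud(tweet_news['text'], title='Most Frequent Words in Tweets and News')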
import pandas as pd
import pickle

# Function to Predict Price for Random 60 Consecutive Days (quantized model)
def prediction_multiple_days_quantized():
    # Loading Data
    data = pd.read_csv('data_processed_final.csv')
    with open('min_max.pickle', 'rb') as i:
        minmax = pickle.load(i)
    # Predicting

# Function to Predict Next Day Index Price (quantized model)
def prediction_single_day_quantized(date):  # date: date for which the next day's price is predicted
    # Loading Data
    data = pd.read_csv('data_processed_final.csv')
    with open('min_max.pickle', 'rb') as i:
        minmax = pickle.load(i)
    # Predicting

# Function to Predict Price for Random 60 Consecutive Days
def prediction_multiple_days():
    # Loading Data
    data = pd.read_csv('data_processed_final.csv')
    with open('min_max.pickle', 'rb') as i:
        minmax = pickle.load(i)
    # Predicting

# Function to Predict Next Day Index Price
def prediction_single_day(date):  # date: date for which the next day's price is predicted
    # Loading Data
    data = pd.read_csv('data_processed_final.csv')
    with open('min_max.pickle', 'rb') as i:
        minmax = pickle.load(i)
    # Predicting
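All four functions above load `min_max.pickle` before the truncated prediction step. As a minimal sketch, this is how a scaled model output would typically be mapped back to an index price, assuming `minmax` holds the training minimum and maximum of the price series; the structure of the pickle and the helper name are assumptions, not taken from the gist.

def inverse_minmax(scaled_value, minmax):
    # Hypothetical helper: undo min-max scaling, assuming minmax = (price_min, price_max)
    price_min, price_max = minmax
    return scaled_value * (price_max - price_min) + price_min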
import tensorflow as tf

# Wrap the trained Keras model in a tf.function with a fixed input signature
run_model = tf.function(lambda x: model(x))
BATCH_SIZE = 64
STEPS = None
INPUT_SIZE = 1
concrete_func = run_model.get_concrete_function(
    tf.TensorSpec([BATCH_SIZE, STEPS, INPUT_SIZE], model.inputs[0].dtype))
MODEL_DIR = "./saved_model"

# Convert the Keras model to TensorFlow Lite with default (dynamic-range) quantization
converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.optimizations = [tf.lite.Optimize.DEFAULT]
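The snippet above stops before the conversion itself. A minimal sketch of the remaining steps and of running the converted model through the TFLite interpreter follows; the output file name and the all-zeros sample input are placeholders, not taken from the gist.

import numpy as np

# Convert and save the quantized model (file name is an assumption)
tflite_model = converter.convert()
with open('model_quantized.tflite', 'wb') as f:
    f.write(tflite_model)

# Run inference on one input window with the TFLite interpreter
interpreter = tf.lite.Interpreter(model_path='model_quantized.tflite')
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

sample = np.zeros(input_details[0]['shape'], dtype=np.float32)   # placeholder input window
interpreter.set_tensor(input_details[0]['index'], sample)
interpreter.invoke()
prediction = interpreter.get_tensor(output_details[0]['index'])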
!rm -rf ./logs/
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense
keras.backend.clear_session()
%load_ext tensorboard

# Building the LSTM model
model = Sequential()
# Adding the input layer (128 LSTM units over windows of shape (timesteps, 1))
model.add(LSTM(units=128, activation='tanh',
               kernel_initializer=tf.keras.initializers.glorot_uniform(seed=26),
               input_shape=(trainX.shape[1], 1), unroll=True))
# Adding the output layer
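The gist preview cuts off after the output-layer comment. A minimal sketch of how the model would typically be finished and trained for single-value price regression follows; the output width, optimizer, loss, epoch count, and the target array name trainY are illustrative assumptions, not taken from the gist.

model.add(Dense(units=1))   # assumed: one scaled price value as the regression target

# Compile and train (hyperparameters are assumptions)
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(trainX, trainY, epochs=50, batch_size=64, validation_split=0.1)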