# Join the 15-day predictions back onto the validation frame and compare the
# predicted class against the actual class of the price change.
df_15_val.reset_index(inplace=True)
ypred_15 = pd.Series(ypred_15)
df_15_val = df_15_val.join(ypred_15.rename('15d_forecast'), how='left')
df_15_val["True_Forecasted"] = df_15_val.future_15dprice_class.astype(str) + " " + df_15_val["15d_forecast"].astype(str)
# Summary statistics of the realised 15-day price change for each (actual, forecasted) pair
df_15_val[["future_15dprice_change", "True_Forecasted"]].groupby("True_Forecasted").agg({'future_15dprice_change': ['mean', 'median', 'min', 'max']})
for j, i in enumerate(targets_all):
    remove_redundant_list = list(targets_all)
    remove_redundant_list.remove(i)
    new_column_classification = i + "_classification"
    # first let's create a new classification column for this horizon: 0 when the price dropped and 1 when it increased
    df_compact_reserve.loc[df_compact_reserve[i] < 0.01, new_column_classification] = 0
    df_compact_reserve.loc[df_compact_reserve[i] >= 0.01, new_column_classification] = 1
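    # Hedged sketch (not the gist's own code): remove_redundant_list could be used
    # here to drop the other forecast horizons so they do not leak into the
    # features for the current target i; the feature/label split is an assumption.
    features = df_compact_reserve.drop(columns=remove_redundant_list + [i])
    labels = df_compact_reserve[new_column_classification]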
# let's join the prediction results to the test dataframe and get the indexes
target_vars = ["future_15dprice_change", "future_30dprice_change", "future_60dprice_change", "future_90dprice_change", "future_120dprice_change", "future_150dprice_change"]
min_15d_threshold = 0.038
min_30d_threshold = 0.059
min_60d_threshold = 0.093
min_90d_threshold = 0.122
min_120d_threshold = 0.148
min_150d_threshold = 0.173
list_of_thresholds = [min_15d_threshold, min_30d_threshold, min_60d_threshold, min_90d_threshold, min_120d_threshold, min_150d_threshold]
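The six thresholds map one-to-one onto the six horizons in target_vars. A minimal sketch of how the two lists could be zipped together to build binary labels per horizon; the label column name and the df_compact_reserve dataframe are assumptions, not part of the gist.

for target, threshold in zip(target_vars, list_of_thresholds):
    label_col = target.replace("_change", "_class")   # hypothetical label name
    # 1 when the realised change clears the horizon's minimum threshold, else 0
    df_compact_reserve[label_col] = (df_compact_reserve[target] >= threshold).astype(int)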
Kazanskyi / code.py
Created June 8, 2022 14:31
Script that combines all stock data from the different sources
import os
import pandas as pd
import numpy as np
from datetime import date
import VIX
import new_earnings
import tiingo_data as tii
import US_bond
import crisis_dataset
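The imports above suggest that each helper module produces a daily dataframe which code.py then merges into one dataset. A hedged sketch of that combination step: get_vix, download_tables and get_dates come from the previews below, while fetch_prices and the "date" merge key are assumptions.

def build_dataset(symbol):
    prices = tii.fetch_prices(symbol)        # hypothetical Tiingo price helper
    vix = VIX.get_vix()                      # CBOE VIX daily history
    bonds = US_bond.download_tables()        # 10-year bond daily data
    crisis = crisis_dataset.get_dates()      # days since the last major S&P drop
    combined = prices
    for extra in (vix, bonds, crisis):
        combined = combined.merge(extra, on="date", how="left")
    return combined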
Kazanskyi / new_earnings.py
Created June 8, 2022 13:43
Zacks.com future earnings and dividends data
import time
from datetime import date
import pandas as pd
import numpy as np
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
import io
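A hedged sketch of the Selenium scrape these imports point to; the Zacks page URL, the wait time, and the table parsing are assumptions rather than the gist's actual code.

def fetch_earnings(symbol):
    driver = webdriver.Chrome(ChromeDriverManager().install())
    try:
        driver.get(f"https://www.zacks.com/stock/quote/{symbol}")  # assumed page
        time.sleep(5)                          # give the page time to render
        html = driver.page_source
    finally:
        driver.quit()
    # read_html extracts every <table> on the page; picking the earnings and
    # dividend tables out of the list is left to the real script
    return pd.read_html(io.StringIO(html))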
Kazanskyi / crisis_dataset.py
Created June 7, 2022 21:04
Manually set the dates on which the S&P 500 dropped by more than 15% and count the number of days since the most recent drop
import datetime
from datetime import date
import pandas as pd
import numpy as np
def get_dates():
    last_date = date.today()
    historical_days = 1450
    historical_date = last_date - datetime.timedelta(days=historical_days)
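    # Hedged continuation -- the preview is truncated here. The drop dates below are
    # placeholders, not the gist's manually curated list. For each calendar day in
    # the window, count the days elapsed since the most recent >15% S&P 500 drop.
    crash_dates = pd.to_datetime([date(2020, 3, 23), date(2018, 12, 24)])  # examples only
    calendar = pd.DataFrame({"date": pd.date_range(historical_date, last_date)})
    calendar["days_since_drop"] = calendar["date"].apply(
        lambda d: (d - crash_dates[crash_dates <= d].max()).days
        if (crash_dates <= d).any() else np.nan
    )
    return calendar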
Kazanskyi / US_bond.py
Created June 7, 2022 20:48
10-year Treasury bond historical daily data from Yahoo Finance
import time
import datetime
from datetime import date
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.common.keys import Keys
import pandas as pd
import os
def download_tables(last_date=date.today(), historical_days=1450):
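    # Hedged continuation -- the body is truncated in this preview. The ^TNX history
    # URL and the read_html parsing are assumptions about how the download works.
    historical_date = last_date - datetime.timedelta(days=historical_days)
    period1 = int(time.mktime(historical_date.timetuple()))
    period2 = int(time.mktime(last_date.timetuple()))
    url = ("https://finance.yahoo.com/quote/%5ETNX/history"
           f"?period1={period1}&period2={period2}&interval=1d")
    driver = webdriver.Chrome(ChromeDriverManager().install())
    try:
        driver.get(url)
        time.sleep(5)                              # let the quotes table render
        tables = pd.read_html(driver.page_source)  # every <table> on the page
    finally:
        driver.quit()
    return tables[0]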
Kazanskyi / VIX.py
Created June 7, 2022 20:44
VIX volatility data
import pandas as pd
import datetime
from datetime import date
def get_vix(last_date=date.today(), historical_days=1450):
    historical_date = last_date - datetime.timedelta(days=historical_days)
    url = "https://cdn.cboe.com/api/global/us_indices/daily_prices/VIX_History.csv"
    response = pd.read_csv(url)
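    # Hedged continuation of get_vix(): assumes the CBOE file carries a DATE column
    # next to OPEN/HIGH/LOW/CLOSE, which may differ from the gist's actual parsing.
    response["DATE"] = pd.to_datetime(response["DATE"]).dt.date
    in_window = (response["DATE"] >= historical_date) & (response["DATE"] <= last_date)
    return response.loc[in_window].reset_index(drop=True)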
import pandas as pd
import numpy as np
import tiingo_data as tii
from datetime import date
print("Please enter the company's symbol: ")
symbol = input()
# Get every company's sector, industry, location, etc.
metadata = tii.fetch_metadata()
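A hedged sketch of the lookup that plausibly follows; it assumes fetch_metadata() returns a dataframe with a ticker column plus the sector/industry/location fields mentioned in the comment above.

company_info = metadata[metadata["ticker"].str.upper() == symbol.upper()]
if company_info.empty:
    raise SystemExit(f"Symbol {symbol} not found in the Tiingo metadata.")
print(company_info[["ticker", "sector", "industry", "location"]].to_string(index=False))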
import pandas as pd #data manipulation and analysis package
import numpy as np
import datetime
from datetime import date
import requests
# Enter the Tiingo API key
print("Enter your 40-character Tiingo API key: ")
Tiingo_API = input()
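A hedged sketch of a price request made with the key entered above. The endpoint and parameter names follow Tiingo's public daily-prices API, but they are assumptions here rather than the gist's own code.

def get_prices(symbol, start_date, end_date, token=Tiingo_API):
    url = f"https://api.tiingo.com/tiingo/daily/{symbol}/prices"
    params = {"startDate": start_date, "endDate": end_date, "token": token}
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    # one row per trading day: open/high/low/close, adjusted prices, volume
    return pd.DataFrame(resp.json())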