Skip to content

Instantly share code, notes, and snippets.

View sriram-pasupuleti's full-sized avatar

Sriram sriram-pasupuleti

View GitHub Profile
class Custom_model:
    """Configuration holder for a bagging-style ensemble of base regressors.

    Stores how many base models to train and how many samples each base
    model is trained on. (The training/prediction logic is not part of this
    fragment.)
    """

    def __init__(self, sample_k, sample_size, base_models=None):
        """
        Parameters:
        sample_k - number of base models
        sample_size - number of data samples per base model
        base_models - optional list of base models to choose from
        """
        self.k = sample_k
        self.sample_size = sample_size
        # Bug fix: the original accepted `base_models` but never stored it,
        # so the caller's model list was silently discarded.
        self.base_models = base_models
# --- LightGBM hyper-parameter sweep (separate gist fragment; indentation was
# lost in this paste, so it appears fused into the class above) ---
# Grid over feature_fraction x max_depth; in LightGBM max_depth=-1 means
# "no depth limit".
fraction = [0.6, 1]
depth = [2,3,5,-1]
# Cartesian product: every [feature_fraction, max_depth] pair.
params = [[f,d] for f in fraction for d in depth]
# Per-configuration error accumulators; the code that fills them is missing
# from this truncated fragment.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for p in params:
# NOTE(review): loop body is truncated — the model is constructed but never
# fitted or evaluated here. `lgb` is presumably `import lightgbm as lgb`;
# confirm against the original gist.
GBDT = lgb.LGBMRegressor(feature_fraction = p[0],max_depth=p[1],random_state=42)
# --- RANSAC-wrapped SGD regression, swept over L2 regularisation strengths.
# NOTE(review): fragment is truncated — the metric lists below are declared
# but the evaluation/append code is missing from this paste.
L2 = [0.001, 0.01, 0.1, 1, 10, 100, 1000]
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for l2 in L2:
    # Fix: 'squared_loss' was renamed 'squared_error' in scikit-learn 1.0 and
    # removed in 1.2; this also matches the spelling already used later in
    # this file.
    SGD = SGDRegressor(loss='squared_error', penalty='l2', alpha=l2)
    # NOTE(review): `base_estimator` was renamed `estimator` in scikit-learn
    # 1.2 — update when migrating; left as-is for compatibility with the
    # sklearn version this script was written against.
    RGD = RANSACRegressor(base_estimator=SGD)
    RGD.fit(X_train, y_train)
def interpolate_data(data, expand_len):
# NOTE(review): indentation was stripped from this paste — the lines below
# belong inside the function body. The fragment is also truncated mid-loop:
# the `try:` on the last line has no body here.
# Purpose (as far as visible): upsample the DataFrame `data` towards
# `expand_len` rows by allocating a NaN-filled frame of the target length;
# presumably the missing code copies source rows in and interpolates the
# gaps — TODO confirm against the original gist.
l_row = data.shape[0]
l_col = data.shape[1]
# Dummy rows to insert per original row: floor of the expansion ratio, as int.
dummy_rows_len = np.floor(expand_len/l_row).astype('int')
# expand_len x l_col array of NaNs, reshaped to match the source column count.
upsamp = np.array([np.nan]*l_col*expand_len).reshape(-1, l_col)
DF_upsamp = pd.DataFrame(upsamp, columns=data.columns)
# Write cursor into the upsampled frame.
id_upsamp = 0
for idx in range(data.shape[0]):
try:
# --- GradientBoostingRegressor hyper-parameter sweep (separate fragment) ---
# Grid over n_estimators x max_depth.
n_estimators = [5,10,20]
depth = [2,3,5]
# All [n_estimators, max_depth] combinations.
params = [[n,d] for n in n_estimators for d in depth]
# Metric accumulators; the code that appends to them is missing from this
# truncated paste.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for p in params:
# NOTE(review): truncated loop body — model constructed but never fitted or
# evaluated here.
GBDT = GradientBoostingRegressor(n_estimators=p[0], max_depth=p[1])
# --- RandomForestRegressor hyper-parameter sweep (separate fragment) ---
# Grid over n_estimators x max_depth.
n_estimators = [20,40,100]
depth = [2,3,5]
# All [n_estimators, max_depth] combinations.
params = [[n,d] for n in n_estimators for d in depth]
# Metric accumulators; the filling code is missing from this truncated paste.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for p in params:
# NOTE(review): truncated loop body — model constructed but never fitted or
# evaluated here.
RF = RandomForestRegressor(n_estimators=p[0], max_depth=p[1])
# --- AdaBoostRegressor sweep over ensemble size (separate fragment) ---
n_estimators = [20,30,50,60]
# Metric accumulators; never appended to in this truncated paste.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for n in n_estimators:
# Fit on the training split and score the training predictions; the
# evaluation code that would consume `train_pred` is cut off here.
# `X_train`/`y_train` come from an earlier, unseen part of the script.
ABD = AdaBoostRegressor(n_estimators=n)
ABD.fit(X_train, y_train)
train_pred = ABD.predict(X_train)
# --- DecisionTreeRegressor sweep over max_depth (separate fragment) ---
depth = [2,3,5,6]
# Metric accumulators; never appended to in this truncated paste.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for d in depth:
# Fit one tree per depth setting; the code that evaluates `train_pred`
# (and any test-set scoring) is cut off here.
DT = DecisionTreeRegressor(max_depth=d)
DT.fit(X_train, y_train)
train_pred = DT.predict(X_train)
# --- SGDRegressor (epsilon-insensitive loss, i.e. linear-SVR style) swept
# over regularisation strength alpha (separate fragment) ---
alpha = [0.001, 0.01, 0.1, 1, 10, 100, 1000]
# Metric accumulators; never appended to in this truncated paste.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for a in alpha:
# Fit one model per alpha; the evaluation code that would consume
# `train_pred` is cut off here.
SGD = SGDRegressor(loss='epsilon_insensitive', penalty='l2', alpha=a)
SGD.fit(X_train, y_train)
train_pred = SGD.predict(X_train)
# --- SGDRegressor (squared-error loss) swept over L2 regularisation
# strengths (separate fragment; near-duplicate of an earlier sweep) ---
L2 = [0.001, 0.01, 0.1, 1, 10, 100, 1000]
# Metric accumulators; never appended to in this truncated paste.
train_MAE = []
test_MAE = []
train_MAPE = []
test_MAPE = []
for l2 in L2:
# Fit one model per regularisation strength; the evaluation code that would
# consume `train_pred` is cut off here.
SGD = SGDRegressor(loss='squared_error', penalty='l2', alpha=l2)
SGD.fit(X_train, y_train)
train_pred = SGD.predict(X_train)