Created
January 23, 2023 00:56
-
-
Save mahimairaja/32d8a7ce9c513c7b520b5804387c9516 to your computer and use it in GitHub Desktop.
Training a Linear Regression model in a scikit-learn Pipeline.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Train a linear regression model inside a scikit-learn Pipeline.
#
# The pipeline preprocesses the raw feature matrix (scales numeric columns,
# one-hot encodes categorical columns) and then fits a LinearRegression
# estimator on the transformed data, so preprocessing and model travel
# together as a single fitted object.
import numpy as np  # NOTE(review): unused here — presumably used elsewhere in the notebook
from sklearn.compose import ColumnTransformer
from sklearn.pipeline import Pipeline
from sklearn.impute import SimpleImputer  # NOTE(review): imported but never used — confirm or drop
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from sklearn.linear_model import LinearRegression

# Preprocessing for numeric columns, addressed by positional index
# (columns 6-9 of the training matrix): standardize to zero mean / unit variance.
numeric_features = [6, 7, 8, 9]
numeric_transformer = Pipeline(steps=[
    ('scaler', StandardScaler())])

# Preprocessing for categorical columns (columns 0-5): one-hot encode.
# handle_unknown='ignore' makes prediction-time categories unseen during
# training encode to all-zeros instead of raising an error.
categorical_features = [0, 1, 2, 3, 4, 5]
categorical_transformer = Pipeline(steps=[
    ('onehot', OneHotEncoder(handle_unknown='ignore'))])

# Combine both preprocessing branches: each transformer is applied only
# to its own column subset, and the outputs are concatenated.
preprocessor = ColumnTransformer(
    transformers=[
        ('num', numeric_transformer, numeric_features),
        ('cat', categorical_transformer, categorical_features)])

# Full pipeline: preprocessing first, then the regressor.
pipeline = Pipeline(steps=[('preprocessor', preprocessor),
                           ('regressor', LinearRegression())])

# Fit the whole pipeline on the training set.
# NOTE(review): X_train and y_train are assumed to come from an earlier
# train/test split defined outside this snippet — confirm against the caller.
model = pipeline.fit(X_train, y_train)
print(model)

# Expected output (the repr of the fitted pipeline):
# Pipeline(steps=[('preprocessor',
#                  ColumnTransformer(transformers=[('num',
#                                                   Pipeline(steps=[('scaler',
#                                                                    StandardScaler())]),
#                                                   [6, 7, 8, 9]),
#                                                  ('cat',
#                                                   Pipeline(steps=[('onehot',
#                                                                    OneHotEncoder(handle_unknown='ignore'))]),
#                                                   [0, 1, 2, 3, 4, 5])])),
#                 ('regressor', LinearRegression())])
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment