ADP/실기
회귀분석(LinearRegression, Ridge, Lasso, Elasticnet)
hyerimir
2024. 1. 29. 19:36
# Machine-learning approach to regression
from sklearn.linear_model import LinearRegression
# Ridge  : L2 regularization
# Lasso  : L1 regularization
# Elastic-net : mix of L1 and L2 regularization
import pandas as pd
import numpy as np
import mglearn

# Load the extended Boston housing dataset.
# fix: the function is 'load_extended_boston' — the original
# 'load_exteded_boston' raises AttributeError.
X, y = mglearn.datasets.load_extended_boston()

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=48)

# ! pip install scikit-learn   (the PyPI package name is 'scikit-learn', not 'sklearn')
lr = LinearRegression()
lr.fit(X_train, y_train)

# Slope coefficients
lr.coef_
# Intercept
lr.intercept_

y_pred = lr.predict(X_test)
resid = y_test - y_pred

# R^2 on the train / test sets
lr.score(X_train, y_train)
lr.score(X_test, y_test)

# MSE computed by hand from the residuals
mse_manual = (resid ** 2).sum() / len(y_test)
# RMSE computed by hand
rmse_manual = np.sqrt(mse_manual)

# Same metrics via scikit-learn — should match the manual values
from sklearn.metrics import mean_squared_error
mse = mean_squared_error(y_test, y_pred)
rmse = np.sqrt(mse)
# Overfitting suspected -> possible multicollinearity; inspect VIF per feature
from statsmodels.stats.outliers_influence import variance_inflation_factor

vif_list = []
for i in range(X_train.shape[1]):
    # fix: the original line had mismatched brackets
    # ('append([variance_inflation_factor(X_train, i]') — SyntaxError
    vif_list.append([variance_inflation_factor(X_train, i)])
vif_df = pd.DataFrame(vif_list, columns=['vif'])
# Keep features whose VIF is below 10 (common rule of thumb for multicollinearity)
vif_df[vif_df['vif'] < 10]
# Ridge, Lasso, Elasticnet 모두 alpha 값에 영향을 받음
# alpha 값 조정을 통해 과대적합 피할 수 있음
# alpha 값이 너무 크면 과소적합이 될 수 있으므로 최적의 값을 찾아야 함
# Ridge regression (L2 penalty)
import pandas as pd
import numpy as np
import mglearn

# fix: 'load_extended_boston' — the original typo 'load_exteded_boston'
# raises AttributeError
X, y = mglearn.datasets.load_extended_boston()

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=48)

import warnings
# fix: the module is 'warnings' and the function is 'filterwarnings'
# (original 'warning.filterwarning' raises NameError)
warnings.filterwarnings(action='ignore')

from sklearn.linear_model import Ridge
ridge = Ridge()  # alpha=1.0 is the default
ridge.fit(X_train, y_train)

# Number of features with a non-zero coefficient
np.sum(ridge.coef_ != 0)
# Training-set R^2
ridge.score(X_train, y_train)
# Test-set R^2
ridge.score(X_test, y_test)
# Tune Ridge's alpha with a grid search (cross-validated)
from sklearn.model_selection import GridSearchCV
ridge = Ridge()
param_grid = {'alpha': (0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.5, 1, 2, 3)}
model = GridSearchCV(ridge, param_grid)
model.fit(X_train, y_train)
# fix: the fitted attribute is 'best_estimator_' (singular) —
# 'best_estimators_' raises AttributeError
model.best_estimator_

# Refit a plain Ridge with the chosen alpha and evaluate
model = Ridge(alpha=0.05)
model.fit(X_train, y_train)
np.sum(model.coef_ != 0)
model.score(X_train, y_train)
model.score(X_test, y_test)
# Lasso regression (L1 penalty) with alpha tuned by grid search.
# fix: import Lasso BEFORE instantiating it — the original imported it
# only after the first use, raising NameError.
from sklearn.linear_model import Lasso

lasso = Lasso()
param_grid = {'alpha': (0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.5, 1, 2, 3)}
# fix: search over the lasso estimator — the original passed 'ridge' by mistake
model_lasso = GridSearchCV(lasso, param_grid)
# fix: fit the grid-search object itself — the original called
# '.fit' on the still-undefined name 'model_lasso'
model_lasso.fit(X_train, y_train)
model_lasso.best_estimator_

# Refit a plain Lasso with the chosen alpha and evaluate
lasso = Lasso(alpha=0.002)
lasso.fit(X_train, y_train)
# Lasso zeroes out coefficients — count the features it kept
np.sum(lasso.coef_ != 0)
# fix: 'X_train' (original typo 'X_trian' raises NameError)
lasso.score(X_train, y_train)
lasso.score(X_test, y_test)
# Elastic-net: blends the L1 and L2 penalties in one estimator
from sklearn.linear_model import ElasticNet

elastic = ElasticNet()
elastic.fit(X_train, y_train)

# R^2 on the training and test sets
elastic.score(X_train, y_train)
elastic.score(X_test, y_test)
# Lasso alpha search (repeat of the earlier Lasso section, same fixes applied).
# fix: import Lasso BEFORE instantiating it — the original imported it
# only after the first use, raising NameError.
from sklearn.linear_model import Lasso

lasso = Lasso()
param_grid = {'alpha': (0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.5, 1, 2, 3)}
# fix: search over the lasso estimator — the original passed 'ridge' by mistake
model_lasso = GridSearchCV(lasso, param_grid)
# fix: fit the grid-search object itself — the original called
# '.fit' on the still-undefined name 'model_lasso'
model_lasso.fit(X_train, y_train)
model_lasso.best_estimator_

# Refit a plain Lasso with the chosen alpha and evaluate
lasso = Lasso(alpha=0.002)
lasso.fit(X_train, y_train)
# Count the coefficients Lasso kept non-zero
np.sum(lasso.coef_ != 0)
# fix: 'X_train' (original typo 'X_trian' raises NameError)
lasso.score(X_train, y_train)
lasso.score(X_test, y_test)