-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathregressor
77 lines (63 loc) · 2.56 KB
/
regressor
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# Dependencies for the regressor-comparison script below.
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.ensemble import (
    RandomForestRegressor,
    AdaBoostRegressor,
    BaggingRegressor,
    HistGradientBoostingRegressor,
)
from sklearn.naive_bayes import GaussianNB

# "!pip install catboost" is IPython/Jupyter shell magic and a SyntaxError in a
# plain .py file. Install from a shell instead (`pip install catboost`); fail
# early here with an actionable message if it is missing.
try:
    from catboost import CatBoostRegressor
except ImportError as exc:
    raise ImportError(
        "catboost is not installed; run `pip install catboost` first"
    ) from exc
# RandomForestRegressor: bagged decision-tree ensemble, seeded for
# reproducible results across runs.
rf_regressor = RandomForestRegressor(n_estimators=1000, random_state=42)
rf_regressor.fit(x_train, y_train)
rf_pred = rf_regressor.predict(x_test)

# Score the random forest on the held-out split.
mse_rf = mean_squared_error(y_test, rf_pred)
r2_rf = r2_score(y_test, rf_pred)
for metric_name, metric_value in (
    ("Mean Squared Error", mse_rf),
    ("R-squared", r2_rf),
):
    print("Random Forest - {}: {}".format(metric_name, metric_value))
print()
# AdaBoostRegressor: boosted ensemble of shallow decision trees.
# random_state added so repeated runs of this comparison are reproducible,
# consistent with the seeded RandomForestRegressor above.
abc_regressor = AdaBoostRegressor(random_state=42)
abc_regressor.fit(x_train, y_train)
abc_pred = abc_regressor.predict(x_test)
# Evaluate the AdaBoostRegressor on the held-out split.
mse_abc = mean_squared_error(y_test, abc_pred)
r2_abc = r2_score(y_test, abc_pred)
print("AdaBoost - Mean Squared Error: {}".format(mse_abc))
print("AdaBoost - R-squared: {}".format(r2_abc))
print()
# BaggingRegressor: bootstrap-aggregated base estimators (decision trees by
# default). random_state added so repeated runs are reproducible, consistent
# with the seeded RandomForestRegressor above.
bag_regressor = BaggingRegressor(random_state=42)
bag_regressor.fit(x_train, y_train)
bag_pred = bag_regressor.predict(x_test)
# Evaluate the BaggingRegressor on the held-out split.
mse_bag = mean_squared_error(y_test, bag_pred)
r2_bag = r2_score(y_test, bag_pred)
print("Bagging - Mean Squared Error: {}".format(mse_bag))
print("Bagging - R-squared: {}".format(r2_bag))
print()
# HistGradientBoostingRegressor: histogram-based gradient boosting.
# random_state added so repeated runs are reproducible, consistent with the
# seeded RandomForestRegressor above.
hgbc_regressor = HistGradientBoostingRegressor(random_state=42)
hgbc_regressor.fit(x_train, y_train)
hgbc_pred = hgbc_regressor.predict(x_test)
# Evaluate the HistGradientBoostingRegressor on the held-out split.
mse_hgbc = mean_squared_error(y_test, hgbc_pred)
r2_hgbc = r2_score(y_test, hgbc_pred)
print("Hist Gradient Boosting - Mean Squared Error: {}".format(mse_hgbc))
print("Hist Gradient Boosting - R-squared: {}".format(r2_hgbc))
print()
# Naive baseline. GaussianNB is a *classifier*: fitting it on a continuous
# target raises ValueError ("Unknown label type: continuous"), so it cannot
# serve as a regression baseline. DummyRegressor with strategy="mean"
# (always predicts the training-set mean) is the standard naive baseline
# for regression. Variable names are kept for compatibility with any
# downstream code that references them.
from sklearn.dummy import DummyRegressor

gnb_regressor = DummyRegressor(strategy="mean")
gnb_regressor.fit(x_train, y_train)
gnb_pred = gnb_regressor.predict(x_test)
# Evaluate the mean-predictor baseline on the held-out split.
mse_gnb = mean_squared_error(y_test, gnb_pred)
r2_gnb = r2_score(y_test, gnb_pred)
print("Mean Baseline - Mean Squared Error: {}".format(mse_gnb))
print("Mean Baseline - R-squared: {}".format(r2_gnb))
print()
# CatBoostRegressor: gradient boosting on oblivious trees. silent=True
# suppresses per-iteration training logs. random_state added so repeated
# runs are reproducible, consistent with the other seeded models above.
cbc_regressor = CatBoostRegressor(
    silent=True, depth=6, iterations=500, random_state=42
)
cbc_regressor.fit(x_train, y_train)
cbc_pred = cbc_regressor.predict(x_test)
# Evaluate the CatBoostRegressor on the held-out split.
mse_cbc = mean_squared_error(y_test, cbc_pred)
r2_cbc = r2_score(y_test, cbc_pred)
print("CatBoost - Mean Squared Error: {}".format(mse_cbc))
print("CatBoost - R-squared: {}".format(r2_cbc))