-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathBoost.py
More file actions
53 lines (40 loc) · 2.1 KB
/
Boost.py
File metadata and controls
53 lines (40 loc) · 2.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.ensemble import AdaBoostClassifier,GradientBoostingClassifier
from xgboost import XGBClassifier
from sklearn.metrics import accuracy_score, classification_report
from sklearn.datasets import make_classification
# Demonstration of three boosting ensembles (AdaBoost, Gradient Boosting,
# XGBoost) on a synthetic binary-classification task: fit each model, report
# test accuracy and per-class metrics, then compare feature importances.

# Generate synthetic dataset: 1000 samples, 20 features (15 informative,
# 5 redundant); random_state fixes the data for reproducibility.
X, y = make_classification(n_samples=1000, n_features=20, n_informative=15,
                           n_redundant=5, random_state=42)

# Split dataset into training (80%) and testing (20%) sets.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2,
                                                    random_state=42)


def _fit_and_report(name, model):
    """Fit *model* on the training split, print its test accuracy and
    classification report labelled with *name*, and return the fitted model.

    Factors out the train/predict/report sequence that was duplicated for
    each of the three classifiers; the printed output is unchanged.
    """
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    print(f"{name} Classifier Accuracy:", accuracy_score(y_test, y_pred))
    print(f"{name} Classification Report:\n",
          classification_report(y_test, y_pred))
    return model


# 1. AdaBoost Classifier
ada_boost = _fit_and_report(
    "AdaBoost",
    AdaBoostClassifier(n_estimators=50, learning_rate=1.0, random_state=42))

# 2. Gradient Boosting Classifier
grad_boost = _fit_and_report(
    "Gradient Boosting",
    GradientBoostingClassifier(n_estimators=100, learning_rate=0.1,
                               max_depth=3, random_state=42))

# 3. XGBoost Classifier (eval_metric set explicitly to silence the default
# warning on older xgboost versions)
xgb_model = _fit_and_report(
    "XGBoost",
    XGBClassifier(n_estimators=100, learning_rate=0.1, max_depth=3,
                  random_state=42, eval_metric='logloss'))

# Plot feature importances of Gradient Boosting and XGBoost side by side.
# (AdaBoost is omitted, as in the original comparison.)
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
plt.barh(range(len(grad_boost.feature_importances_)),
         grad_boost.feature_importances_)
plt.title("Gradient Boosting Feature Importance")
plt.subplot(1, 2, 2)
plt.barh(range(len(xgb_model.feature_importances_)),
         xgb_model.feature_importances_)
plt.title("XGBoost Feature Importance")
plt.tight_layout()  # fix: without this the two subplot titles overlap/clip
plt.show()