ENH: weights_ -> estimator_weights_
glouppe committed Jan 23, 2013
1 parent 3d89ae1 commit 2497856
Showing 2 changed files with 19 additions and 19 deletions.
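
For orientation (not part of the commit itself), a minimal sketch of how the renamed attribute is read after fitting. The dataset helper and parameter values below are illustrative assumptions, not taken from this diff:

    from sklearn.datasets import make_classification
    from sklearn.ensemble import AdaBoostClassifier

    # Toy data purely for illustration.
    X, y = make_classification(n_samples=200, random_state=0)

    clf = AdaBoostClassifier(n_estimators=10).fit(X, y)

    # After this commit the per-round boosting weights are exposed as
    # `estimator_weights_` (previously `weights_`), one entry per estimator.
    print(clf.estimator_weights_.shape)  # -> (10,)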
4 changes: 2 additions & 2 deletions examples/ensemble/plot_adaboost_multiclass.py
@@ -89,11 +89,11 @@
 pl.xlim((-20, len(bdt_discrete) + 20))

 pl.subplot(133)
-pl.plot(n_trees, bdt_discrete.weights_, "b", label='SAMME')
+pl.plot(n_trees, bdt_discrete.estimator_weights_, "b", label='SAMME')
 pl.legend()
 pl.ylabel('Weight')
 pl.xlabel('Tree')
-pl.ylim((0, bdt_discrete.weights_.max() * 1.2))
+pl.ylim((0, bdt_discrete.estimator_weights_.max() * 1.2))
 pl.xlim((-20, len(bdt_discrete) + 20))

 # prevent overlapping y-axis labels
34 changes: 17 additions & 17 deletions sklearn/ensemble/weight_boosting.py
@@ -57,7 +57,7 @@ def __init__(self,
             n_estimators=n_estimators,
             estimator_params=estimator_params)

-        self.weights_ = None
+        self.estimator_weights_ = None
         self.errors_ = None
         self.learning_rate = learning_rate
         self.compute_importances = compute_importances
@@ -110,7 +110,7 @@ def fit(self, X, y, sample_weight=None):

         # Clear any previous fit results
         self.estimators_ = []
-        self.weights_ = np.zeros(self.n_estimators, dtype=np.float)
+        self.estimator_weights_ = np.zeros(self.n_estimators, dtype=np.float)
         self.errors_ = np.ones(self.n_estimators, dtype=np.float)

         for iboost in xrange(self.n_estimators):
@@ -124,7 +124,7 @@ def fit(self, X, y, sample_weight=None):
             if sample_weight is None:
                 break

-            self.weights_[iboost] = weight
+            self.estimator_weights_[iboost] = weight
             self.errors_[iboost] = error

             # Stop if error is zero
@@ -142,10 +142,10 @@ def fit(self, X, y, sample_weight=None):
         # Sum the importances
         try:
             if self.compute_importances:
-                norm = self.weights_.sum()
+                norm = self.estimator_weights_.sum()
                 self.feature_importances_ = (
                     sum(weight * clf.feature_importances_ for weight, clf
-                        in zip(self.weights_, self.estimators_))
+                        in zip(self.estimator_weights_, self.estimators_))
                     / norm)

         except AttributeError:
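
The hunk above forms `feature_importances_` as the estimator-weight-weighted average of each fitted sub-estimator's own importances. A standalone sketch of that arithmetic with made-up numbers (all values hypothetical):

    import numpy as np

    estimator_weights = np.array([1.2, 0.8, 0.5])   # hypothetical boosting weights
    tree_importances = np.array([[0.7, 0.3],        # hypothetical per-tree
                                 [0.6, 0.4],        # feature importances
                                 [0.5, 0.5]])

    norm = estimator_weights.sum()
    feature_importances = np.dot(estimator_weights, tree_importances) / norm
    print(feature_importances)  # [0.628 0.372] -- still sums to 1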
@@ -285,7 +285,7 @@ class AdaBoostClassifier(BaseWeightBoosting, ClassifierMixin):
     `n_classes_` : int
         The number of classes.

-    `weights_` : list of floats
+    `estimator_weights_` : list of floats
         Weights for each estimator in the boosted ensemble.

     `errors_` : list of floats
@@ -399,7 +399,7 @@ def _boost(self, iboost, X, y, sample_weight):
         """
         if self.algorithm == "SAMME.R":
             return self._boost_real(iboost, X, y, sample_weight)
-        else: # elif self.algorithm == "SAMME":
+        else:  # elif self.algorithm == "SAMME":
             return self._boost_discrete(iboost, X, y, sample_weight)

     def _boost_real(self, iboost, X, y, sample_weight):
@@ -622,7 +622,7 @@ def decision_function(self, X, n_estimators=-1):
         norm = 0.

         for i, (weight, estimator) in enumerate(
-                zip(self.weights_, self.estimators_)):
+                zip(self.estimator_weights_, self.estimators_)):

             if i == n_estimators:
                 break
@@ -631,7 +631,7 @@ def decision_function(self, X, n_estimators=-1):

             if self.algorithm == "SAMME.R":
                 current_pred = _samme_proba(estimator, n_classes, X)
-            else: # elif self.algorithm == "SAMME":
+            else:  # elif self.algorithm == "SAMME":
                 current_pred = estimator.predict(X)
                 current_pred = (current_pred == classes).T * weight

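In the SAMME branch above, each estimator adds its weight to the column of the class it predicts, and the decision is the argmax of the accumulated, weight-normalized votes. A self-contained sketch of that loop (the function name and setup are hypothetical, not from this file):

    import numpy as np

    def samme_weighted_vote(estimators, estimator_weights, classes, X):
        # Accumulate weight in the column of each estimator's predicted class.
        pred = np.zeros((X.shape[0], len(classes)))
        for weight, estimator in zip(estimator_weights, estimators):
            pred += (estimator.predict(X)[:, np.newaxis] == classes) * weight
        # Normalize by the total weight, as decision_function does.
        pred /= estimator_weights.sum()
        return classes.take(np.argmax(pred, axis=1))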
@@ -687,7 +687,7 @@ def staged_decision_function(self, X, n_estimators=-1):
         norm = 0.

         for i, (weight, estimator) in enumerate(
-                zip(self.weights_, self.estimators_)):
+                zip(self.estimator_weights_, self.estimators_)):

             if i == n_estimators:
                 break
@@ -696,7 +696,7 @@ def staged_decision_function(self, X, n_estimators=-1):

             if self.algorithm == "SAMME.R":
                 current_pred = _samme_proba(estimator, n_classes, X)
-            else: # elif self.algorithm == "SAMME":
+            else:  # elif self.algorithm == "SAMME":
                 current_pred = estimator.predict(X)
                 current_pred = (current_pred == classes).T * weight

@@ -744,7 +744,7 @@ def predict_proba(self, X, n_estimators=-1):
         proba = None

         for i, (weight, estimator) in enumerate(
-                zip(self.weights_, self.estimators_)):
+                zip(self.estimator_weights_, self.estimators_)):

             if i == n_estimators:
                 break
@@ -800,7 +800,7 @@ def staged_predict_proba(self, X, n_estimators=-1):
         proba = None

         for i, (weight, estimator) in enumerate(
-                zip(self.weights_, self.estimators_)):
+                zip(self.estimator_weights_, self.estimators_)):

             if i == n_estimators:
                 break
@@ -882,7 +882,7 @@ class AdaBoostRegressor(BaseWeightBoosting, RegressorMixin):
     `estimators_` : list of classifiers
         The collection of fitted sub-estimators.

-    `weights_` : list of floats
+    `estimator_weights_` : list of floats
         Weights for each estimator in the boosted ensemble.

     `errors_` : list of floats
@@ -1054,7 +1054,7 @@ def predict(self, X, n_estimators=-1):
         pred = None

         for i, (weight, estimator) in enumerate(
-                zip(self.weights_, self.estimators_)):
+                zip(self.estimator_weights_, self.estimators_)):
             if i == n_estimators:
                 break

@@ -1065,7 +1065,7 @@ def predict(self, X, n_estimators=-1):
         else:
             pred += current_pred * weight

-        pred /= self.weights_.sum()
+        pred /= self.estimator_weights_.sum()

         return pred

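AdaBoostRegressor.predict above combines the sub-estimators by an estimator-weight-weighted mean of their predictions. The same arithmetic in isolation, with invented numbers:

    import numpy as np

    weights = np.array([1.0, 0.6])          # hypothetical estimator weights
    preds = np.array([[2.0, 4.0, 6.0],      # hypothetical predictions,
                      [3.0, 5.0, 7.0]])     # one row per estimator

    combined = np.dot(weights, preds) / weights.sum()
    print(combined)  # [2.375 4.375 6.375]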
@@ -1108,7 +1108,7 @@ def staged_predict(self, X, n_estimators=-1):
         norm = 0.

         for i, (weight, estimator) in enumerate(
-                zip(self.weights_, self.estimators_)):
+                zip(self.estimator_weights_, self.estimators_)):
             if i == n_estimators:
                 break

