Mirror of https://github.com/Doctorado-ML/STree.git (synced 2025-08-17 16:36:01 +00:00)
Compare commits: graphviz...entropy_fu (1 commit)

Commit 7a625eee09
@@ -478,18 +478,6 @@ class Splitter:
 
     @staticmethod
     def _entropy(y: np.array) -> float:
-        """Compute entropy of a labels set
-
-        Parameters
-        ----------
-        y : np.array
-            set of labels
-
-        Returns
-        -------
-        float
-            entropy
-        """
         n_labels = len(y)
         if n_labels <= 1:
             return 0
@@ -497,13 +485,10 @@ class Splitter:
         proportions = counts / n_labels
         n_classes = np.count_nonzero(proportions)
         if n_classes <= 1:
-            return 0
-        entropy = 0.0
-        # Compute standard entropy.
-        for prop in proportions:
-            if prop != 0.0:
-                entropy -= prop * log(prop, n_classes)
-        return entropy
+            return 0.0
+        from scipy.stats import entropy
+        return entropy(y, base=n_classes)
 
     def information_gain(
         self, labels: np.array, labels_up: np.array, labels_dn: np.array
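For context, here is a minimal standalone sketch (not part of the commit) of the entropy computation this diff touches. The helper names entropy_manual and entropy_scipy are illustrative only, and the sketch assumes non-negative integer class labels so that np.bincount yields per-class counts. scipy.stats.entropy normalizes its input to a probability distribution before computing -sum(p * log(p)) in the requested base, so applying it to the class counts reproduces the value of the original hand-written loop.

# Illustrative sketch only -- helper names are not from the STree code base.
import numpy as np
from math import log
from scipy.stats import entropy


def entropy_manual(y: np.ndarray) -> float:
    # Original loop: entropy of the label proportions, log base = number of classes.
    n_labels = len(y)
    if n_labels <= 1:
        return 0.0
    counts = np.bincount(y)          # assumes non-negative integer labels
    proportions = counts / n_labels
    n_classes = np.count_nonzero(proportions)
    if n_classes <= 1:
        return 0.0
    result = 0.0
    for prop in proportions:
        if prop != 0.0:
            result -= prop * log(prop, n_classes)
    return result


def entropy_scipy(y: np.ndarray) -> float:
    # scipy.stats.entropy normalizes its input, so passing the raw counts
    # gives the same value as passing the proportions.
    counts = np.bincount(y)
    n_classes = np.count_nonzero(counts)
    if n_classes <= 1:
        return 0.0
    return float(entropy(counts, base=n_classes))


y = np.array([0, 0, 1, 1, 1, 2])
print(entropy_manual(y))  # ~0.92
print(entropy_scipy(y))   # same value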