Mirror of https://github.com/Doctorado-ML/mufs.git (synced 2025-08-18 00:55:53 +00:00)
Implement Metric methods and tests
This commit is contained in:
cfs/Entropy.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import numpy as np


# Entropy
def entropy(Y):
    """
    Also known as Shannon entropy.
    Reference: https://en.wikipedia.org/wiki/Entropy_(information_theory)
    """
    # Frequency of each distinct value (or row) in Y
    unique, count = np.unique(Y, return_counts=True, axis=0)
    # Empirical probability of each value
    prob = count / len(Y)
    # H(Y) = -sum_i p_i * log2(p_i)
    en = -np.sum(prob * np.log2(prob))
    return en


# Joint Entropy
def jEntropy(Y, X):
    """
    H(Y;X)
    Reference: https://en.wikipedia.org/wiki/Joint_entropy
    """
    # Stack Y and X column-wise so each row is a joint outcome (y, x)
    YX = np.c_[Y, X]
    return entropy(YX)


# Conditional Entropy
def cEntropy(Y, X):
    """
    Conditional entropy = joint entropy - entropy of X
    H(Y|X) = H(Y;X) - H(X)
    Reference: https://en.wikipedia.org/wiki/Conditional_entropy
    """
    return jEntropy(Y, X) - entropy(X)


# Information Gain
def gain(Y, X):
    """
    Information gain (mutual information), I(Y;X) = H(Y) - H(Y|X)
    Reference: https://en.wikipedia.org/wiki/Information_gain_in_decision_trees#Formal_definition
    """
    return entropy(Y) - cEntropy(Y, X)