From aadb185d2516ff25a3866ca367bdc5fdadff2eae Mon Sep 17 00:00:00 2001
From: Adam Li
Date: Mon, 6 Nov 2023 16:03:51 -0500
Subject: [PATCH] Consolidate and use the same function

Signed-off-by: Adam Li
---
 .spin/cmds.py                      |  2 +-
 sktree/tests/test_honest_forest.py | 12 ++++--------
 2 files changed, 5 insertions(+), 9 deletions(-)

diff --git a/.spin/cmds.py b/.spin/cmds.py
index c4e01bcad..8fff09521 100644
--- a/.spin/cmds.py
+++ b/.spin/cmds.py
@@ -137,7 +137,7 @@ def setup_submodule(forcesubmodule=False):
 def build(ctx, meson_args, jobs=None, clean=False, forcesubmodule=False, verbose=False):
     """Build scikit-tree using submodules.
 
-        git submodule update --recursive --remote
+    git submodule update --recursive --remote
 
     To update submodule wrt latest commits:
 
diff --git a/sktree/tests/test_honest_forest.py b/sktree/tests/test_honest_forest.py
index d249f5e0c..bccb04bf6 100644
--- a/sktree/tests/test_honest_forest.py
+++ b/sktree/tests/test_honest_forest.py
@@ -1,7 +1,6 @@
 import numpy as np
 import pytest
 from numpy.testing import assert_allclose, assert_array_almost_equal
-from scipy.stats import entropy
 from sklearn import datasets
 from sklearn.metrics import accuracy_score, r2_score, roc_auc_score
 from sklearn.model_selection import cross_val_score
@@ -358,16 +357,13 @@ def test_honest_forest_with_sklearn_trees_with_mi():
         Forest.fit(X, y)
 
         # compute MI
-        _, counts = np.unique(y, return_counts=True)
-        H_Y = entropy(counts, base=np.exp(1))
-
+        # _, counts = np.unique(y, return_counts=True)
+        # H_Y = entropy(counts, base=np.exp(1))
         sk_posterior = skForest.predict_proba(X)
-        H_YX = np.mean(entropy(sk_posterior, base=np.exp(1), axis=1))
-        sk_score = max(H_Y - H_YX, 0)
+        sk_score = _mutual_information(y, sk_posterior)
 
         posterior = Forest.predict_proba(X)
-        H_YX = np.mean(entropy(posterior, base=np.exp(1), axis=1))
-        score = max(H_Y - H_YX, 0)
+        score = _mutual_information(y, posterior)
 
         scores.append(score)
         sk_scores.append(sk_score)
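
Note: the test now calls a _mutual_information helper in place of the inline entropy
math it removes, but the helper's definition is not part of this diff. Purely as an
illustration of what the consolidated computation presumably does (the name, signature,
and docstring below are assumptions reconstructed from the removed lines), a minimal
sketch could look like this:

    import numpy as np
    from scipy.stats import entropy


    def _mutual_information(y_true, posterior):
        """Plug-in estimate of I(Y; X): H(Y) minus the mean entropy of the posteriors.

        Mirrors the inline computation removed from test_honest_forest.py above;
        not necessarily the helper actually used by the test suite.
        """
        # H(Y): entropy of the empirical class frequencies (natural-log base)
        _, counts = np.unique(y_true, return_counts=True)
        H_Y = entropy(counts, base=np.exp(1))

        # H(Y|X): average entropy of each sample's predicted class distribution
        H_YX = np.mean(entropy(posterior, base=np.exp(1), axis=1))

        # MI is non-negative, so clip small negative estimates to zero
        return max(H_Y - H_YX, 0)

Routing both the scikit-learn and honest-forest posteriors through one helper like this
keeps the two scores computed identically, which is the consolidation the subject line
refers to.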