From a89478b065c658d52f4bb6e142516001785f6138 Mon Sep 17 00:00:00 2001
From: gcattan
Date: Tue, 5 Mar 2024 06:55:59 +0100
Subject: [PATCH] Feat/mdm predict distances (#248)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* - move workaround on _predict_distances from distance to classification
  - override only the instance method and not the class function

* rename "distance_logeuclid_cpm" to "distance_logeuclid_to_convex_hull_cpm"
  add "weights_euclid_cpm"

* separate weights from distance functions

* complete tests

* add warnings

* [pre-commit.ci] auto fixes from pre-commit.com hooks

* Update pyriemann_qiskit/utils/distance.py

Co-authored-by: Quentin Barthélemy

* Update pyriemann_qiskit/utils/distance.py

Co-authored-by: Quentin Barthélemy

* Update pyriemann_qiskit/utils/distance.py

Co-authored-by: Quentin Barthélemy

* replace "_cpm"

* [pre-commit.ci] auto fixes from pre-commit.com hooks

* fix lint

* set _weights_distance as private
  complete api.rst

* [pre-commit.ci] auto fixes from pre-commit.com hooks

* Update pyriemann_qiskit/utils/mean.py

Co-authored-by: Quentin Barthélemy

* Update pyriemann_qiskit/utils/distance.py

Co-authored-by: Quentin Barthélemy

* Update pyriemann_qiskit/classification.py

Co-authored-by: Quentin Barthélemy

* Update pyriemann_qiskit/utils/distance.py

Co-authored-by: Quentin Barthélemy

* update api.rst

* replace pyQiskitOptimizer by :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`

* move and rename _predict_distance

* [pre-commit.ci] auto fixes from pre-commit.com hooks

* create module utils.utils

* [pre-commit.ci] auto fixes from pre-commit.com hooks

* improve code

* [pre-commit.ci] auto fixes from pre-commit.com hooks

---------

Co-authored-by: Gregoire Cattan
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Quentin Barthélemy
---
 benchmarks/light_benchmark.py                |   4 +-
 doc/api.rst                                  |  19 ++-
 examples/ERP/classify_P300_bi_quantum_mdm.py |   6 +-
 pyriemann_qiskit/classification.py           |  67 ++++++---
 pyriemann_qiskit/pipelines.py                |  16 +-
 pyriemann_qiskit/utils/__init__.py           |   2 +
 pyriemann_qiskit/utils/distance.py           | 149 ++++++++++++-------
 pyriemann_qiskit/utils/docplex.py            |   4 +-
 pyriemann_qiskit/utils/mean.py               |  53 ++-----
 pyriemann_qiskit/utils/utils.py              |  28 ++++
 tests/test_utils_distance.py                 |  22 ++-
 tests/test_utils_mean.py                     |  18 +--
 12 files changed, 243 insertions(+), 145 deletions(-)
 create mode 100644 pyriemann_qiskit/utils/utils.py

diff --git a/benchmarks/light_benchmark.py b/benchmarks/light_benchmark.py
index 9ad973e8..6db1fad5 100644
--- a/benchmarks/light_benchmark.py
+++ b/benchmarks/light_benchmark.py
@@ -82,13 +82,13 @@
 )

 pipelines["QMDM_mean"] = QuantumMDMWithRiemannianPipeline(
-    metric={"mean": "euclid_cpm", "distance": "euclid"},
+    metric={"mean": "qeuclid", "distance": "euclid"},
     quantum=True,
     regularization=Shrinkage(shrinkage=0.9),
 )

 pipelines["QMDM_dist"] = QuantumMDMWithRiemannianPipeline(
-    metric={"mean": "logeuclid", "distance": "logeuclid_cpm"}, quantum=True
+    metric={"mean": "logeuclid", "distance": "qlogeuclid_hull"}, quantum=True
 )

 pipelines["RG_LDA"] = make_pipeline(
diff --git a/doc/api.rst b/doc/api.rst
index 06e1d570..abe88dd0 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -49,7 +49,17 @@ Ensemble
 Utils function
 --------------

-Utils functions are low level functions for the `classification` module.
+Utils functions are low level functions for the `classification` and `pipelines` modules.
+
+Utils
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. _hyper_params_factory_api:
+.. currentmodule:: pyriemann_qiskit.utils.utils
+
+.. autosummary::
+    :toctree: generated/
+
+    is_qfunction

 Hyper-parameters generation
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -96,8 +106,8 @@ Mean
 .. autosummary::
     :toctree: generated/

-    mean_euclid_cpm
-    mean_logeuclid_cpm
+    qmean_euclid
+    qmean_logeuclid

 Distance
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -107,7 +117,8 @@ Distance
 .. autosummary::
     :toctree: generated/

-    distance_logeuclid_cpm
+    qdistance_logeuclid_to_convex_hull
+    weights_logeuclid_to_convex_hull

 Docplex
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/examples/ERP/classify_P300_bi_quantum_mdm.py b/examples/ERP/classify_P300_bi_quantum_mdm.py
index 8ecba8cc..a9209853 100644
--- a/examples/ERP/classify_P300_bi_quantum_mdm.py
+++ b/examples/ERP/classify_P300_bi_quantum_mdm.py
@@ -107,15 +107,15 @@

 pipelines = {}

-pipelines["mean=logeuclid_cpm/distance=logeuclid"] = QuantumMDMWithRiemannianPipeline(
+pipelines["mean=qlogeuclid/distance=logeuclid"] = QuantumMDMWithRiemannianPipeline(
     metric="mean", quantum=quantum
 )

-pipelines["mean=logeuclid/distance=logeuclid_cpm"] = QuantumMDMWithRiemannianPipeline(
+pipelines["mean=logeuclid/distance=qlogeuclid"] = QuantumMDMWithRiemannianPipeline(
     metric="distance", quantum=quantum
 )

-pipelines["Voting logeuclid_cpm"] = QuantumMDMVotingClassifier(quantum=quantum)
+pipelines["Voting qlogeuclid"] = QuantumMDMVotingClassifier(quantum=quantum)

 ##############################################################################
 # Run evaluation
diff --git a/pyriemann_qiskit/classification.py b/pyriemann_qiskit/classification.py
index 98cf4df6..ea2f0b45 100644
--- a/pyriemann_qiskit/classification.py
+++ b/pyriemann_qiskit/classification.py
@@ -8,6 +8,7 @@
 from scipy.special import softmax
 import logging
 import numpy as np
+from warnings import warn
 from pyriemann.classification import MDM
 from pyriemann_qiskit.datasets import get_feature_dimension
@@ -16,6 +17,8 @@
     NaiveQAOAOptimizer,
     set_global_optimizer,
 )
+from pyriemann_qiskit.utils.distance import distance_functions
+from pyriemann_qiskit.utils.utils import is_qfunction
 from qiskit.utils import QuantumInstance
 from qiskit.utils.quantum_instance import logger
 from qiskit_ibm_provider import IBMProvider, least_busy
@@ -583,8 +586,8 @@ class QuanticMDM(QuanticClassifierBase):

     """Quantum-enhanced MDM classifier

-    This class is a quantic implementation of the Minimum Distance to Mean (MDM)
-    [1]_, which can run with quantum optimization.
+    This class is a quantic implementation of the Minimum Distance to Mean
+    (MDM) [1]_, which can run with quantum optimization.

     Only log-Euclidean distance between trial and class prototypes is supported
     at the moment, but any type of metric can be used for centroid estimation.
@@ -600,37 +603,36 @@ class QuanticMDM(QuanticClassifierBase):

     Parameters
     ----------
-    metric : string | dict, default={"mean": 'logeuclid', "distance": 'cpm'}
+    metric : string | dict, default={"mean": 'logeuclid', \
+        "distance": 'qlogeuclid_hull'}
         The type of metric used for centroid and distance estimation.
         see `mean_covariance` for the list of supported metric.
         the metric could be a dict with two keys, `mean` and `distance` in
         order to pass different metrics for the centroid estimation and the
-        distance estimation. Typical usecase is to pass 'logeuclid' metric for
-        the mean in order to boost the computional speed and 'riemann' for the
-        distance in order to keep the good sensitivity for the classification.
-    quantum : bool (default: True)
-        Only applies if `metric` contains a cpm distance or mean.
+        distance estimation.
+    quantum : bool, default=True
+        Only applies if `metric` contains a quantic distance or mean.
         - If true will run on local or remote backend
         (depending on q_account_token value),
         - If false, will perform classical computing instead.
-    q_account_token : string (default:None)
+    q_account_token : string, default=None
         If `quantum` is True and `q_account_token` provided, the classification
         task will be running on a IBM quantum backend.
         If `load_account` is provided, the classifier will use the previous
         token saved with `IBMProvider.save_account()`.
-    verbose : bool (default:True)
+    verbose : bool, default=True
         If true, will output all intermediate results and logs.
-    shots : int (default:1024)
+    shots : int, default=1024
         Number of repetitions of each circuit, for sampling.
-    seed: int | None (default: None)
+    seed : int | None, default=None
         Random seed for the simulation
-    upper_bound : int (default: 7)
+    upper_bound : int, default=7
         The maximum integer value for matrix normalization.
-    regularization: MixinTransformer (defulat: None)
+    regularization : MixinTransformer, default=None
         Additional post-processing to regularize means.
-    classical_optimizer : OptimizationAlgorithm
-        An instance of OptimizationAlgorithm [3]_
+    classical_optimizer : OptimizationAlgorithm, default=CobylaOptimizer()
+        An instance of OptimizationAlgorithm [3]_.

     See Also
     --------
@@ -655,7 +657,7 @@

     def __init__(
         self,
-        metric={"mean": "logeuclid", "distance": "logeuclid_cpm"},
+        metric={"mean": "logeuclid", "distance": "qlogeuclid_hull"},
         quantum=True,
         q_account_token=None,
         verbose=True,
@@ -673,9 +675,40 @@ def __init__(
         self.regularization = regularization
         self.classical_optimizer = classical_optimizer

+    @staticmethod
+    def _override_predict_distance(mdm):
+        """Override the _predict_distances method of MDM.
+
+        We override the _predict_distances method inside MDM to allow the use
+        of qdistance.
+        This is due to the fact that the signature of qdistances is different
+        from the usual distance functions.
+        """
+
+        def _predict_distances(X):
+            if is_qfunction(mdm.metric_dist):
+                if "hull" in mdm.metric_dist:
+                    warn("qdistances to hull should not be used inside MDM")
+                else:
+                    warn(
+                        "q-distances for MDM are toy functions. "
+                        "Use pyRiemann distances instead."
+                    )
+                distance = distance_functions[mdm.metric_dist]
+                centroids = np.array(mdm.covmeans_)
+                weights = [distance(centroids, x) for x in X]
+                return 1 - np.array(weights)
+            else:
+                return MDM._predict_distances(mdm, X)
+
+        return _predict_distances
+
     def _init_algo(self, n_features):
         self._log("Quantic MDM initiating algorithm")
         classifier = MDM(metric=self.metric)
+        classifier._predict_distances = QuanticMDM._override_predict_distance(
+            classifier
+        )
         if self.quantum:
             self._log("Using NaiveQAOAOptimizer")
             self._optimizer = NaiveQAOAOptimizer(
diff --git a/pyriemann_qiskit/pipelines.py b/pyriemann_qiskit/pipelines.py
index fb4ac338..4c3afa5e 100644
--- a/pyriemann_qiskit/pipelines.py
+++ b/pyriemann_qiskit/pipelines.py
@@ -8,7 +8,7 @@
 from pyriemann.estimation import XdawnCovariances, ERPCovariances
 from pyriemann.tangentspace import TangentSpace
 from pyriemann.preprocessing import Whitening
-from pyriemann_qiskit.utils.mean import is_cpm_mean
+from pyriemann_qiskit.utils.utils import is_qfunction
 from pyriemann_qiskit.utils.filtering import NoDimRed
 from pyriemann_qiskit.utils.hyper_params_factory import (
     # gen_zz_feature_map,
@@ -312,7 +312,7 @@ class QuantumMDMWithRiemannianPipeline(BasePipeline):

     Parameters
     ----------
-    metric : string | dict, default={"mean": 'logeuclid', "distance": 'logeuclid_cpm'}
+    metric : string | dict, default={"mean": 'logeuclid', "distance": 'qlogeuclid_hull'}
         The type of metric used for centroid and distance estimation.
     quantum : bool (default: True)
         - If true will run on local or remote backend
@@ -361,7 +361,7 @@ class QuantumMDMWithRiemannianPipeline(BasePipeline):

     def __init__(
         self,
-        metric={"mean": "logeuclid", "distance": "logeuclid_cpm"},
+        metric={"mean": "logeuclid", "distance": "qlogeuclid_hull"},
         quantum=True,
         q_account_token=None,
         verbose=True,
@@ -384,7 +384,7 @@ def __init__(
     def _create_pipe(self):
         print(self.metric)
         print(self.metric["mean"])
-        if is_cpm_mean(self.metric["mean"]):
+        if is_qfunction(self.metric["mean"]):
             if self.quantum:
                 covariances = XdawnCovariances(
                     nfilter=1, estimator="scm", xdawn_estimator="lwf"
@@ -418,8 +418,8 @@ class QuantumMDMVotingClassifier(BasePipeline):

     Voting classifier with two configurations of QuantumMDMWithRiemannianPipeline:

-    - with mean = euclid_cpm and distance = euclid,
-    - with mean = logeuclid and distance = logeuclid_cpm.
+    - with mean = qeuclid and distance = euclid,
+    - with mean = logeuclid and distance = qlogeuclid_hull.

     Parameters
     ----------
@@ -472,7 +472,7 @@ def __init__(

     def _create_pipe(self):
         clf_mean_logeuclid_dist_cpm = QuantumMDMWithRiemannianPipeline(
-            {"mean": "logeuclid", "distance": "logeuclid_cpm"},
+            {"mean": "logeuclid", "distance": "qlogeuclid_hull"},
             self.quantum,
             self.q_account_token,
             self.verbose,
@@ -480,7 +480,7 @@ def _create_pipe(self):
             self.upper_bound,
         )
         clf_mean_cpm_dist_euclid = QuantumMDMWithRiemannianPipeline(
-            {"mean": "euclid_cpm", "distance": "euclid"},
+            {"mean": "qeuclid", "distance": "euclid"},
             self.quantum,
             self.q_account_token,
             self.verbose,
diff --git a/pyriemann_qiskit/utils/__init__.py b/pyriemann_qiskit/utils/__init__.py
index b4704ce1..545db409 100644
--- a/pyriemann_qiskit/utils/__init__.py
+++ b/pyriemann_qiskit/utils/__init__.py
@@ -20,6 +20,7 @@
 )
 from . import distance
 from . import mean
+from . import utils

 __all__ = [
     "hyper_params_factory",
@@ -45,4 +46,5 @@
     "filter_subjects_by_incomplete_results",
     "add_moabb_dataframe_results_to_caches",
     "convert_caches_to_dataframes",
+    "utils",
 ]
diff --git a/pyriemann_qiskit/utils/distance.py b/pyriemann_qiskit/utils/distance.py
index eb2b8df9..28fdc732 100644
--- a/pyriemann_qiskit/utils/distance.py
+++ b/pyriemann_qiskit/utils/distance.py
@@ -1,8 +1,11 @@
 import numpy as np
 from docplex.mp.model import Model
 from pyriemann_qiskit.utils.docplex import ClassicalOptimizer, get_global_optimizer
-from pyriemann.classification import MDM
-from pyriemann.utils.distance import distance_functions, distance_logeuclid
+from pyriemann.utils.distance import (
+    distance_functions,
+    distance_logeuclid,
+    distance_euclid,
+)
 from pyriemann.utils.base import logm
 from pyriemann.utils.mean import mean_logeuclid
 from typing_extensions import deprecated
@@ -10,13 +13,13 @@

 @deprecated(
     "logeucl_dist_convex is deprecated and will be removed in 0.3.0; "
-    "please use distance_logeuclid_cpm."
+    "please use weights_logeuclid_to_convex_hull."
 )
 def logeucl_dist_convex():
     pass


-def distance_logeuclid_cpm(A, B, optimizer=ClassicalOptimizer(), return_weights=False):
+def qdistance_logeuclid_to_convex_hull(A, B, optimizer=ClassicalOptimizer()):
     """Log-Euclidean distance to a convex hull of SPD matrices.

     Log-Euclidean distance between a SPD matrix B and the convex hull of a set
@@ -29,10 +32,9 @@ def distance_logeuclid_cpm(A, B, optimizer=ClassicalOptimizer(), return_weights=
         Set of SPD matrices.
     B : ndarray, shape (n_channels, n_channels)
         SPD matrix.
-    optimizer: pyQiskitOptimizer
-        An instance of pyQiskitOptimizer.
-    return_weights : bool, default=False
-        Whether to return optimized weights.
+    optimizer : pyQiskitOptimizer, default=ClassicalOptimizer()
+        An instance of
+        :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.

     Returns
     -------
@@ -40,15 +42,59 @@ def distance_logeuclid_cpm(A, B, optimizer=ClassicalOptimizer(), return_weights=
     Log-Euclidean distance between the SPD matrix B and the convex hull
     of the set of SPD matrices A, defined as the distance between B and the
     matrix of the convex hull closest to matrix B.
+
+    Notes
+    -----
+    .. versionadded:: 0.2.0
+
+    References
+    ----------
+    .. [1] \
+        K. Zhao, A. Wiliem, S. Chen, and B. C. Lovell,
+        ‘Convex Class Model on Symmetric Positive Definite Manifolds’,
+        Image and Vision Computing, 2019.
+    .. [2] \
+        http://ibmdecisionoptimization.github.io/docplex-doc/cp/creating_model.html
+
+    """
+    weights = weights_logeuclid_to_convex_hull(A, B, optimizer)
+    # compute nearest matrix
+    C = mean_logeuclid(A, weights)
+    distance = distance_logeuclid(C, B)
+
+    return distance
+
+
+def weights_logeuclid_to_convex_hull(A, B, optimizer=ClassicalOptimizer()):
+    """Weights for Log-Euclidean distance to a convex hull of SPD matrices.
+
+    Weights for Log-Euclidean distance between a SPD matrix B
+    and the convex hull of a set of SPD matrices A [1]_,
+    formulated as a Constraint Programming Model (CPM) [2]_.
+
+    Parameters
+    ----------
+    A : ndarray, shape (n_matrices, n_channels, n_channels)
+        Set of SPD matrices.
+    B : ndarray, shape (n_channels, n_channels)
+        SPD matrix.
+    optimizer : pyQiskitOptimizer, default=ClassicalOptimizer()
+        An instance of
+        :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.
+
+    Returns
+    -------
     weights : ndarray, shape (n_matrices,)
-        If return_weights is True,
-        it returns the optimized weights for the set of SPD matrices A.
         Using these weights, the weighted Log-Euclidean mean of set A provides
         the matrix of the convex hull closest to matrix B.

     Notes
     -----
     .. versionadded:: 0.0.4
+    .. versionchanged:: 0.2.0
+        Rename from `logeucl_dist_convex` to `weights_logeuclid_to_convex_hull`.
+        Add linear constraint on weights (sum = 1).

     References
     ----------
@@ -68,58 +114,48 @@ def log_prod(m1, m2):
     prob = Model()
     optimizer = get_global_optimizer(optimizer)
-    # should be part of the optimizer
     w = optimizer.get_weights(prob, matrices)

     wtLogAtLogAw = prob.sum(
         w[i] * w[j] * log_prod(A[i], A[j]) for i in matrices for j in matrices
     )
     wLogBLogA = prob.sum(w[i] * log_prod(B, A[i]) for i in matrices)

-    objectives = wtLogAtLogAw - 2 * wLogBLogA
+    objective = wtLogAtLogAw - 2 * wLogBLogA

-    prob.set_objective("min", objectives)
+    prob.set_objective("min", objective)
     prob.add_constraint(prob.sum(w) == 1)

     weights = optimizer.solve(prob, reshape=False)

-    # compute nearest matrix and distance
-    C = mean_logeuclid(A, weights)
-    distance = distance_logeuclid(C, B)
-
-    if return_weights:
-        return distance, weights
-    return distance
-
+    return weights

-_mdm_predict_distances_original = MDM._predict_distances

+def _weights_distance(
+    A, B, distance=distance_logeuclid, optimizer=ClassicalOptimizer()
+):
+    """`distance` weights between a SPD and a set of SPD matrices.

-def predict_distances(mdm, X):
-    if mdm.metric_dist == "logeuclid_cpm":
-        centroids = np.array(mdm.covmeans_)
-
-        weights = [
-            distance_logeuclid_cpm(centroids, x, return_weights=True)[1] for x in X
-        ]
-        return 1 - np.array(weights)
-    else:
-        return _mdm_predict_distances_original(mdm, X)
-
-
-def is_cpm_dist(string):
-    """Indicates if the distance is a CPM distance.
-
-    Return True is "string" represents a Constraint Programming Model (CPM) [1]_
-    distance available in the library.
+    `distance` weights between a SPD matrix B and each SPD matrix inside A,
+    formulated as a Constraint Programming Model (CPM) [1]_.
+    The highest weight corresponds to the SPD matrix of A closest to B.

     Parameters
     ----------
-    string: str
-        A string representation of the distance.
+    A : ndarray, shape (n_matrices, n_channels, n_channels)
+        Set of SPD matrices.
+    B : ndarray, shape (n_channels, n_channels)
+        SPD matrix.
+    distance : Callable[[ndarray, ndarray], float]
+        One of the pyRiemann distance functions.
+    optimizer : pyQiskitOptimizer, default=ClassicalOptimizer()
+        An instance of :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.

     Returns
     -------
-    is_cpm_dist : boolean
-        True if "string" represents a CPM distance available in the library.
+    weights : ndarray, shape (n_matrices,)
+        Optimized weights for the set of SPD matrices A.
+        The highest weight corresponds to the SPD matrix of A closest to B.

     Notes
     -----
@@ -131,14 +167,25 @@
     http://ibmdecisionoptimization.github.io/docplex-doc/cp/creating_model.html

     """
-    return "_cpm" in string and string in distance_functions
+    n_matrices, _, _ = A.shape
+    matrices = range(n_matrices)
+
+    prob = Model()
+    optimizer = get_global_optimizer(optimizer)
+    w = optimizer.get_weights(prob, matrices)
+
+    objective = prob.sum(w[i] * distance(B, A[i]) for i in matrices)

-MDM._predict_distances = predict_distances
+    prob.set_objective("min", objective)
     prob.add_constraint(prob.sum(w) == 1)

     weights = optimizer.solve(prob, reshape=False)

-# This is only for validation inside the MDM.
-# In fact, we override the _predict_distances method
-# inside MDM to directly use distance_logeuclid_cpm when the metric is "logeuclid_cpm"
-# This is due to the fact the the signature of this method is different from
-# the usual distance functions.
-distance_functions["logeuclid_cpm"] = distance_logeuclid_cpm
+    return weights
+
+
+distance_functions["qlogeuclid_hull"] = weights_logeuclid_to_convex_hull
+distance_functions["qeuclid"] = lambda A, B: _weights_distance(A, B, distance_euclid)
+distance_functions["qlogeuclid"] = lambda A, B: _weights_distance(
+    A, B, distance_logeuclid
+)
diff --git a/pyriemann_qiskit/utils/docplex.py b/pyriemann_qiskit/utils/docplex.py
index 5e94b2d6..5e87aa84 100644
--- a/pyriemann_qiskit/utils/docplex.py
+++ b/pyriemann_qiskit/utils/docplex.py
@@ -25,7 +25,7 @@ def set_global_optimizer(optimizer):
     Parameters
     ----------
     optimizer: pyQiskitOptimizer
-        An instance of pyQiskitOptimizer.
+        An instance of :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.

     Notes
     -----
@@ -40,7 +40,7 @@ def get_global_optimizer(default):
     Parameters
     ----------
     default: pyQiskitOptimizer
-        An instance of pyQiskitOptimizer.
+        An instance of :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.
         It will be returned by default if the global optimizer is None.

     Returns
diff --git a/pyriemann_qiskit/utils/mean.py b/pyriemann_qiskit/utils/mean.py
index e169efd0..a11c642f 100644
--- a/pyriemann_qiskit/utils/mean.py
+++ b/pyriemann_qiskit/utils/mean.py
@@ -8,13 +8,13 @@

 @deprecated(
     "fro_mean_convex is deprecated and will be removed in 0.3.0; "
-    "please use mean_euclid_cpm."
+    "please use qmean_euclid."
 )
 def fro_mean_convex():
     pass


-def mean_euclid_cpm(X, sample_weight=None, optimizer=ClassicalOptimizer()):
+def qmean_euclid(X, sample_weight=None, optimizer=ClassicalOptimizer()):
     """Euclidean mean with Constraint Programming Model.

     Constraint Programming Model (CPM) [1]_ formulation of the mean
@@ -27,8 +27,8 @@ def mean_euclid_cpm(X, sample_weight=None, optimizer=ClassicalOptimizer()):
     sample_weights : None | ndarray, shape (n_matrices,), default=None
         Weights for each matrix. Never used in practice.
         It is kept only for standardization with pyRiemann.
-    optimizer : pyQiskitOptimizer
-        An instance of pyQiskitOptimizer.
+    optimizer : pyQiskitOptimizer, default=ClassicalOptimizer()
+        An instance of :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.

     Returns
     -------
@@ -41,8 +41,8 @@ def mean_euclid_cpm(X, sample_weight=None, optimizer=ClassicalOptimizer()):
     .. versionchanged:: 0.0.4
         Add regularization of the results.
     .. versionchanged:: 0.2.0
-        Rename from `fro_mean_convex` to `mean_euclid_cpm`
-        Remove shrinkage
+        Rename from `fro_mean_convex` to `qmean_euclid`.
+        Remove shrinkage.

     References
     ----------
@@ -73,7 +73,7 @@ def _dist_euclid(A, B):
     return result


-def mean_logeuclid_cpm(
+def qmean_logeuclid(
     X, sample_weight=None, optimizer=ClassicalOptimizer(optimizer=ADMMOptimizer())
 ):
     """Log-Euclidean mean with Constraint Programming Model.
@@ -88,8 +88,8 @@ def mean_logeuclid_cpm(
     sample_weights : None | ndarray, shape (n_matrices,), default=None
         Weights for each matrix. Never used in practice.
         It is kept only for standardization with pyRiemann.
-    optimizer : pyQiskitOptimizer
-        An instance of pyQiskitOptimizer.
+    optimizer : pyQiskitOptimizer, default=ClassicalOptimizer()
+        An instance of :class:`pyriemann_qiskit.utils.docplex.pyQiskitOptimizer`.
     Returns
     -------
@@ -112,38 +112,9 @@
     """
     log_X = logm(X)
-    result = mean_euclid_cpm(log_X, sample_weight, optimizer)
+    result = qmean_euclid(log_X, sample_weight, optimizer)
     return expm(result)


-def is_cpm_mean(string):
-    """Indicates if the mean is a CPM mean.
-
-    Return True is "string" represents a Constraint Programming Model (CPM) [1]_
-    mean available in the library.
-
-    Parameters
-    ----------
-    string: str
-        A string representation of the mean.
-
-    Returns
-    -------
-    is_cpm_mean : boolean
-        True if "string" represents a CPM mean aailable in the library.
-
-    Notes
-    -----
-    .. versionadded:: 0.2.0
-
-    References
-    ----------
-    .. [1] \
-        http://ibmdecisionoptimization.github.io/docplex-doc/cp/creating_model.html
-
-    """
-    return "_cpm" in string and string in mean_functions
-
-
-mean_functions["euclid_cpm"] = mean_euclid_cpm
-mean_functions["logeuclid_cpm"] = mean_logeuclid_cpm
+mean_functions["qeuclid"] = qmean_euclid
+mean_functions["qlogeuclid"] = qmean_logeuclid
diff --git a/pyriemann_qiskit/utils/utils.py b/pyriemann_qiskit/utils/utils.py
new file mode 100644
index 00000000..6ffaf91a
--- /dev/null
+++ b/pyriemann_qiskit/utils/utils.py
@@ -0,0 +1,28 @@
+from .mean import mean_functions
+from .distance import distance_functions
+
+
+def is_qfunction(string):
+    """Indicates if the function is a mean or a distance introduced in this library.
+
+    Return True if "string" represents a
+    mean or a distance introduced in this library.
+
+    Parameters
+    ----------
+    string: str
+        A string representation of the mean/distance.
+
+    Returns
+    -------
+    is_qfunction : boolean
+        True if "string" represents a mean or a distance introduced in this library.
+
+    Notes
+    -----
+    .. versionadded:: 0.2.0
+
+    """
+    return string[0] == "q" and (
+        (string in mean_functions) or (string in distance_functions)
+    )
diff --git a/tests/test_utils_distance.py b/tests/test_utils_distance.py
index 757781dc..9c08e622 100644
--- a/tests/test_utils_distance.py
+++ b/tests/test_utils_distance.py
@@ -4,18 +4,24 @@
     ClassicalOptimizer,
     NaiveQAOAOptimizer,
 )
-from pyriemann_qiskit.utils.distance import distance_logeuclid_cpm
+from pyriemann_qiskit.utils.distance import weights_logeuclid_to_convex_hull
+from pyriemann_qiskit.classification import QuanticMDM
 from pyriemann_qiskit.datasets import get_mne_sample
-from pyriemann.classification import MDM
 from pyriemann.estimation import XdawnCovariances
 from sklearn.pipeline import make_pipeline
 from sklearn.model_selection import StratifiedKFold, cross_val_score


-def test_performance():
-    metric = {"mean": "logeuclid", "distance": "logeuclid_cpm"}
-
-    clf = make_pipeline(XdawnCovariances(), MDM(metric=metric))
+@pytest.mark.parametrize(
+    "metric",
+    [
+        {"mean": "euclid", "distance": "qeuclid"},
+        {"mean": "logeuclid", "distance": "qlogeuclid"},
+        {"mean": "logeuclid", "distance": "qlogeuclid_hull"},
+    ],
+)
+def test_performance(metric):
+    clf = make_pipeline(XdawnCovariances(), QuanticMDM(metric=metric, quantum=False))
     skf = StratifiedKFold(n_splits=3)
     covset, labels = get_mne_sample()
     score = cross_val_score(clf, covset, labels, cv=skf, scoring="roc_auc")
@@ -23,11 +29,11 @@ def test_performance():

 @pytest.mark.parametrize("optimizer", [ClassicalOptimizer(), NaiveQAOAOptimizer()])
-def test_distance_logeuclid_cpm(optimizer):
+def test_distance_logeuclid_to_convex_hull_cpm(optimizer):
     X_0 = np.array([[0.9, 1.1], [0.9, 1.1]])
     X_1 = X_0 + 1
     X = np.stack((X_0, X_1))
     y = (X_0 + X_1) / 3
-    _, weights = distance_logeuclid_cpm(X, y, optimizer=optimizer, return_weights=True)
+    weights = weights_logeuclid_to_convex_hull(X, y, optimizer=optimizer)
     distances = 1 - weights
     assert distances.argmin() == 0
diff --git a/tests/test_utils_mean.py b/tests/test_utils_mean.py
index 2854ad5d..c14975b4 100644
--- a/tests/test_utils_mean.py
+++ b/tests/test_utils_mean.py
@@ -4,7 +4,7 @@
 from pyriemann.estimation import XdawnCovariances, Shrinkage
 from sklearn.pipeline import make_pipeline
 from sklearn.model_selection import StratifiedKFold, cross_val_score
-from pyriemann_qiskit.utils.mean import mean_euclid_cpm, mean_logeuclid_cpm
+from pyriemann_qiskit.utils.mean import qmean_euclid, qmean_logeuclid
 from pyriemann_qiskit.utils import ClassicalOptimizer, NaiveQAOAOptimizer
 from pyriemann_qiskit.classification import QuanticMDM
 from pyriemann_qiskit.datasets import get_mne_sample
@@ -14,8 +14,8 @@
     "kernel",
     [
-        ({"mean": "euclid_cpm", "distance": "euclid"}, Shrinkage(shrinkage=0.9)),
-        ({"mean": "logeuclid_cpm", "distance": "logeuclid"}, Shrinkage(shrinkage=0.9)),
+        ({"mean": "qeuclid", "distance": "euclid"}, Shrinkage(shrinkage=0.9)),
+        ({"mean": "qlogeuclid", "distance": "logeuclid"}, Shrinkage(shrinkage=0.9)),
     ],
 )
 def test_performance(kernel):
@@ -33,7 +33,7 @@
 @pytest.mark.parametrize(
-    "means", [(mean_euclid, mean_euclid_cpm), (mean_logeuclid, mean_logeuclid_cpm)]
+    "means", [(mean_euclid, qmean_euclid), (mean_logeuclid, qmean_logeuclid)]
 )
 def test_analytic_vs_cpm_mean(get_covmats, means):
     """Test that analytic and cpm mean returns close results"""
@@ -45,7 +45,7 @@
     assert np.allclose(C, C_analytic, atol=0.00001)


-@pytest.mark.parametrize("mean", [mean_euclid_cpm, mean_logeuclid_cpm])
+@pytest.mark.parametrize("mean", [qmean_euclid, qmean_logeuclid])
 def test_mean_cpm_shape(get_covmats, mean):
     """Test the shape of mean"""
     n_trials, n_channels = 5, 3
@@ -58,7 +58,7 @@
 @pytest.mark.parametrize(
     "optimizer",
     [ClassicalOptimizer(optimizer=ADMMOptimizer()), NaiveQAOAOptimizer()],
 )
-@pytest.mark.parametrize("mean", [mean_euclid_cpm])
+@pytest.mark.parametrize("mean", [qmean_euclid])
 def test_mean_cpm_all_zeros(optimizer, mean):
     """Test that the mean of covariance matrices containing zeros
     is a matrix filled with zeros"""
@@ -72,7 +72,7 @@
 @pytest.mark.parametrize(
     "optimizer",
     [ClassicalOptimizer(optimizer=ADMMOptimizer()), NaiveQAOAOptimizer()],
 )
-@pytest.mark.parametrize("mean", [mean_euclid_cpm])
+@pytest.mark.parametrize("mean", [qmean_euclid])
 def test_mean_cpm_all_ones(optimizer, mean):
     """Test that the mean of covariance matrices containing ones
     is a matrix filled with ones"""
@@ -86,7 +86,7 @@
 @pytest.mark.parametrize(
     "optimizer",
     [ClassicalOptimizer(optimizer=ADMMOptimizer()), NaiveQAOAOptimizer()],
 )
-@pytest.mark.parametrize("mean", [mean_euclid_cpm])
+@pytest.mark.parametrize("mean", [qmean_euclid])
 def test_mean_cpm_all_equals(optimizer, mean):
     """Test that the mean of covariance matrices filled with the same value
     is a matrix identical to the input"""
@@ -100,7 +100,7 @@
 @pytest.mark.parametrize(
     "optimizer",
     [ClassicalOptimizer(optimizer=ADMMOptimizer()), NaiveQAOAOptimizer()],
 )
-@pytest.mark.parametrize("mean", [mean_euclid_cpm])
+@pytest.mark.parametrize("mean", [qmean_euclid])
 def test_mean_cpm_mixed(optimizer, mean):
     """Test that the mean of covariances matrices with zero and ones
     is a matrix filled with 0.5"""