diff --git a/docs/source/manuals/methods.md b/docs/source/manuals/methods.md
index 1a9a2dc..9a02179 100644
--- a/docs/source/manuals/methods.md
+++ b/docs/source/manuals/methods.md
@@ -447,7 +447,7 @@ The [](quapy.method.composable) module allows the composition of quantification
 ```sh
 pip install --upgrade pip setuptools wheel
 pip install "jax[cpu]"
-pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4"
+pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.5"
 ```

 ### Basics
@@ -455,9 +455,16 @@ pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4"
 The composition of a method is implemented through the [](quapy.method.composable.ComposableQuantifier) class. Its documentation also features an example to get you started in composing your own methods.

 ```python
+from quapy.method.composable import (
+    ComposableQuantifier,
+    TikhonovRegularized,
+    LeastSquaresLoss,
+    ClassRepresentation,
+)
+
 ComposableQuantifier( # ordinal ACC, as proposed by Bunse et al., 2022
-  TikhonovRegularized(LeastSquaresLoss(), 0.01),
-  ClassTransformer(RandomForestClassifier(oob_score=True))
+    TikhonovRegularized(LeastSquaresLoss(), 0.01),
+    ClassRepresentation(RandomForestClassifier(oob_score=True))
 )
 ```

@@ -484,16 +491,16 @@ You can use the [](quapy.method.composable.CombinedLoss) to create arbitrary, we

 ### Feature transformations

-- [](quapy.method.composable.ClassTransformer)
-- [](quapy.method.composable.DistanceTransformer)
-- [](quapy.method.composable.HistogramTransformer)
-- [](quapy.method.composable.EnergyKernelTransformer)
-- [](quapy.method.composable.GaussianKernelTransformer)
-- [](quapy.method.composable.LaplacianKernelTransformer)
-- [](quapy.method.composable.GaussianRFFKernelTransformer)
+- [](quapy.method.composable.ClassRepresentation)
+- [](quapy.method.composable.DistanceRepresentation)
+- [](quapy.method.composable.HistogramRepresentation)
+- [](quapy.method.composable.EnergyKernelRepresentation)
+- [](quapy.method.composable.GaussianKernelRepresentation)
+- [](quapy.method.composable.LaplacianKernelRepresentation)
+- [](quapy.method.composable.GaussianRFFKernelRepresentation)

 ```{hint}
-The [](quapy.method.composable.ClassTransformer) requires the classifier to have a property `oob_score==True` and to produce a property `oob_decision_function` during fitting. In [scikit-learn](https://scikit-learn.org/), this requirement is fulfilled by any bagging classifier, such as random forests. Any other classifier needs to be cross-validated through the [](quapy.method.composable.CVClassifier).
+The [](quapy.method.composable.ClassRepresentation) requires the classifier to have a property `oob_score==True` and to produce a property `oob_decision_function` during fitting. In [scikit-learn](https://scikit-learn.org/), this requirement is fulfilled by any bagging classifier, such as random forests. Any other classifier needs to be cross-validated through the [](quapy.method.composable.CVClassifier).
 ```

diff --git a/examples/14.composable_methods.py b/examples/14.composable_methods.py
index 5ffcb94..e8340d4 100644
--- a/examples/14.composable_methods.py
+++ b/examples/14.composable_methods.py
@@ -1,6 +1,6 @@
 """
 This example illustrates the composition of quantification methods from
-arbitrary loss functions and feature transformations. It will extend the basic
+arbitrary loss functions and feature representations. It will extend the basic
 example on the usage of quapy with this composition.

 This example requires the installation of qunfold, the back-end of QuaPy's
@@ -8,7 +8,7 @@ composition module:

     pip install --upgrade pip setuptools wheel
     pip install "jax[cpu]"
-    pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4"
+    pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.5"
 """

 import numpy as np
@@ -24,20 +24,20 @@ data = qp.data.preprocessing.text2tfidf(
 training, testing = data.train_test

 # We start by recovering PACC from its building blocks, a LeastSquaresLoss and
-# a probabilistic ClassTransformer. A 5-fold cross-validation is implemented
+# a probabilistic ClassRepresentation. A 5-fold cross-validation is implemented
 # through a CVClassifier.

 from quapy.method.composable import (
     ComposableQuantifier,
     LeastSquaresLoss,
-    ClassTransformer,
+    ClassRepresentation,
     CVClassifier,
 )
 from sklearn.linear_model import LogisticRegression

 pacc = ComposableQuantifier(
     LeastSquaresLoss(),
-    ClassTransformer(
+    ClassRepresentation(
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),
@@ -63,7 +63,7 @@ from quapy.method.composable import HellingerSurrogateLoss

 model = ComposableQuantifier(
     HellingerSurrogateLoss(), # the loss is different from before
-    ClassTransformer( # we use the same transformer
+    ClassRepresentation( # we use the same representation
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),
@@ -79,7 +79,7 @@ absolute_errors = qp.evaluation.evaluate(
 print(f"MAE = {np.mean(absolute_errors):.4f}+-{np.std(absolute_errors):.4f}")

 # In general, any composed method solves a linear system of equations by
-# minimizing the loss after transforming the data. Methods of this kind include
+# minimizing the loss after representing the data. Methods of this kind include
 # ACC, PACC, HDx, HDy, and many other well-known methods, as well as an
 # unlimited number of re-combinations of their building blocks.

@@ -93,18 +93,18 @@ from quapy.method.composable import CombinedLoss

 model = ComposableQuantifier(
     CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss()),
-    ClassTransformer(
+    ClassRepresentation(
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),
 )

-from qunfold.quapy import QuaPyWrapper
-from qunfold import GenericMethod
+from quapy.method.composable import QUnfoldWrapper
+from qunfold import LinearMethod

-model = QuaPyWrapper(GenericMethod(
+model = QUnfoldWrapper(LinearMethod(
     CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss()),
-    ClassTransformer(
+    ClassRepresentation(
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),
@@ -115,7 +115,7 @@ model = QuaPyWrapper(GenericMethod(

 param_grid = {
     "loss__weights": [ (w, 1-w) for w in [.1, .5, .9] ],
-    "transformer__classifier__estimator__C": [1e-1, 1e1],
+    "representation__classifier__estimator__C": [1e-1, 1e1],
 }

 grid_search = qp.model_selection.GridSearchQ(
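For orientation, the sketch below shows how the renamed building blocks touched by this patch fit together end to end. It is a minimal, illustrative sketch only: the composable names (`ComposableQuantifier`, `TikhonovRegularized`, `LeastSquaresLoss`, `ClassRepresentation`) come from the patch itself, while the `LabelledCollection`/`fit`/`quantify` workflow, the `random_state` argument, and the toy data are assumptions based on QuaPy's standard API rather than part of either changed file.

```python
# Minimal sketch of the post-rename (qunfold v0.1.5) building blocks.
# Assumptions: QuaPy's standard LabelledCollection/fit/quantify workflow;
# the toy data below is purely illustrative.
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from quapy.data import LabelledCollection
from quapy.method.composable import (
    ComposableQuantifier,
    TikhonovRegularized,
    LeastSquaresLoss,
    ClassRepresentation,
)

# three ordinal classes whose prevalence differs between training and test
rng = np.random.default_rng(0)
X_train = rng.normal(size=(600, 2)) + np.repeat(np.arange(3), 200)[:, None]
y_train = np.repeat(np.arange(3), 200)
X_test = rng.normal(size=(300, 2)) + np.repeat(np.arange(3), [30, 90, 180])[:, None]

# ordinal ACC, as in the docs snippet above: Tikhonov-regularized least squares
# on top of a bagging classifier that exposes out-of-bag decision scores
model = ComposableQuantifier(
    TikhonovRegularized(LeastSquaresLoss(), 0.01),
    ClassRepresentation(RandomForestClassifier(oob_score=True, random_state=0)),
)
model.fit(LabelledCollection(X_train, y_train))

print(model.quantify(X_test))  # estimated class prevalences of the test sample
```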