Merge branch 'mirkobunse-devel' into devel
commit dbd3eafeba
@@ -28,7 +28,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4"
+          python -m pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@main"
           python -m pip install -e .[bayes,tests]
       - name: Test with unittest
         run: python -m unittest

@@ -47,7 +47,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel "jax[cpu]"
-          python -m pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4"
+          python -m pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@main"
           python -m pip install -e .[neural,docs]
      - name: Build documentation
        run: sphinx-build -M html docs/source docs/build

@@ -465,9 +465,16 @@ pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.5"
 The composition of a method is implemented through the [](quapy.method.composable.ComposableQuantifier) class. Its documentation also features an example to get you started in composing your own methods.
 
 ```python
+from quapy.method.composable import (
+    ComposableQuantifier,
+    TikhonovRegularized,
+    LeastSquaresLoss,
+    ClassRepresentation,
+)
+
 ComposableQuantifier( # ordinal ACC, as proposed by Bunse et al., 2022
-    TikhonovRegularized(LeastSquaresLoss(), 0.01),
-    ClassTransformer(RandomForestClassifier(oob_score=True))
+    TikhonovRegularized(LeastSquaresLoss(), 0.01),
+    ClassRepresentation(RandomForestClassifier(oob_score=True))
 )
 ```
 
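For context on the documentation snippet above, here is a minimal end-to-end sketch of how such a composed quantifier is typically used. It assumes QuaPy's usual fit/quantify protocol (the exact `fit` signature may differ between QuaPy versions) and fabricates a tiny synthetic dataset purely to keep the sketch self-contained; the variable names are illustrative, not part of this diff.

```python
import numpy as np
from sklearn.ensemble import RandomForestClassifier
import quapy as qp
from quapy.method.composable import (
    ComposableQuantifier,
    TikhonovRegularized,
    LeastSquaresLoss,
    ClassRepresentation,
)

# a small synthetic 3-class dataset, only to make the sketch runnable
X = np.random.RandomState(0).randn(300, 5)
y = np.random.RandomState(1).randint(0, 3, size=300)
data = qp.data.LabelledCollection(X, y)
training, testing = data.split_stratified(train_prop=0.7)

o_acc = ComposableQuantifier(  # ordinal ACC, as in the example above
    TikhonovRegularized(LeastSquaresLoss(), 0.01),
    ClassRepresentation(RandomForestClassifier(oob_score=True)),
)
o_acc.fit(training)                        # fit on a LabelledCollection
print(o_acc.quantify(testing.instances))   # estimated class prevalences
```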
|
@@ -494,16 +501,16 @@ You can use the [](quapy.method.composable.CombinedLoss) to create arbitrary, we
 
 ### Feature transformations
 
-- [](quapy.method.composable.ClassTransformer)
-- [](quapy.method.composable.DistanceTransformer)
-- [](quapy.method.composable.HistogramTransformer)
-- [](quapy.method.composable.EnergyKernelTransformer)
-- [](quapy.method.composable.GaussianKernelTransformer)
-- [](quapy.method.composable.LaplacianKernelTransformer)
-- [](quapy.method.composable.GaussianRFFKernelTransformer)
+- [](quapy.method.composable.ClassRepresentation)
+- [](quapy.method.composable.DistanceRepresentation)
+- [](quapy.method.composable.HistogramRepresentation)
+- [](quapy.method.composable.EnergyKernelRepresentation)
+- [](quapy.method.composable.GaussianKernelRepresentation)
+- [](quapy.method.composable.LaplacianKernelRepresentation)
+- [](quapy.method.composable.GaussianRFFKernelRepresentation)
 
 ```{hint}
-The [](quapy.method.composable.ClassTransformer) requires the classifier to have a property `oob_score==True` and to produce a property `oob_decision_function` during fitting. In [scikit-learn](https://scikit-learn.org/), this requirement is fulfilled by any bagging classifier, such as random forests. Any other classifier needs to be cross-validated through the [](quapy.method.composable.CVClassifier).
+The [](quapy.method.composable.ClassRepresentation) requires the classifier to have a property `oob_score==True` and to produce a property `oob_decision_function` during fitting. In [scikit-learn](https://scikit-learn.org/), this requirement is fulfilled by any bagging classifier, such as random forests. Any other classifier needs to be cross-validated through the [](quapy.method.composable.CVClassifier).
 ```
 
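As a rough illustration of the hint above: a bagging classifier with `oob_score=True` can be passed to `ClassRepresentation` directly, while a non-bagging classifier is wrapped in `CVClassifier` to obtain cross-validated predictions. The sketch below only shows the two constructions and mirrors the example script further down in this diff; nothing here is new API.

```python
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from quapy.method.composable import ClassRepresentation, CVClassifier

# bagging classifier: out-of-bag predictions are available during fitting
rep_oob = ClassRepresentation(RandomForestClassifier(oob_score=True))

# non-bagging classifier: emulate oob_decision_function via 5-fold cross-validation
rep_cv = ClassRepresentation(
    CVClassifier(LogisticRegression(random_state=0), 5),
    is_probabilistic=True,  # use predicted probabilities, as in PACC
)
```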
@@ -1,6 +1,6 @@
 """
 This example illustrates the composition of quantification methods from
-arbitrary loss functions and feature transformations. It will extend the basic
+arbitrary loss functions and feature representations. It will extend the basic
 example on the usage of quapy with this composition.
 
 This example requires the installation of qunfold, the back-end of QuaPy's
@@ -8,7 +8,7 @@ composition module:
 
     pip install --upgrade pip setuptools wheel
     pip install "jax[cpu]"
-    pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4"
+    pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.5"
 """
 
 import numpy as np

@@ -24,20 +24,20 @@ data = qp.data.preprocessing.text2tfidf(
 training, testing = data.train_test
 
 # We start by recovering PACC from its building blocks, a LeastSquaresLoss and
-# a probabilistic ClassTransformer. A 5-fold cross-validation is implemented
+# a probabilistic ClassRepresentation. A 5-fold cross-validation is implemented
 # through a CVClassifier.
 
 from quapy.method.composable import (
     ComposableQuantifier,
     LeastSquaresLoss,
-    ClassTransformer,
+    ClassRepresentation,
     CVClassifier,
 )
 from sklearn.linear_model import LogisticRegression
 
 pacc = ComposableQuantifier(
     LeastSquaresLoss(),
-    ClassTransformer(
+    ClassRepresentation(
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),

@@ -63,7 +63,7 @@ from quapy.method.composable import HellingerSurrogateLoss
 
 model = ComposableQuantifier(
     HellingerSurrogateLoss(), # the loss is different from before
-    ClassTransformer( # we use the same transformer
+    ClassRepresentation( # we use the same representation
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),

@@ -79,7 +79,7 @@ absolute_errors = qp.evaluation.evaluate(
 print(f"MAE = {np.mean(absolute_errors):.4f}+-{np.std(absolute_errors):.4f}")
 
 # In general, any composed method solves a linear system of equations by
-# minimizing the loss after transforming the data. Methods of this kind include
+# minimizing the loss after representing the data. Methods of this kind include
 # ACC, PACC, HDx, HDy, and many other well-known methods, as well as an
 # unlimited number of re-combinations of their building blocks.

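To make the comment above concrete, here is a toy illustration (not qunfold's actual implementation) of what "minimizing the loss after representing the data" amounts to: with a matrix M whose columns are class-wise average representations and an observed average representation q of the unlabelled sample, the prevalence estimate minimizes a loss such as the squared error over the probability simplex. The numbers and the SciPy solver choice below are assumptions made only for this sketch.

```python
import numpy as np
from scipy.optimize import minimize

# toy system: one column of M per class, q = average representation of the sample
M = np.array([[0.8, 0.2, 0.1],
              [0.1, 0.7, 0.2],
              [0.1, 0.1, 0.7]])
p_true = np.array([0.5, 0.3, 0.2])
q = M @ p_true

loss = lambda p: np.sum((q - M @ p) ** 2)                      # least squares loss
constraints = [{"type": "eq", "fun": lambda p: p.sum() - 1}]   # p sums to one
bounds = [(0, 1)] * 3                                          # p is non-negative
p0 = np.full(3, 1 / 3)

p_hat = minimize(loss, p0, bounds=bounds, constraints=constraints).x
print(np.round(p_hat, 3))  # recovers approximately [0.5, 0.3, 0.2]
```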
@@ -93,18 +93,18 @@ from quapy.method.composable import CombinedLoss
 
 model = ComposableQuantifier(
     CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss()),
-    ClassTransformer(
+    ClassRepresentation(
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),
 )
 
-from qunfold.quapy import QuaPyWrapper
-from qunfold import GenericMethod
+from quapy.method.composable import QUnfoldWrapper
+from qunfold import LinearMethod
 
-model = QuaPyWrapper(GenericMethod(
+model = QUnfoldWrapper(LinearMethod(
     CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss()),
-    ClassTransformer(
+    ClassRepresentation(
         CVClassifier(LogisticRegression(random_state=0), 5),
         is_probabilistic = True
     ),

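A possible side note on the combined loss above: judging from the `loss__weights` entry in the parameter grid below, the relative weight of the two losses can also be fixed directly when constructing the loss. The `weights` keyword in this sketch is inferred from that grid key, not taken from this diff.

```python
from quapy.method.composable import (
    CombinedLoss,
    HellingerSurrogateLoss,
    LeastSquaresLoss,
)

# weight the Hellinger surrogate at 0.9 and the least squares term at 0.1
loss = CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss(), weights=(0.9, 0.1))
```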
@@ -115,7 +115,7 @@ model = QuaPyWrapper(GenericMethod(
 
 param_grid = {
     "loss__weights": [ (w, 1-w) for w in [.1, .5, .9] ],
-    "transformer__classifier__estimator__C": [1e-1, 1e1],
+    "representation__classifier__estimator__C": [1e-1, 1e1],
 }
 
 grid_search = qp.model_selection.GridSearchQ(