diff --git a/KDEy/kdey_devel.py b/KDEy/kdey_devel.py
index 57cc487..c7fc237 100644
--- a/KDEy/kdey_devel.py
+++ b/KDEy/kdey_devel.py
@@ -326,7 +326,7 @@ class KDEyMLauto2(KDEyML):
         if self.target == 'likelihood':
             loss_fn = neg_loglikelihood_prev
         else:
-            loss_fn = lambda prev_hat: qp.error.from_name(self.target)(prev, prev_hat)
+            loss_fn = lambda prev_hat: qp.error.from_name(self.target)(prevtrue, prev_hat)

         pred_prev, neglikelihood = optim_minimize(loss_fn, init_prev, return_loss=True)
         loss_accum += neglikelihood
diff --git a/KDEy/quantification_evaluation.py b/KDEy/quantification_evaluation.py
index b35dfe8..7591783 100644
--- a/KDEy/quantification_evaluation.py
+++ b/KDEy/quantification_evaluation.py
@@ -43,7 +43,7 @@ METHODS = [
     ('KDEy-AE', KDEyMLauto2(newLR(), bandwidth='auto', target='mae', search='grid'), wrap_hyper(logreg_grid)),
     ('KDEy-AE+', KDEyMLauto2(newLR(), bandwidth='auto', target='mae', search='optim'), wrap_hyper(logreg_grid)),
     ('KDEy-RAE', KDEyMLauto2(newLR(), bandwidth='auto', target='mrae', search='grid'), wrap_hyper(logreg_grid)),
-    ('KDEy-RAE', KDEyMLauto2(newLR(), bandwidth='auto', target='mrae', search='optim'), wrap_hyper(logreg_grid)),
+    ('KDEy-RAE+', KDEyMLauto2(newLR(), bandwidth='auto', target='mrae', search='optim'), wrap_hyper(logreg_grid)),
 ]


@@ -167,4 +167,4 @@ if __name__ == '__main__':

     for method_name, quantifier, param_grid in METHODS + TRANSDUCTIVE_METHODS:
         run_experiment(method_name, quantifier, param_grid)
-    show_results(global_result_path)
\ No newline at end of file
+    show_results(global_result_path)