"""Generate a 3D stacked-histogram figure of simulated classifier posterior scores.

Three distributions are drawn along the depth axis of a 3D plot:
a simulated test distribution (10% positive prevalence), the positive
score distribution, and the negative score distribution. The figure is
saved to ./histograms3d_CACM2023.pdf.
"""
import math

import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split, cross_val_predict
from sklearn.neighbors import KernelDensity

import quapy as qp
from data import LabelledCollection

# NOTE(review): math, sklearn, quapy and LabelledCollection are not used in this
# script; kept because this file may be part of a larger workflow — confirm
# before removing.

# Fix the RNG seed so the saved figure is reproducible across runs.
np.random.seed(0)

# Simulated posterior scores: negatives ~ N(0.2, 0.2^2), positives ~ N(0.75, 0.05^2),
# truncated to the [0, 1] interval via a vectorized boolean mask.
negatives = np.random.normal(loc=0.2, scale=0.2, size=20000)
negatives = negatives[(negatives >= 0) & (negatives <= 1)]
positives = np.random.normal(loc=0.75, scale=0.05, size=20000)
positives = positives[(positives >= 0) & (positives <= 1)]

# Build a test sample with `prev` prevalence of positives (here 10%).
prev = 0.1
test = np.concatenate([
    negatives[:int(len(negatives) * (1 - prev))],
    positives[:int(len(positives) * prev)],
])

nbins = 30
plt.rcParams.update({'font.size': 7})

fig = plt.figure()
positions = np.asarray([2, 1, 0])  # depth (y) slot for each distribution
colors = ['r', 'g', 'b']

ax = fig.add_subplot(111, projection='3d')
ax.set_box_aspect((3, 1, 0.8))

# One density histogram per distribution, each drawn as a bar series at its
# own depth (zs=z, zdir='y') so the three histograms stack front-to-back.
for post, c, z in zip([test, positives, negatives], colors, positions):
    hist, bins = np.histogram(post, bins=np.linspace(0, 1, nbins + 1), density=True)
    xs = (bins[:-1] + bins[1:]) / 2  # bin centers
    ax.bar(xs, hist, width=1 / nbins, zs=z, zdir='y', color=c, ec=c, alpha=0.6)

ax.yaxis.set_ticks(positions)
# Leading spaces push the labels away from the axis in the 3D projection.
ax.yaxis.set_ticklabels([' '*20+'Test distribution',
                         ' '*20+'Positive distribution',
                         ' '*20+'Negative distribution'])
ax.zaxis.set_ticks([])
ax.zaxis.set_ticklabels([], minor=True)

plt.savefig('./histograms3d_CACM2023.pdf')