Commit f5089f7

ENH: replace regression_coverage_score by regression_coverage_score_v2, clarify docstring (#639)
Co-authored-by: FaustinPulveric <[email protected]>
1 parent 8d1583f commit f5089f7

23 files changed · +2782 −1150 lines changed
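
Every example below follows the same migration pattern: the coverage metric now takes the full interval array returned by predict_interval instead of separate lower- and upper-bound columns, and returns one coverage value per confidence level. A minimal sketch of the before/after call, with toy data and shapes inferred from these diffs rather than taken from the released API:

import numpy as np
from mapie.metrics.regression import regression_coverage_score_v2

# Toy data; the interval array mimics the predict_interval output used below:
# shape (n_samples, 2, n_confidence_levels), axis 1 holding (lower, upper).
y_test = np.array([1.2, 3.4, 2.1])
y_pis = np.array([
    [[0.5], [2.0]],
    [[3.0], [4.0]],
    [[1.0], [1.5]],
])

# Old call (removed by this commit):
#     regression_coverage_score(y_test, y_pis[:, 0, 0], y_pis[:, 1, 0])
# New call: pass the interval array directly; the result holds one coverage
# value per confidence level, hence the trailing [0] used in the examples.
coverage = regression_coverage_score_v2(y_test, y_pis)[0]
print(coverage)  # fraction of y_test values falling inside their interval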

doc/api.rst

Lines changed: 0 additions & 1 deletion
@@ -92,7 +92,6 @@ Regression Metrics
     :toctree: generated/
     :template: function.rst
 
-    mapie.metrics.regression.regression_coverage_score
     mapie.metrics.regression.regression_coverage_score_v2
     mapie.metrics.regression.regression_mean_width_score
     mapie.metrics.regression.regression_ssc

examples/regression/1-quickstart/plot_compare_conformity_scores.py

Lines changed: 7 additions & 7 deletions
@@ -42,7 +42,7 @@
 from sklearn.ensemble import RandomForestRegressor
 from sklearn.model_selection import train_test_split
 
-from mapie.metrics.regression import regression_coverage_score
+from mapie.metrics.regression import regression_coverage_score_v2
 from mapie_v1.regression import CrossConformalRegressor
 
 RANDOM_STATE = 42
@@ -111,9 +111,9 @@
     X_test
 )
 
-coverage_absconfscore = regression_coverage_score(
-    y_test, y_pis_absconfscore[:, 0, 0], y_pis_absconfscore[:, 1, 0]
-)
+coverage_absconfscore = regression_coverage_score_v2(
+    y_test, y_pis_absconfscore
+)[0]
 
 ##############################################################################
 # Prepare the results for matplotlib. Get the prediction intervals and their
@@ -146,9 +146,9 @@ def get_yerr(y_pred, y_pis):
     X_test
 )
 
-coverage_gammaconfscore = regression_coverage_score(
-    y_test, y_pis_gammaconfscore[:, 0, 0], y_pis_gammaconfscore[:, 1, 0]
-)
+coverage_gammaconfscore = regression_coverage_score_v2(
+    y_test, y_pis_gammaconfscore
+)[0]
 
 yerr_gammaconfscore = get_yerr(y_pred_gammaconfscore, y_pis_gammaconfscore)
 pred_int_width_gammaconfscore = (

examples/regression/1-quickstart/plot_prefit.py

Lines changed: 5 additions & 6 deletions
@@ -27,7 +27,7 @@
 from sklearn.neural_network import MLPRegressor
 
 from numpy.typing import NDArray
-from mapie.metrics.regression import regression_coverage_score
+from mapie.metrics.regression import regression_coverage_score_v2
 from mapie_v1.regression import SplitConformalRegressor, ConformalizedQuantileRegressor
 from mapie_v1.utils import train_conformalize_test_split
 
@@ -99,7 +99,7 @@ def f(x: NDArray) -> NDArray:
 
 # Evaluate prediction and coverage level on testing set
 y_pred, y_pis = mapie.predict_interval(X_test.reshape(-1, 1))
-coverage = regression_coverage_score(y_test, y_pis[:, 0, 0], y_pis[:, 1, 0])
+coverage = regression_coverage_score_v2(y_test, y_pis)[0]
 
 
 ##############################################################################
@@ -206,11 +206,10 @@ def f(x: NDArray) -> NDArray:
 
 # Evaluate prediction and coverage level on testing set
 y_pred_cqr, y_pis_cqr = mapie_cqr.predict_interval(X_test.reshape(-1, 1))
-coverage_cqr = regression_coverage_score(
+coverage_cqr = regression_coverage_score_v2(
     y_test,
-    y_pis_cqr[:, 0, 0],
-    y_pis_cqr[:, 1, 0]
-)
+    y_pis_cqr
+)[0]
 
 
 ##############################################################################

examples/regression/1-quickstart/plot_toy_model.py

Lines changed: 2 additions & 6 deletions
@@ -9,7 +9,7 @@
 from matplotlib import pyplot as plt
 from sklearn.datasets import make_regression
 
-from mapie.metrics.regression import regression_coverage_score
+from mapie.metrics.regression import regression_coverage_score_v2
 from mapie_v1.regression import SplitConformalRegressor
 from mapie_v1.utils import train_conformalize_test_split
 
@@ -33,11 +33,7 @@
 mapie_regressor.conformalize(X_conformalize, y_conformalize)
 y_pred, y_pred_interval = mapie_regressor.predict_interval(X_test)
 
-coverage_scores = [
-    regression_coverage_score(
-        y_test, y_pred_interval[:, 0, i], y_pred_interval[:, 1, i]
-    ) for i, _ in enumerate(confidence_level)
-]
+coverage_scores = regression_coverage_score_v2(y_test, y_pred_interval)
 
 plt.xlabel("x")
 plt.ylabel("y")

examples/regression/1-quickstart/plot_ts-tutorial.py

Lines changed: 25 additions & 29 deletions
@@ -57,7 +57,7 @@ class that block bootstraps the training set.
 from sklearn.model_selection import RandomizedSearchCV, TimeSeriesSplit
 
 from mapie.metrics.regression import (
-    regression_coverage_score,
+    regression_coverage_score_v2,
     regression_mean_width_score, coverage_width_based,
 )
 from mapie.regression import MapieTimeSeriesRegressor
@@ -217,9 +217,9 @@ class that block bootstraps the training set.
     allow_infinite_bounds=True
 )
 y_pis_enbpi_npfit = np.clip(y_pis_enbpi_npfit, 1, 10)
-coverage_enbpi_npfit = regression_coverage_score(
-    y_test, y_pis_enbpi_npfit[:, 0, 0], y_pis_enbpi_npfit[:, 1, 0]
-)
+coverage_enbpi_npfit = regression_coverage_score_v2(
+    y_test, y_pis_enbpi_npfit
+)[0]
 width_enbpi_npfit = regression_mean_width_score(
     y_pis_enbpi_npfit[:, 0, 0], y_pis_enbpi_npfit[:, 1, 0]
 )
@@ -258,9 +258,9 @@ class that block bootstraps the training set.
     y_pis_aci_npfit[step:step + gap, :, :], 1, 10
 )
 
-coverage_aci_npfit = regression_coverage_score(
-    y_test, y_pis_aci_npfit[:, 0, 0], y_pis_aci_npfit[:, 1, 0]
-)
+coverage_aci_npfit = regression_coverage_score_v2(
+    y_test, y_pis_aci_npfit
+)[0]
 width_aci_npfit = regression_mean_width_score(
     y_pis_aci_npfit[:, 0, 0], y_pis_aci_npfit[:, 1, 0]
 )
@@ -307,9 +307,9 @@ class that block bootstraps the training set.
 y_pis_enbpi_pfit[step:step + gap, :, :] = np.clip(
     y_pis_enbpi_pfit[step:step + gap, :, :], 1, 10
 )
-coverage_enbpi_pfit = regression_coverage_score(
-    y_test, y_pis_enbpi_pfit[:, 0, 0], y_pis_enbpi_pfit[:, 1, 0]
-)
+coverage_enbpi_pfit = regression_coverage_score_v2(
+    y_test, y_pis_enbpi_pfit
+)[0]
 width_enbpi_pfit = regression_mean_width_score(
     y_pis_enbpi_pfit[:, 0, 0], y_pis_enbpi_pfit[:, 1, 0]
 )
@@ -360,9 +360,9 @@ class that block bootstraps the training set.
     y_pis_aci_pfit[step:step + gap, :, :], 1, 10
 )
 
-coverage_aci_pfit = regression_coverage_score(
-    y_test, y_pis_aci_pfit[:, 0, 0], y_pis_aci_pfit[:, 1, 0]
-)
+coverage_aci_pfit = regression_coverage_score_v2(
+    y_test, y_pis_aci_pfit
+)[0]
 width_aci_pfit = regression_mean_width_score(
     y_pis_aci_pfit[:, 0, 0], y_pis_aci_pfit[:, 1, 0]
 )
@@ -462,29 +462,25 @@ class that block bootstraps the training set.
 
 for i in range(window, len(y_test), 1):
     rolling_coverage_aci_npfit.append(
-        regression_coverage_score(
-            y_test[i-window:i], y_pis_aci_npfit[i-window:i, 0, 0],
-            y_pis_aci_npfit[i-window:i, 1, 0]
-        )
+        regression_coverage_score_v2(
+            y_test[i-window:i], y_pis_aci_npfit[i-window:i]
+        )[0]
     )
     rolling_coverage_aci_pfit.append(
-        regression_coverage_score(
-            y_test[i-window:i], y_pis_aci_pfit[i-window:i, 0, 0],
-            y_pis_aci_pfit[i-window:i, 1, 0]
-        )
+        regression_coverage_score_v2(
+            y_test[i-window:i], y_pis_aci_pfit[i-window:i]
+        )[0]
     )
 
     rolling_coverage_enbpi_npfit.append(
-        regression_coverage_score(
-            y_test[i-window:i], y_pis_enbpi_npfit[i-window:i, 0, 0],
-            y_pis_enbpi_npfit[i-window:i, 1, 0]
-        )
+        regression_coverage_score_v2(
+            y_test[i-window:i], y_pis_enbpi_npfit[i-window:i]
+        )[0]
     )
     rolling_coverage_enbpi_pfit.append(
-        regression_coverage_score(
-            y_test[i-window:i], y_pis_enbpi_pfit[i-window:i, 0, 0],
-            y_pis_enbpi_pfit[i-window:i, 1, 0]
-        )
+        regression_coverage_score_v2(
+            y_test[i-window:i], y_pis_enbpi_pfit[i-window:i]
+        )[0]
     )
 
 plt.figure(figsize=(10, 5))

examples/regression/2-advanced-analysis/plot-coverage-width-based-criterion.py

Lines changed: 4 additions & 5 deletions
@@ -25,7 +25,7 @@
 from sklearn.model_selection import train_test_split
 
 from mapie.metrics.regression import (
-    regression_coverage_score,
+    regression_coverage_score_v2,
     regression_mean_width_score, coverage_width_based,
 )
 from mapie_v1.regression import (
@@ -260,11 +260,10 @@ def plot_1d_data(
 cwc_score = {}
 
 for strategy in STRATEGIES:
-    coverage_score[strategy] = regression_coverage_score(
+    coverage_score[strategy] = regression_coverage_score_v2(
         y_test,
-        y_pis[strategy][:, 0, 0],
-        y_pis[strategy][:, 1, 0]
-    )
+        y_pis[strategy]
+    )[0]
     width_mean_score[strategy] = regression_mean_width_score(
         y_pis[strategy][:, 0, 0],
         y_pis[strategy][:, 1, 0]

examples/regression/2-advanced-analysis/plot_cqr_symmetry_difference.py

Lines changed: 7 additions & 7 deletions
@@ -12,7 +12,7 @@
 from sklearn.datasets import make_regression
 from sklearn.ensemble import GradientBoostingRegressor
 
-from mapie.metrics.regression import regression_coverage_score
+from mapie.metrics.regression import regression_coverage_score_v2
 from mapie_v1.regression import ConformalizedQuantileRegressor
 from mapie_v1.utils import train_conformalize_test_split
 
@@ -56,12 +56,12 @@
 print(f"y_pis_sym[:, 0].shape: {y_pis_sym[:, 0].shape}")
 print(f"y_pis_sym[:, 1].shape: {y_pis_sym[:, 1].shape}")
 # Calculate coverage scores
-coverage_score_sym = regression_coverage_score(
-    y_test, y_pis_sym[:, 0], y_pis_sym[:, 1]
-)
-coverage_score_asym = regression_coverage_score(
-    y_test, y_pis_asym[:, 0], y_pis_asym[:, 1]
-)
+coverage_score_sym = regression_coverage_score_v2(
+    y_test, y_pis_sym
+)[0]
+coverage_score_asym = regression_coverage_score_v2(
+    y_test, y_pis_asym
+)[0]
 
 # Sort the values for plotting
 order = np.argsort(X_test[:, 0])

examples/regression/2-advanced-analysis/plot_cqr_tutorial.py

Lines changed: 7 additions & 10 deletions
@@ -30,7 +30,7 @@
 from sklearn.model_selection import KFold, RandomizedSearchCV, train_test_split
 
 from mapie.metrics.regression import (
-    regression_coverage_score,
+    regression_coverage_score_v2,
     regression_mean_width_score,
 )
 from mapie_v1.regression import (
@@ -277,11 +277,10 @@ def plot_prediction_intervals(
         lower_bound[strategy_name],
         upper_bound[strategy_name]
     ) = sort_y_values(y_test, y_pred[strategy_name], y_pis[strategy_name])
-    coverage[strategy_name] = regression_coverage_score(
+    coverage[strategy_name] = regression_coverage_score_v2(
        y_test,
-        y_pis[strategy_name][:, 0, 0],
-        y_pis[strategy_name][:, 1, 0]
-    )
+        y_pis[strategy_name]
+    )[0]
     width[strategy_name] = regression_mean_width_score(
         y_pis[strategy_name][:, 0, 0],
         y_pis[strategy_name][:, 1, 0]
@@ -363,11 +362,9 @@ def get_coverages_widths_by_bins(
         y_low_ = np.take(lower_bound[strategy], indices)
         y_high_ = np.take(upper_bound[strategy], indices)
         if want == "coverage":
-            recap[name].append(regression_coverage_score(
-                y_test_trunc[0],
-                y_low_[0],
-                y_high_[0]
-            ))
+            recap[name].append(regression_coverage_score_v2(
+                y_test_trunc[0], np.stack((y_low_[0], y_high_[0]), axis=-1)
+            )[0])
         elif want == "width":
             recap[name].append(
                 regression_mean_width_score(y_low_[0], y_high_[0])
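
Where an example only has separate lower- and upper-bound arrays, as in get_coverages_widths_by_bins above, the commit stacks them into the two-column interval layout before calling the new metric. A small sketch of that adaptation with made-up bound arrays, assuming an (n_samples, 2) interval array is accepted as the diff suggests:

import numpy as np
from mapie.metrics.regression import regression_coverage_score_v2

# Hypothetical stand-ins for the per-bin bound arrays used in the tutorial.
y_true = np.array([1.0, 2.0, 3.0])
y_low = np.array([0.5, 2.5, 2.0])
y_high = np.array([1.5, 3.5, 4.0])

# Stack (lower, upper) along the last axis to get shape (n_samples, 2),
# then keep the single coverage value that is returned.
intervals = np.stack((y_low, y_high), axis=-1)
coverage = regression_coverage_score_v2(y_true, intervals)[0]
print(coverage)  # here 2 of the 3 points fall inside their interval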

examples/regression/2-advanced-analysis/plot_main-tutorial-regression.py

Lines changed: 13 additions & 13 deletions
@@ -37,7 +37,7 @@
 from sklearn.preprocessing import PolynomialFeatures
 from sklearn.model_selection import train_test_split
 
-from mapie.metrics.regression import regression_coverage_score
+from mapie.metrics.regression import regression_coverage_score_v2
 from mapie_v1.regression import (
     CrossConformalRegressor,
     JackknifeAfterBootstrapRegressor,
@@ -303,9 +303,9 @@ def plot_1d_data(
 
 pd.DataFrame([
     [
-        regression_coverage_score(
-            y_test, y_pis[strategy][:, 0, 0], y_pis[strategy][:, 1, 0]
-        ),
+        regression_coverage_score_v2(
+            y_test, y_pis[strategy]
+        )[0],
         (
             y_pis[strategy][:, 1, 0] - y_pis[strategy][:, 0, 0]
         ).mean()
@@ -538,9 +538,9 @@ def get_heteroscedastic_coverage(y_test, y_pis, STRATEGIES, bins):
             y_test_trunc = np.take(y_test, indices)
             y_low_ = np.take(y_pis[strategy][:, 0, 0], indices)
             y_high_ = np.take(y_pis[strategy][:, 1, 0], indices)
-            score_coverage = regression_coverage_score(
-                y_test_trunc[0], y_low_[0], y_high_[0]
-            )
+            score_coverage = regression_coverage_score_v2(
+                y_test_trunc[0], np.stack((y_low_[0], y_high_[0]), axis=-1)
+            )[0]
             recap[name].append(score_coverage)
     recap_df = pd.DataFrame(recap, index=STRATEGIES)
     return recap_df
@@ -569,9 +569,9 @@ def get_heteroscedastic_coverage(y_test, y_pis, STRATEGIES, bins):
 
 pd.DataFrame([
     [
-        regression_coverage_score(
-            y_test, y_pis[strategy][:, 0, 0], y_pis[strategy][:, 1, 0]
-        ),
+        regression_coverage_score_v2(
+            y_test, y_pis[strategy]
+        )[0],
         (
             y_pis[strategy][:, 1, 0] - y_pis[strategy][:, 0, 0]
         ).mean()
@@ -766,9 +766,9 @@ def get_1d_data_with_normal_distrib(funct, mu, sigma, n_samples, noise):
 
 pd.DataFrame([
     [
-        regression_coverage_score(
-            y_test, y_pis[strategy][:, 0, 0], y_pis[strategy][:, 1, 0]
-        ),
+        regression_coverage_score_v2(
+            y_test, y_pis[strategy]
+        )[0],
         (
             y_pis[strategy][:, 1, 0] - y_pis[strategy][:, 0, 0]
         ).mean()

examples/regression/2-advanced-analysis/plot_nested-cv.py

Lines changed: 7 additions & 7 deletions
@@ -45,7 +45,7 @@
 from sklearn.model_selection import RandomizedSearchCV, train_test_split
 from sklearn.datasets import make_sparse_uncorrelated
 
-from mapie.metrics.regression import regression_coverage_score
+from mapie.metrics.regression import regression_coverage_score_v2
 from mapie_v1.regression import CrossConformalRegressor
 
 
@@ -91,9 +91,9 @@
     X_test, aggregate_predictions='median'
 )
 widths_non_nested = y_pis_non_nested[:, 1, 0] - y_pis_non_nested[:, 0, 0]
-coverage_non_nested = regression_coverage_score(
-    y_test, y_pis_non_nested[:, 0, 0], y_pis_non_nested[:, 1, 0]
-)
+coverage_non_nested = regression_coverage_score_v2(
+    y_test, y_pis_non_nested
+)[0]
 score_non_nested = root_mean_squared_error(y_test, y_pred_non_nested)
 
 # Nested approach with the CV+ strategy using the Random Forest model.
@@ -117,9 +117,9 @@
     X_test, aggregate_predictions='median'
 )
 widths_nested = y_pis_nested[:, 1, 0] - y_pis_nested[:, 0, 0]
-coverage_nested = regression_coverage_score(
-    y_test, y_pis_nested[:, 0, 0], y_pis_nested[:, 1, 0]
-)
+coverage_nested = regression_coverage_score_v2(
+    y_test, y_pis_nested
+)[0]
 score_nested = root_mean_squared_error(y_test, y_pred_nested)
 
 # Print scores and effective coverages.
