diff --git a/scoringrules/_crps.py b/scoringrules/_crps.py index 6fafb16..ad0df51 100644 --- a/scoringrules/_crps.py +++ b/scoringrules/_crps.py @@ -507,6 +507,7 @@ def crps_beta( upper: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the beta distribution. @@ -540,6 +541,9 @@ def crps_beta( Upper bound of the forecast beta distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -559,6 +563,21 @@ def crps_beta( >>> sr.crps_beta(0.3, 0.7, 1.1) 0.08501024366637236 """ + if backend == "torch": + raise TypeError("Torch backend is not supported for the Beta distribution.") + if check_pars: + B = backends.active if backend is None else backends[backend] + a, b, lower, upper = map(B.asarray, (a, b, lower, upper)) + if B.any(a <= 0): + raise ValueError( + "`a` contains non-positive entries. The shape parameters of the Beta distribution must be positive." + ) + if B.any(b <= 0): + raise ValueError( + "`b` contains non-positive entries. The shape parameters of the Beta distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return crps.beta(obs, a, b, lower, upper, backend=backend) @@ -568,6 +587,7 @@ def crps_binomial( prob: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the binomial distribution. @@ -590,6 +610,9 @@ def crps_binomial( Probability parameter of the forecast binomial distribution as a float or array of floats. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -609,6 +632,21 @@ def crps_binomial( >>> sr.crps_binomial(4, 10, 0.5) 0.5955772399902344 """ + if backend == "torch": + raise TypeError("Torch backend is not supported for the Binomial distribution.") + if check_pars: + B = backends.active if backend is None else backends[backend] + prob = B.asarray(prob) + if B.any(prob < 0) or B.any(prob > 1): + raise ValueError("`prob` contains values outside the range [0, 1].") + if B.any(n <= 0): + raise ValueError( + "`n` contains non-positive entries. The size parameter of the binomial distribution must be positive." + ) + if not B.all_integer(n): + raise ValueError( + "`n` contains non-integer entries. The size parameter of the binomial distribution must be an integer." + ) return crps.binomial(obs, n, prob, backend=backend) @@ -617,6 +655,7 @@ def crps_exponential( rate: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the exponential distribution. @@ -635,6 +674,9 @@ def crps_exponential( Rate parameter of the forecast exponential distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -657,16 +699,24 @@ def crps_exponential( >>> sr.crps_exponential(np.array([0.8, 0.9]), np.array([3.0, 2.0])) array([0.36047864, 0.31529889]) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + rate = B.asarray(rate) + if B.any(rate <= 0): + raise ValueError( + "`rate` contains non-positive entries. The rate parameter of the exponential distribution must be positive." 
+ ) return crps.exponential(obs, rate, backend=backend) def crps_exponentialM( obs: "ArrayLike", - mass: "ArrayLike" = 0.0, location: "ArrayLike" = 0.0, scale: "ArrayLike" = 1.0, + mass: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the standard exponential distribution with a point mass at the boundary. @@ -700,6 +750,9 @@ def crps_exponentialM( Scale parameter of the forecast exponential distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -716,10 +769,19 @@ def crps_exponentialM( Examples -------- >>> import scoringrules as sr - >>> sr.crps_exponentialM(0.4, 0.2, 0.0, 1.0) + >>> sr.crps_exponentialM(0.4, 0.0, 1.0, 0.2) 0.19251207365702294 """ - return crps.exponentialM(obs, mass, location, scale, backend=backend) + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, mass = map(B.asarray, (scale, mass)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter must be positive." + ) + if B.any(mass < 0) or B.any(mass > 1): + raise ValueError("`mass` contains entries outside the range [0, 1].") + return crps.exponentialM(obs, location, scale, mass, backend=backend) def crps_2pexponential( @@ -729,6 +791,7 @@ def crps_2pexponential( location: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the two-piece exponential distribution. @@ -755,6 +818,9 @@ def crps_2pexponential( Location parameter of the forecast two-piece exponential distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -774,6 +840,17 @@ def crps_2pexponential( >>> sr.crps_2pexponential(0.8, 3.0, 1.4, 0.0) array(1.18038524) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale1, scale2 = map(B.asarray, (scale1, scale2)) + if B.any(scale1 <= 0): + raise ValueError( + "`scale1` contains non-positive entries. The scale parameters of the two-piece exponential distribution must be positive." + ) + if B.any(scale2 <= 0): + raise ValueError( + "`scale2` contains non-positive entries. The scale parameters of the two-piece exponential distribution must be positive." + ) return crps.twopexponential(obs, scale1, scale2, location, backend=backend) @@ -784,6 +861,7 @@ def crps_gamma( *, scale: "ArrayLike | None" = None, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the gamma distribution. @@ -812,6 +890,9 @@ def crps_gamma( Either ``rate`` or ``scale`` must be provided. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -844,6 +925,18 @@ def crps_gamma( if rate is None: rate = 1.0 / scale + if check_pars: + B = backends.active if backend is None else backends[backend] + shape, rate = map(B.asarray, (shape, rate)) + if B.any(shape <= 0): + raise ValueError( + "`shape` contains non-positive entries. The shape parameter of the gamma distribution must be positive." + ) + if B.any(rate <= 0): + raise ValueError( + "`rate` or `scale` contains non-positive entries. The rate and scale parameters of the gamma distribution must be positive." 
+ ) + return crps.gamma(obs, shape, rate, backend=backend) @@ -851,10 +944,11 @@ def crps_csg0( obs: "ArrayLike", shape: "ArrayLike", rate: "ArrayLike | None" = None, - *, scale: "ArrayLike | None" = None, shift: "ArrayLike" = 0.0, + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the censored, shifted gamma distribution. @@ -880,13 +974,16 @@ def crps_csg0( rate : array_like, optional Rate parameter of the forecast CSG distribution. Either ``rate`` or ``scale`` must be provided. + shift : array_like + Shift parameter of the forecast CSG distribution. scale : array_like, optional Scale parameter of the forecast CSG distribution, where ``scale = 1 / rate``. Either ``rate`` or ``scale`` must be provided. - shift : array_like - Shift parameter of the forecast CSG distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -920,6 +1017,18 @@ def crps_csg0( if rate is None: rate = 1.0 / scale + if check_pars: + B = backends.active if backend is None else backends[backend] + shape, rate = map(B.asarray, (shape, rate)) + if B.any(shape <= 0): + raise ValueError( + "`shape` contains non-positive entries. The shape parameter of the gamma distribution must be positive." + ) + if B.any(rate <= 0): + raise ValueError( + "`rate` or `scale` contains non-positive entries. The rate and scale parameters of the gamma distribution must be positive." + ) + return crps.csg0(obs, shape, rate, shift, backend=backend) @@ -930,6 +1039,7 @@ def crps_gev( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the generalised extreme value (GEV) distribution. 
@@ -953,6 +1063,9 @@ def crps_gev( Scale parameter of the forecast GEV distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1014,6 +1127,19 @@ def crps_gev( >>> sr.crps_gev(0.3, 0.1) 0.2924712413052034 """ + if backend == "torch": + raise TypeError("Torch backend is not supported for the GEV distribution.") + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, shape = map(B.asarray, (scale, shape)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the GEV distribution must be positive." + ) + if B.any(shape >= 1): + raise ValueError( + "`shape` contains entries larger than 1. The CRPS for the GEV distribution is only valid for shape values less than 1." + ) return crps.gev(obs, shape, location, scale, backend=backend) @@ -1025,6 +1151,7 @@ def crps_gpd( mass: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the generalised pareto distribution (GPD). @@ -1057,6 +1184,9 @@ def crps_gpd( Mass parameter at the lower boundary of the forecast GPD distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. 
Returns ------- @@ -1076,19 +1206,35 @@ def crps_gpd( >>> sr.crps_gpd(0.3, 0.9) 0.6849331901197213 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, shape, mass = map(B.asarray, (scale, shape, mass)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the GPD distribution must be positive. `nan` is returned in these places." + ) + if B.any(shape >= 1): + raise ValueError( + "`shape` contains entries larger than 1. The CRPS for the GPD distribution is only valid for shape values less than 1. `nan` is returned in these places." + ) + if B.any(mass < 0) or B.any(mass > 1): + raise ValueError( + "`mass` contains entries outside the range [0, 1]. `nan` is returned in these places." + ) return crps.gpd(obs, shape, location, scale, mass, backend=backend) def crps_gtclogistic( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), lmass: "ArrayLike" = 0.0, umass: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the generalised truncated and censored logistic distribution. @@ -1135,6 +1281,11 @@ def crps_gtclogistic( Point mass assigned to the lower boundary of the forecast distribution. umass : array_like Point mass assigned to the upper boundary of the forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. 
Returns ------- @@ -1147,6 +1298,26 @@ def crps_gtclogistic( >>> sr.crps_gtclogistic(0.0, 0.1, 0.4, -1.0, 1.0, 0.1, 0.1) 0.1658713056903939 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lmass, umass, lower, upper = map( + B.asarray, (scale, lmass, umass, lower, upper) + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the generalised logistic distribution must be positive." + ) + if B.any(lmass < 0) or B.any(lmass > 1): + raise ValueError("`lmass` contains entries outside the range [0, 1].") + if B.any(umass < 0) or B.any(umass > 1): + raise ValueError("`umass` contains entries outside the range [0, 1].") + if B.any(umass + lmass > 1): + raise ValueError( + "The sum of `umass` and `lmass` should be no larger than 1." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") + return crps.gtclogistic( obs, location, @@ -1161,12 +1332,13 @@ def crps_gtclogistic( def crps_tlogistic( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the truncated logistic distribution. @@ -1185,6 +1357,11 @@ def crps_tlogistic( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. 
Returns ------- @@ -1197,6 +1374,15 @@ def crps_tlogistic( >>> sr.crps_tlogistic(0.0, 0.1, 0.4, -1.0, 1.0) 0.12714830546327846 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the truncated logistic distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return crps.gtclogistic( obs, location, scale, lower, upper, 0.0, 0.0, backend=backend ) @@ -1204,12 +1390,13 @@ def crps_tlogistic( def crps_clogistic( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the censored logistic distribution. @@ -1228,6 +1415,11 @@ def crps_clogistic( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1240,6 +1432,15 @@ def crps_clogistic( >>> sr.crps_clogistic(0.0, 0.1, 0.4, -1.0, 1.0) 0.15805632276434345 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the censored logistic distribution must be positive." 
+ ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") lmass = stats._logis_cdf((lower - location) / scale) umass = 1 - stats._logis_cdf((upper - location) / scale) return crps.gtclogistic( @@ -1256,14 +1457,15 @@ def crps_clogistic( def crps_gtcnormal( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), lmass: "ArrayLike" = 0.0, umass: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the generalised truncated and censored normal distribution. @@ -1297,6 +1499,23 @@ def crps_gtcnormal( >>> sr.crps_gtcnormal(0.0, 0.1, 0.4, -1.0, 1.0, 0.1, 0.1) 0.1351100832878575 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lmass, umass, lower, upper = map( + B.asarray, (scale, lmass, umass, lower, upper) + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the generalised normal distribution must be positive." 
+ ) + if B.any(lmass < 0) or B.any(lmass > 1): + raise ValueError("`lmass` contains entries outside the range [0, 1].") + if B.any(umass < 0) or B.any(umass > 1): + raise ValueError("`umass` contains entries outside the range [0, 1].") + if B.any(umass + lmass >= 1): + raise ValueError("The sum of `umass` and `lmass` should be smaller than 1.") + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return crps.gtcnormal( obs, location, @@ -1311,12 +1530,13 @@ def crps_gtcnormal( def crps_tnormal( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the truncated normal distribution. @@ -1335,6 +1555,11 @@ def crps_tnormal( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1347,17 +1572,27 @@ def crps_tnormal( >>> sr.crps_tnormal(0.0, 0.1, 0.4, -1.0, 1.0) 0.10070146718008832 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the truncated normal distribution must be positive." 
+ ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return crps.gtcnormal(obs, location, scale, lower, upper, 0.0, 0.0, backend=backend) def crps_cnormal( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the censored normal distribution. @@ -1376,6 +1611,11 @@ def crps_cnormal( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1388,6 +1628,15 @@ def crps_cnormal( >>> sr.crps_cnormal(0.0, 0.1, 0.4, -1.0, 1.0) 0.10338851213123085 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the censored normal distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") lmass = stats._norm_cdf((lower - location) / scale) umass = 1 - stats._norm_cdf((upper - location) / scale) return crps.gtcnormal( @@ -1413,6 +1662,7 @@ def crps_gtct( umass: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the generalised truncated and censored t distribution. @@ -1457,6 +1707,11 @@ def crps_gtct( Point mass assigned to the lower boundary of the forecast distribution. 
umass : array_like Point mass assigned to the upper boundary of the forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1476,6 +1731,31 @@ def crps_gtct( >>> sr.crps_gtct(0.0, 2.0, 0.1, 0.4, -1.0, 1.0, 0.1, 0.1) 0.13997789333289662 """ + if backend in ["torch", "tensorflow", "jax"]: + raise TypeError( + "Torch, Tensorflow, and JAX backends are not supported for the Generalised t distribution." + ) + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lmass, umass, lower, upper = map( + B.asarray, (scale, lmass, umass, lower, upper) + ) + if B.any(df <= 0): + raise ValueError( + "`df` contains non-positive entries. The degrees of freedom parameter of the generalised t distribution must be positive." + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the generalised t distribution must be positive." + ) + if B.any(lmass < 0) or B.any(lmass > 1): + raise ValueError("`lmass` contains entries outside the range [0, 1].") + if B.any(umass < 0) or B.any(umass > 1): + raise ValueError("`umass` contains entries outside the range [0, 1].") + if B.any(umass + lmass >= 1): + raise ValueError("The sum of `umass` and `lmass` should be smaller than 1.") + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return crps.gtct( obs, df, @@ -1498,6 +1778,7 @@ def crps_tt( upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the truncated t distribution. @@ -1518,6 +1799,11 @@ def crps_tt( Lower boundary of the truncated forecast distribution. 
upper : array_like Upper boundary of the truncated forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1530,6 +1816,23 @@ def crps_tt( >>> sr.crps_tt(0.0, 2.0, 0.1, 0.4, -1.0, 1.0) 0.10323007471747117 """ + if backend in ["torch", "tensorflow", "jax"]: + raise TypeError( + "Torch, Tensorflow, and JAX backends are not supported for the Truncated t distribution." + ) + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(df <= 0): + raise ValueError( + "`df` contains non-positive entries. The degrees of freedom parameter of the truncated t distribution must be positive." + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the truncated t distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return crps.gtct( obs, df, @@ -1552,6 +1855,7 @@ def crps_ct( upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the censored t distribution. @@ -1572,6 +1876,11 @@ def crps_ct( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. 
Returns ------- @@ -1584,6 +1893,23 @@ def crps_ct( >>> sr.crps_ct(0.0, 2.0, 0.1, 0.4, -1.0, 1.0) 0.12672580744453948 """ + if backend in ["torch", "tensorflow", "jax"]: + raise TypeError( + "Torch, Tensorflow, and JAX backends are not supported for the Censored t distribution." + ) + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(df <= 0): + raise ValueError( + "`df` contains non-positive entries. The degrees of freedom parameter of the censored t distribution must be positive." + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the censored t distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") lmass = stats._t_cdf((lower - location) / scale, df) umass = 1 - stats._t_cdf((upper - location) / scale, df) return crps.gtct( @@ -1606,6 +1932,7 @@ def crps_hypergeometric( k: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the hypergeometric distribution. @@ -1631,6 +1958,9 @@ def crps_hypergeometric( Number of draws, without replacement. Must be in 0, 1, ..., m + n. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1650,6 +1980,36 @@ def crps_hypergeometric( >>> sr.crps_hypergeometric(5, 7, 13, 12) 0.44697415547610597 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + if B.any(m < 0): + raise ValueError( + "`m` contains negative entries. The number of successes in the hypergeometric distribution must be non-negative." 
+ ) + if B.any(n < 0): + raise ValueError( + "`n` contains negative entries. The number of failures in the hypergeometric distribution must be non-negative." + ) + if B.any(k < 0): + raise ValueError( + "`k` contains negative entries. The number of draws in the hypergeometric distribution must be non-negative." + ) + if B.any(k > m + n): + raise ValueError( + "`k` contains values larger than `m + n`. The number of draws in the hypergeometric distribution must be larger than the total number of successes and failures." + ) + if not B.all_integer(m): + raise ValueError( + "`m` contains non-integer entries. The number of successes in the hypergeometric distribution must be an integer." + ) + if not B.all_integer(n): + raise ValueError( + "`n` contains non-integer entries. The number of failures in the hypergeometric distribution must be an integer." + ) + if not B.all_integer(k): + raise ValueError( + "`k` contains non-integer entries. The number of draws in the hypergeometric distribution must be an integer." + ) return crps.hypergeometric(obs, m, n, k, backend=backend) @@ -1659,6 +2019,7 @@ def crps_laplace( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the laplace distribution. @@ -1681,6 +2042,9 @@ def crps_laplace( Scale parameter of the forecast laplace distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1700,15 +2064,23 @@ def crps_laplace( >>> sr.crps_laplace(0.3, 0.1, 0.2) 0.12357588823428847 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale = B.asarray(scale) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. 
The scale parameter of the laplace must be positive." + ) return crps.laplace(obs, location, scale, backend=backend) def crps_logistic( obs: "ArrayLike", - mu: "ArrayLike", - sigma: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the logistic distribution. @@ -1724,10 +2096,15 @@ def crps_logistic( ---------- obs : array_like Observed values. - mu: array_like + location: array_like Location parameter of the forecast logistic distribution. - sigma: array_like + scale: array_like Scale parameter of the forecast logistic distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1747,7 +2124,14 @@ def crps_logistic( >>> sr.crps_logistic(0.0, 0.4, 0.1) 0.3036299855835619 """ - return crps.logistic(obs, mu, sigma, backend=backend) + if check_pars: + B = backends.active if backend is None else backends[backend] + scale = B.asarray(scale) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the logistic distribution must be positive." + ) + return crps.logistic(obs, location, scale, backend=backend) def crps_loglaplace( @@ -1756,6 +2140,7 @@ def crps_loglaplace( scalelog: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the log-Laplace distribution. @@ -1788,6 +2173,9 @@ def crps_loglaplace( Scale parameter of the forecast log-laplace distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1807,14 +2195,23 @@ def crps_loglaplace( >>> sr.crps_loglaplace(3.0, 0.1, 0.9) 1.162020513653791 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scalelog = B.asarray(scalelog) + if B.any(scalelog <= 0) or B.any(scalelog >= 1): + raise ValueError( + "`scalelog` contains entries outside of the range (0, 1). The scale parameter of the log-laplace distribution must be between 0 and 1." + ) return crps.loglaplace(obs, locationlog, scalelog, backend=backend) def crps_loglogistic( obs: "ArrayLike", - mulog: "ArrayLike", - sigmalog: "ArrayLike", + locationlog: "ArrayLike", + scalelog: "ArrayLike", + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the log-logistic distribution. @@ -1841,13 +2238,15 @@ def crps_loglogistic( ---------- obs : array_like The observed values. - mulog : array_like + locationlog : array_like Location parameter of the log-logistic distribution. - sigmalog : array_like + scalelog : array_like Scale parameter of the log-logistic distribution. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. - + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1867,14 +2266,27 @@ def crps_loglogistic( >>> sr.crps_loglogistic(3.0, 0.1, 0.9) 1.1329527730161177 """ - return crps.loglogistic(obs, mulog, sigmalog, backend=backend) + if backend == "torch": + raise TypeError( + "Torch backend is not supported for the log-logistic distribution." 
+ ) + if check_pars: + B = backends.active if backend is None else backends[backend] + scalelog = B.asarray(scalelog) + if B.any(scalelog <= 0) or B.any(scalelog >= 1): + raise ValueError( + "`scalelog` contains entries outside of the range (0, 1). The scale parameter of the log-logistic distribution must be between 0 and 1." + ) + return crps.loglogistic(obs, locationlog, scalelog, backend=backend) def crps_lognormal( obs: "ArrayLike", mulog: "ArrayLike", sigmalog: "ArrayLike", + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the lognormal distribution. @@ -1899,6 +2311,11 @@ def crps_lognormal( Mean of the normal underlying distribution. sigmalog : array_like Standard deviation of the underlying normal distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1917,6 +2334,13 @@ def crps_lognormal( >>> sr.crps_lognormal(0.1, 0.4, 0.0) 1.3918246976412703 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + sigmalog = B.asarray(sigmalog) + if B.any(sigmalog <= 0): + raise ValueError( + "`sigmalog` contains non-positive entries. The scale parameter of the log-normal distribution must be positive." + ) return crps.lognormal(obs, mulog, sigmalog, backend=backend) @@ -1928,6 +2352,7 @@ def crps_mixnorm( m_axis: "ArrayLike" = -1, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for a mixture of normal distributions. @@ -1953,6 +2378,9 @@ def crps_mixnorm( The axis corresponding to the mixture components. Default is the last axis. backend : str, optional The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1977,9 +2405,20 @@ def crps_mixnorm( if w is None: M: int = m.shape[m_axis] - w = B.zeros(m.shape) + 1 / M + w = B.ones(m.shape) / M else: w = B.asarray(w) + w = w / B.sum(w, axis=m_axis, keepdims=True) + + if check_pars: + if B.any(s <= 0): + raise ValueError( + "`s` contains non-positive entries. The scale parameters of the normal distributions must be positive." + ) + if B.any(w < 0): + raise ValueError( + "`w` contains negative entries. The weights of the component distributions must be non-negative." + ) if m_axis != -1: m = B.moveaxis(m, m_axis, -1) @@ -1993,9 +2432,10 @@ def crps_negbinom( obs: "ArrayLike", n: "ArrayLike", prob: "ArrayLike | None" = None, - *, mu: "ArrayLike | None" = None, + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the negative binomial distribution. @@ -2018,6 +2458,11 @@ def crps_negbinom( Probability parameter of the forecast negative binomial distribution. mu: array_like Mean of the forecast negative binomial distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -2041,10 +2486,30 @@ def crps_negbinom( ValueError If both `prob` and `mu` are provided, or if neither is provided. """ + if backend in ["torch", "tensorflow", "jax"]: + raise TypeError( + "Torch, Tensorflow, and JAX backends are not supported for the Negative Binomial distribution." + ) if (prob is None and mu is None) or (prob is not None and mu is not None): raise ValueError( "Either `prob` or `mu` must be provided, but not both or neither." 
) + if check_pars: + B = backends.active if backend is None else backends[backend] + if prob is not None: + prob = B.asarray(prob) + if B.any(prob < 0) or B.any(prob > 1): + raise ValueError("`prob` contains values outside the range [0, 1].") + else: + mu = B.asarray(mu) + if B.any(mu < 0): + raise ValueError( + "`mu` contains negative values. The mean of the negative binomial distribution must be non-negative." + ) + if B.any(n <= 0): + raise ValueError( + "`n` contains non-positive entries. The size parameter of the negative binomial distribution must be positive." + ) if prob is None: prob = n / (n + mu) @@ -2054,10 +2519,11 @@ def crps_negbinom( def crps_normal( obs: "ArrayLike", - mu: "ArrayLike", - sigma: "ArrayLike", + mu: "ArrayLike" = 0.0, + sigma: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the normal distribution. @@ -2077,6 +2543,11 @@ def crps_normal( Mean of the forecast normal distribution. sigma: array_like Standard deviation of the forecast normal distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -2095,16 +2566,24 @@ def crps_normal( >>> sr.crps_normal(0.0, 0.1, 0.4) 0.10339992515976162 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + sigma = B.asarray(sigma) + if B.any(sigma <= 0): + raise ValueError( + "`sigma` contains non-positive entries. The standard deviation of the normal distribution must be positive." 
+ ) return crps.normal(obs, mu, sigma, backend=backend) def crps_2pnormal( obs: "ArrayLike", - scale1: "ArrayLike", - scale2: "ArrayLike", - location: "ArrayLike", + scale1: "ArrayLike" = 1.0, + scale2: "ArrayLike" = 1.0, + location: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the two-piece normal distribution. @@ -2129,6 +2608,11 @@ def crps_2pnormal( Scale parameter of the upper half of the forecast two-piece normal distribution. mu: array_like Location parameter of the forecast two-piece normal distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -2150,6 +2634,16 @@ def crps_2pnormal( """ B = backends.active if backend is None else backends[backend] obs, scale1, scale2, location = map(B.asarray, (obs, scale1, scale2, location)) + if check_pars: + if B.any(scale1 <= 0): + raise ValueError( + "`scale1` contains non-positive entries. The scale parameters of the two-piece normal distribution must be positive." + ) + if B.any(scale2 <= 0): + raise ValueError( + "`scale2` contains non-positive entries. The scale parameters of the two-piece normal distribution must be positive." + ) + lower = float("-inf") upper = 0.0 lmass = 0.0 @@ -2174,6 +2668,7 @@ def crps_poisson( mean: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the Poisson distribution. @@ -2193,6 +2688,11 @@ def crps_poisson( The observed values. mean : array_like Mean parameter of the forecast poisson distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -2211,6 +2711,13 @@ def crps_poisson( >>> sr.crps_poisson(1, 2) 0.4991650450203817 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + mean = B.asarray(mean) + if B.any(mean <= 0): + raise ValueError( + "`mean` contains non-positive entries. The mean parameter of the Poisson distribution must be positive." + ) return crps.poisson(obs, mean, backend=backend) @@ -2221,6 +2728,7 @@ def crps_t( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the student's t distribution. @@ -2245,8 +2753,13 @@ def crps_t( Degrees of freedom parameter of the forecast t distribution. location : array_like Location parameter of the forecast t distribution. - sigma : array_like + scale : array_like Scale parameter of the forecast t distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -2266,17 +2779,31 @@ def crps_t( >>> sr.crps_t(0.0, 0.1, 0.4, 0.1) 0.07687151141732129 """ + if backend == "torch": + raise TypeError("Torch backend is not supported for the t distribution.") + if check_pars: + B = backends.active if backend is None else backends[backend] + df, scale = map(B.asarray, (df, scale)) + if B.any(df <= 0): + raise ValueError( + "`df` contains non-positive entries. The degrees of freedom parameter of the t distribution must be positive." + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the t distribution must be positive." 
+ ) return crps.t(obs, df, location, scale, backend=backend) def crps_uniform( obs: "ArrayLike", - min: "ArrayLike", - max: "ArrayLike", + min: "ArrayLike" = 0.0, + max: "ArrayLike" = 1.0, lmass: "ArrayLike" = 0.0, umass: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the closed form of the CRPS for the uniform distribution. @@ -2304,6 +2831,11 @@ def crps_uniform( Point mass on the lower bound of the forecast uniform distribution. umass : array_like Point mass on the upper bound of the forecast uniform distribution. + backend : str, optional + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -2323,6 +2855,17 @@ def crps_uniform( >>> sr.crps_uniform(0.4, 0.0, 1.0, 0.0, 0.0) 0.09333333333333332 """ + if check_pars: + B = backends.active if backend is None else backends[backend] + lmass, umass, min, max = map(B.asarray, (lmass, umass, min, max)) + if B.any(lmass < 0) or B.any(lmass > 1): + raise ValueError("`lmass` contains entries outside the range [0, 1].") + if B.any(umass < 0) or B.any(umass > 1): + raise ValueError("`umass` contains entries outside the range [0, 1].") + if B.any(umass + lmass >= 1): + raise ValueError("The sum of `umass` and `lmass` should be smaller than 1.") + if B.any(min >= max): + raise ValueError("`min` is not always smaller than `max`.") return crps.uniform(obs, min, max, lmass, umass, backend=backend) diff --git a/scoringrules/_energy.py b/scoringrules/_energy.py index fdc101a..3bfaacc 100644 --- a/scoringrules/_energy.py +++ b/scoringrules/_energy.py @@ -219,10 +219,10 @@ def vres_ensemble( obs: "Array", fct: "Array", w_func: tp.Callable[["ArrayLike"], "ArrayLike"], - *, - ens_w: "Array" = None, m_axis: int = -2, v_axis: int = -1, + *, + ens_w: "Array" = None, 
backend: "Backend" = None, ) -> "Array": r"""Compute the Vertically Re-scaled Energy Score (vrES) for a finite multivariate ensemble. diff --git a/scoringrules/_logs.py b/scoringrules/_logs.py index 85c7694..4d70381 100644 --- a/scoringrules/_logs.py +++ b/scoringrules/_logs.py @@ -155,6 +155,7 @@ def logs_beta( upper: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the beta distribution. @@ -173,7 +174,10 @@ def logs_beta( upper : array_like Upper bound of the forecast beta distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -185,6 +189,19 @@ def logs_beta( >>> import scoringrules as sr >>> sr.logs_beta(0.3, 0.7, 1.1) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + a, b, lower, upper = map(B.asarray, (a, b, lower, upper)) + if B.any(a <= 0): + raise ValueError( + "`a` contains non-positive entries. The shape parameters of the Beta distribution must be positive." + ) + if B.any(b <= 0): + raise ValueError( + "`b` contains non-positive entries. The shape parameters of the Beta distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return logarithmic.beta(obs, a, b, lower, upper, backend=backend) @@ -194,6 +211,7 @@ def logs_binomial( prob: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the binomial distribution. @@ -204,11 +222,14 @@ def logs_binomial( obs : array_like The observed values. 
n : array_like - Size parameter of the forecast binomial distribution as an integer or array of integers. + Size parameter of the forecast binomial distribution as an integer or array of positive integers. prob : array_like Probability parameter of the forecast binomial distribution as a float or array of floats. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -220,6 +241,19 @@ def logs_binomial( >>> import scoringrules as sr >>> sr.logs_binomial(4, 10, 0.5) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + prob = B.asarray(prob) + if B.any(prob < 0) or B.any(prob > 1): + raise ValueError("`prob` contains values outside the range [0, 1].") + if B.any(n <= 0): + raise ValueError( + "`n` contains non-positive entries. The size parameter of the binomial distribution must be positive." + ) + if not B.all_integer(n): + raise ValueError( + "`n` contains non-integer entries. The size parameter of the binomial distribution must be an integer." + ) return logarithmic.binomial(obs, n, prob, backend=backend) @@ -228,6 +262,7 @@ def logs_exponential( rate: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the exponential distribution. @@ -240,7 +275,10 @@ def logs_exponential( rate : array_like Rate parameter of the forecast exponential distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -252,6 +290,13 @@ def logs_exponential( >>> import scoringrules as sr >>> sr.logs_exponential(0.8, 3.0) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + rate = B.asarray(rate) + if B.any(rate <= 0): + raise ValueError( + "`rate` contains non-positive entries. The rate parameter of the exponential distribution must be positive." + ) return logarithmic.exponential(obs, rate, backend=backend) @@ -261,6 +306,7 @@ def logs_exponential2( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the exponential distribution with location and scale parameters. @@ -275,7 +321,10 @@ def logs_exponential2( scale : array_like Scale parameter of the forecast exponential distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -287,6 +336,13 @@ def logs_exponential2( >>> import scoringrules as sr >>> sr.logs_exponential2(0.2, 0.0, 1.0) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale = B.asarray(scale) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the exponential distribution must be positive." 
+ ) return logarithmic.exponential2(obs, location, scale, backend=backend) @@ -297,6 +353,7 @@ def logs_2pexponential( location: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the two-piece exponential distribution. @@ -313,7 +370,10 @@ def logs_2pexponential( location : array_like Location parameter of the forecast two-piece exponential distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -325,6 +385,17 @@ def logs_2pexponential( >>> import scoringrules as sr >>> sr.logs_2pexponential(0.8, 3.0, 1.4, 0.0) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale1, scale2 = map(B.asarray, (scale1, scale2)) + if B.any(scale1 <= 0): + raise ValueError( + "`scale1` contains non-positive entries. The scale parameters of the two-piece exponential distribution must be positive." + ) + if B.any(scale2 <= 0): + raise ValueError( + "`scale2` contains non-positive entries. The scale parameters of the two-piece exponential distribution must be positive." + ) return logarithmic.twopexponential(obs, scale1, scale2, location, backend=backend) @@ -335,6 +406,7 @@ def logs_gamma( *, scale: "ArrayLike | None" = None, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the gamma distribution. @@ -350,6 +422,11 @@ def logs_gamma( Rate parameter of the forecast gamma distribution. scale : array_like Scale parameter of the forecast gamma distribution, where `scale = 1 / rate`. + backend : str + The name of the backend used for computations. 
Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -374,6 +451,18 @@ def logs_gamma( if rate is None: rate = 1.0 / scale + if check_pars: + B = backends.active if backend is None else backends[backend] + shape, rate = map(B.asarray, (shape, rate)) + if B.any(shape <= 0): + raise ValueError( + "`shape` contains non-positive entries. The shape parameter of the gamma distribution must be positive." + ) + if B.any(rate <= 0): + raise ValueError( + "`rate` or `scale` contains non-positive entries. The rate and scale parameters of the gamma distribution must be positive." + ) + return logarithmic.gamma(obs, shape, rate, backend=backend) @@ -384,6 +473,7 @@ def logs_gev( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the generalised extreme value (GEV) distribution. @@ -400,7 +490,10 @@ def logs_gev( scale : array_like Scale parameter of the forecast GEV distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -412,6 +505,13 @@ def logs_gev( >>> import scoringrules as sr >>> sr.logs_gev(0.3, 0.1) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, shape = map(B.asarray, (scale, shape)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the GEV distribution must be positive." 
+ ) return logarithmic.gev(obs, shape, location, scale, backend=backend) @@ -422,6 +522,7 @@ def logs_gpd( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the generalised Pareto distribution (GPD). @@ -439,7 +540,10 @@ def logs_gpd( scale : array_like Scale parameter of the forecast GPD distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -451,6 +555,13 @@ def logs_gpd( >>> import scoringrules as sr >>> sr.logs_gpd(0.3, 0.9) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, shape = map(B.asarray, (scale, shape)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the GPD distribution must be positive. `nan` is returned in these places." + ) return logarithmic.gpd(obs, shape, location, scale, backend=backend) @@ -461,6 +572,7 @@ def logs_hypergeometric( k: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the hypergeometric distribution. @@ -477,7 +589,10 @@ def logs_hypergeometric( k : array_like Number of draws, without replacement. Must be in 0, 1, ..., m + n. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. 
Returns ------- @@ -489,6 +604,36 @@ def logs_hypergeometric( >>> import scoringrules as sr >>> sr.logs_hypergeometric(5, 7, 13, 12) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + if B.any(m < 0): + raise ValueError( + "`m` contains negative entries. The number of successes in the hypergeometric distribution must be non-negative." + ) + if B.any(n < 0): + raise ValueError( + "`n` contains negative entries. The number of failures in the hypergeometric distribution must be non-negative." + ) + if B.any(k < 0): + raise ValueError( + "`k` contains negative entries. The number of draws in the hypergeometric distribution must be non-negative." + ) + if B.any(k > m + n): + raise ValueError( + "`k` contains values larger than `m + n`. The number of draws in the hypergeometric distribution must be between the number of successes and failures." + ) + if not B.all_integer(m): + raise ValueError( + "`m` contains non-integer entries. The number of successes in the hypergeometric distribution must be an integer." + ) + if not B.all_integer(n): + raise ValueError( + "`n` contains non-integer entries. The number of failures in the hypergeometric distribution must be an integer." + ) + if not B.all_integer(k): + raise ValueError( + "`k` contains non-integer entries. The number of draws in the hypergeometric distribution must be an integer." + ) return logarithmic.hypergeometric(obs, m, n, k, backend=backend) @@ -498,6 +643,7 @@ def logs_laplace( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the Laplace distribution. @@ -513,6 +659,11 @@ def logs_laplace( scale : array_like Scale parameter of the forecast laplace distribution. The LS between obs and Laplace(location, scale). + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. 
+ check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -524,6 +675,13 @@ def logs_laplace( >>> import scoringrules as sr >>> sr.logs_laplace(0.3, 0.1, 0.2) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale = B.asarray(scale) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the laplace must be positive." + ) return logarithmic.laplace(obs, location, scale, backend=backend) @@ -533,6 +691,7 @@ def logs_loglaplace( scalelog: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the log-Laplace distribution. @@ -546,6 +705,11 @@ def logs_loglaplace( Location parameter of the forecast log-laplace distribution. scalelog : array_like Scale parameter of the forecast log-laplace distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -557,15 +721,23 @@ def logs_loglaplace( >>> import scoringrules as sr >>> sr.logs_loglaplace(3.0, 0.1, 0.9) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scalelog = B.asarray(scalelog) + if B.any(scalelog <= 0) or B.any(scalelog >= 1): + raise ValueError( + "`scalelog` contains entries outside of the range (0, 1). The scale parameter of the log-laplace distribution must be between 0 and 1." 
+ ) return logarithmic.loglaplace(obs, locationlog, scalelog, backend=backend) def logs_logistic( obs: "ArrayLike", - mu: "ArrayLike", - sigma: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the logistic distribution. @@ -575,29 +747,43 @@ def logs_logistic( ---------- obs : array_like Observed values. - mu : array_like + location : array_like Location parameter of the forecast logistic distribution. - sigma : array_like + scale : array_like Scale parameter of the forecast logistic distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- score : array_like - The LS for the Logistic(mu, sigma) forecasts given the observations. + The LS for the Logistic(location, scale) forecasts given the observations. Examples -------- >>> import scoringrules as sr >>> sr.logs_logistic(0.0, 0.4, 0.1) """ - return logarithmic.logistic(obs, mu, sigma, backend=backend) + if check_pars: + B = backends.active if backend is None else backends[backend] + scale = B.asarray(scale) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the logistic distribution must be positive." + ) + return logarithmic.logistic(obs, location, scale, backend=backend) def logs_loglogistic( obs: "ArrayLike", - mulog: "ArrayLike", - sigmalog: "ArrayLike", + locationlog: "ArrayLike", + scalelog: "ArrayLike", + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the log-logistic distribution. @@ -607,31 +793,43 @@ def logs_loglogistic( ---------- obs : array_like The observed values. 
- mulog : array_like + locationlog : array_like Location parameter of the log-logistic distribution. - sigmalog : array_like + scalelog : array_like Scale parameter of the log-logistic distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- score : array_like - The LS between obs and Loglogis(mulog, sigmalog). + The LS between obs and Loglogis(locationlog, scalelog). Examples -------- >>> import scoringrules as sr >>> sr.logs_loglogistic(3.0, 0.1, 0.9) """ - return logarithmic.loglogistic(obs, mulog, sigmalog, backend=backend) + if check_pars: + B = backends.active if backend is None else backends[backend] + scalelog = B.asarray(scalelog) + if B.any(scalelog <= 0) or B.any(scalelog >= 1): + raise ValueError( + "`scalelog` contains entries outside of the range (0, 1). The scale parameter of the log-logistic distribution must be between 0 and 1." + ) + return logarithmic.loglogistic(obs, locationlog, scalelog, backend=backend) def logs_lognormal( obs: "ArrayLike", mulog: "ArrayLike", sigmalog: "ArrayLike", + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the log-normal distribution. @@ -645,6 +843,11 @@ def logs_lognormal( Mean of the normal underlying distribution. sigmalog : array_like Standard deviation of the underlying normal distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. 
Returns ------- @@ -656,6 +859,13 @@ def logs_lognormal( >>> import scoringrules as sr >>> sr.logs_lognormal(0.0, 0.4, 0.1) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + sigmalog = B.asarray(sigmalog) + if B.any(sigmalog <= 0): + raise ValueError( + "`sigmalog` contains non-positive entries. The scale parameter of the log-normal distribution must be positive." + ) return logarithmic.lognormal(obs, mulog, sigmalog, backend=backend) @@ -664,9 +874,10 @@ def logs_mixnorm( m: "ArrayLike", s: "ArrayLike", w: "ArrayLike" = None, - mc_axis: "ArrayLike" = -1, + m_axis: "ArrayLike" = -1, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score for a mixture of normal distributions. @@ -682,10 +893,13 @@ def logs_mixnorm( Standard deviations of the component normal distributions. w : array_like Non-negative weights assigned to each component. - mc_axis : int + m_axis : int The axis corresponding to the mixture components. Default is the last axis. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -701,15 +915,24 @@ def logs_mixnorm( obs, m, s = map(B.asarray, (obs, m, s)) if w is None: - M: int = m.shape[mc_axis] + M: int = m.shape[m_axis] w = B.ones(m.shape) / M else: w = B.asarray(w) + w = w / B.sum(w, axis=m_axis, keepdims=True) - if mc_axis != -1: - m = B.moveaxis(m, mc_axis, -1) - s = B.moveaxis(s, mc_axis, -1) - w = B.moveaxis(w, mc_axis, -1) + if check_pars: + if B.any(s <= 0): + raise ValueError( + "`s` contains non-positive entries. The scale parameters of the normal distributions should be positive." 
+ ) + if B.any(w < 0): + raise ValueError("`w` contains negative entries") + + if m_axis != -1: + m = B.moveaxis(m, m_axis, -1) + s = B.moveaxis(s, m_axis, -1) + w = B.moveaxis(w, m_axis, -1) return logarithmic.mixnorm(obs, m, s, w, backend=backend) @@ -718,9 +941,10 @@ def logs_negbinom( obs: "ArrayLike", n: "ArrayLike", prob: "ArrayLike | None" = None, - *, mu: "ArrayLike | None" = None, + *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the negative binomial distribution. @@ -736,6 +960,11 @@ def logs_negbinom( Probability parameter of the forecast negative binomial distribution. mu : array_like Mean of the forecast negative binomial distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -752,10 +981,34 @@ def logs_negbinom( ValueError If both `prob` and `mu` are provided, or if neither is provided. """ + if backend in ["torch", "tensorflow", "jax"]: + raise TypeError( + "Torch, Tensorflow, and JAX backends are not supported for the Negative Binomial distribution." + ) if (prob is None and mu is None) or (prob is not None and mu is not None): raise ValueError( "Either `prob` or `mu` must be provided, but not both or neither." ) + if check_pars: + B = backends.active if backend is None else backends[backend] + if prob is not None: + prob = B.asarray(prob) + if B.any(prob < 0) or B.any(prob > 1): + raise ValueError("`prob` contains values outside the range [0, 1].") + else: + mu = B.asarray(mu) + if B.any(mu < 0): + raise ValueError( + "`mu` contains negative values. The mean of the negative binomial distribution must be non-negative." + ) + if B.any(n <= 0): + raise ValueError( + "`n` contains non-positive entries. 
The size parameter of the negative binomial distribution must be positive." + ) + if not B.all_integer(n): + raise ValueError( + "`n` contains non-integer entries. The size parameter of the negative binomial distribution must be an integer." + ) if prob is None: prob = n / (n + mu) @@ -765,15 +1018,15 @@ def logs_negbinom( def logs_normal( obs: "ArrayLike", - mu: "ArrayLike", - sigma: "ArrayLike", + mu: "ArrayLike" = 0.0, + sigma: "ArrayLike" = 1.0, *, - negative: bool = True, backend: "Backend" = None, + check_pars: bool = False, ) -> "Array": r"""Compute the logarithmic score (LS) for the normal distribution. - This score is equivalent to the (negative) log likelihood (if `negative = True`) + This score is equivalent to the negative log likelihood of a normal distribution. Parameters ---------- @@ -783,8 +1036,11 @@ def logs_normal( Mean of the forecast normal distribution. sigma : array_like Standard deviation of the forecast normal distribution. - backend : str, optional - The backend used for computations. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -796,16 +1052,24 @@ def logs_normal( >>> import scoringrules as sr >>> sr.logs_normal(0.0, 0.4, 0.1) """ - return logarithmic.normal(obs, mu, sigma, negative=negative, backend=backend) + if check_pars: + B = backends.active if backend is None else backends[backend] + sigma = B.asarray(sigma) + if B.any(sigma <= 0): + raise ValueError( + "`sigma` contains non-positive entries. The standard deviation of the normal distribution must be positive." 
+ ) + return logarithmic.normal(obs, mu, sigma, backend=backend) def logs_2pnormal( obs: "ArrayLike", - scale1: "ArrayLike", - scale2: "ArrayLike", - location: "ArrayLike", + scale1: "ArrayLike" = 1.0, + scale2: "ArrayLike" = 1.0, + location: "ArrayLike" = 0.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the two-piece normal distribution. @@ -822,7 +1086,10 @@ def logs_2pnormal( location : array_like Location parameter of the forecast two-piece normal distribution. backend : str - The name of the backend used for computations. Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -833,6 +1100,17 @@ def logs_2pnormal( >>> import scoringrules as sr >>> sr.logs_2pnormal(0.0, 0.4, 2.0, 0.1) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale1, scale2 = map(B.asarray, (scale1, scale2)) + if B.any(scale1 <= 0): + raise ValueError( + "`scale1` contains non-positive entries. The scale parameters of the two-piece normal distribution must be positive." + ) + if B.any(scale2 <= 0): + raise ValueError( + "`scale2` contains non-positive entries. The scale parameters of the two-piece normal distribution must be positive." + ) return logarithmic.twopnormal(obs, scale1, scale2, location, backend=backend) @@ -841,6 +1119,7 @@ def logs_poisson( mean: "ArrayLike", *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the Poisson distribution. @@ -853,7 +1132,10 @@ def logs_poisson( mean : array_like Mean parameter of the forecast poisson distribution. backend : str - The name of the backend used for computations. 
Defaults to 'numba' if available, else 'numpy'. + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -865,6 +1147,13 @@ def logs_poisson( >>> import scoringrules as sr >>> sr.logs_poisson(1, 2) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + mean = B.asarray(mean) + if B.any(mean <= 0): + raise ValueError( + "`mean` contains non-positive entries. The mean parameter of the Poisson distribution must be positive." + ) return logarithmic.poisson(obs, mean, backend=backend) @@ -875,6 +1164,7 @@ def logs_t( scale: "ArrayLike" = 1.0, *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the Student's t distribution. @@ -890,6 +1180,11 @@ def logs_t( Location parameter of the forecast t distribution. sigma : array_like Scale parameter of the forecast t distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -901,17 +1196,31 @@ def logs_t( >>> import scoringrules as sr >>> sr.logs_t(0.0, 0.1, 0.4, 0.1) """ + if backend == "torch": + raise TypeError("Torch backend is not supported for the t distribution.") + if check_pars: + B = backends.active if backend is None else backends[backend] + df, scale = map(B.asarray, (df, scale)) + if B.any(df <= 0): + raise ValueError( + "`df` contains non-positive entries. The degrees of freedom parameter of the t distribution must be positive." + ) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the t distribution must be positive." 
+ ) return logarithmic.t(obs, df, location, scale, backend=backend) def logs_tlogistic( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the truncated logistic distribution. @@ -929,6 +1238,11 @@ def logs_tlogistic( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -940,17 +1254,27 @@ def logs_tlogistic( >>> import scoringrules as sr >>> sr.logs_tlogistic(0.0, 0.1, 0.4, -1.0, 1.0) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the truncated logistic distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return logarithmic.tlogistic(obs, location, scale, lower, upper, backend=backend) def logs_tnormal( obs: "ArrayLike", - location: "ArrayLike", - scale: "ArrayLike", + location: "ArrayLike" = 0.0, + scale: "ArrayLike" = 1.0, lower: "ArrayLike" = float("-inf"), upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the truncated normal distribution. @@ -968,6 +1292,11 @@ def logs_tnormal( Lower boundary of the truncated forecast distribution. 
upper : array_like Upper boundary of the truncated forecast distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -979,6 +1308,15 @@ def logs_tnormal( >>> import scoringrules as sr >>> sr.logs_tnormal(0.0, 0.1, 0.4, -1.0, 1.0) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + scale, lower, upper = map(B.asarray, (scale, lower, upper)) + if B.any(scale <= 0): + raise ValueError( + "`scale` contains non-positive entries. The scale parameter of the truncated normal distribution must be positive." + ) + if B.any(lower >= upper): + raise ValueError("`lower` is not always smaller than `upper`.") return logarithmic.tnormal(obs, location, scale, lower, upper, backend=backend) @@ -991,6 +1329,7 @@ def logs_tt( upper: "ArrayLike" = float("inf"), *, backend: "Backend" = None, + check_pars: bool = False, ) -> "ArrayLike": r"""Compute the logarithmic score (LS) for the truncated Student's t distribution. @@ -1010,6 +1349,11 @@ def logs_tt( Lower boundary of the truncated forecast distribution. upper : array_like Upper boundary of the truncated forecast distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False. Returns ------- @@ -1021,15 +1365,33 @@ def logs_tt( >>> import scoringrules as sr >>> sr.logs_tt(0.0, 2.0, 0.1, 0.4, -1.0, 1.0) """ + if backend in ["torch", "tensorflow", "jax"]: + raise TypeError( + "Torch, Tensorflow, and JAX backends are not supported for the Truncated t distribution." 
+ )
+ if check_pars:
+ B = backends.active if backend is None else backends[backend]
+ df, scale, lower, upper = map(B.asarray, (df, scale, lower, upper))
+ if B.any(df <= 0):
+ raise ValueError(
+ "`df` contains non-positive entries. The degrees of freedom parameter of the truncated t distribution must be positive."
+ )
+ if B.any(scale <= 0):
+ raise ValueError(
+ "`scale` contains non-positive entries. The scale parameter of the truncated t distribution must be positive."
+ )
+ if B.any(lower >= upper):
+ raise ValueError("`lower` is not always smaller than `upper`.")
 return logarithmic.tt(obs, df, location, scale, lower, upper, backend=backend)
 
 
 def logs_uniform(
 obs: "ArrayLike",
- min: "ArrayLike",
- max: "ArrayLike",
+ min: "ArrayLike" = 0.0,
+ max: "ArrayLike" = 1.0,
 *,
 backend: "Backend" = None,
+ check_pars: bool = False,
 ) -> "ArrayLike":
 r"""Compute the logarithmic score (LS) for the uniform distribution.
@@ -1043,6 +1405,11 @@ def logs_uniform( Lower bound of the forecast uniform distribution. max : array_like Upper bound of the forecast uniform distribution. + backend : str + The name of the backend used for computations. Defaults to ``numba`` if available, else ``numpy``. + check_pars: bool + Boolean indicating whether distribution parameter checks should be carried out prior to implementation. + Default is False.
Returns ------- @@ -1054,6 +1421,11 @@ def logs_uniform( >>> import scoringrules as sr >>> sr.logs_uniform(0.4, 0.0, 1.0) """ + if check_pars: + B = backends.active if backend is None else backends[backend] + min, max = map(B.asarray, (min, max)) + if B.any(min >= max): + raise ValueError("`min` is not always smaller than `max`.") return logarithmic.uniform(obs, min, max, backend=backend) diff --git a/scoringrules/backend/base.py b/scoringrules/backend/base.py index 1f99917..ba58843 100644 --- a/scoringrules/backend/base.py +++ b/scoringrules/backend/base.py @@ -342,3 +342,7 @@ def det(self, x: "Array") -> "Array": @abc.abstractmethod def reshape(self, x: "Array", shape: int | tuple[int, ...]) -> "Array": """Reshape an array to a new ``shape``.""" + + @abc.abstractmethod + def all_integer(self, x: "Array") -> bool: + """Check whether all entries of an array are integers""" diff --git a/scoringrules/backend/jax.py b/scoringrules/backend/jax.py index 9cf4074..d13f324 100644 --- a/scoringrules/backend/jax.py +++ b/scoringrules/backend/jax.py @@ -304,6 +304,9 @@ def det(self, x: "Array") -> "Array": def reshape(self, x: "Array", shape: int | tuple[int, ...]) -> "Array": return jnp.reshape(x, shape) + def all_integer(self, x: "Array"): + return jnp.all(x % 1 == 0).item() + if __name__ == "__main__": B = JaxBackend() diff --git a/scoringrules/backend/numpy.py b/scoringrules/backend/numpy.py index 6cbfc08..e7fd3a8 100644 --- a/scoringrules/backend/numpy.py +++ b/scoringrules/backend/numpy.py @@ -301,6 +301,9 @@ def det(self, x: "NDArray") -> "NDArray": def reshape(self, x: "NDArray", shape: int | tuple[int, ...]) -> "NDArray": return np.reshape(x, shape) + def all_integer(self, x: "NDArray") -> bool: + return np.all(np.mod(x, 1) == 0) + class NumbaBackend(NumpyBackend): """Numba backend.""" diff --git a/scoringrules/backend/tensorflow.py b/scoringrules/backend/tensorflow.py index 9d58712..698fb07 100644 --- a/scoringrules/backend/tensorflow.py +++ 
b/scoringrules/backend/tensorflow.py @@ -352,6 +352,9 @@ def det(self, x: "Tensor") -> "Tensor": def reshape(self, x: "Tensor", shape: int | tuple[int, ...]) -> "Tensor": return tf.reshape(x, shape) + def all_integer(self, x: "Tensor") -> bool: + return tf.reduce_all(tf.math.mod(x, 1) == 0).numpy().item() + if __name__ == "__main__": B = TensorflowBackend() diff --git a/scoringrules/backend/torch.py b/scoringrules/backend/torch.py index 1eb141c..3274541 100644 --- a/scoringrules/backend/torch.py +++ b/scoringrules/backend/torch.py @@ -328,3 +328,6 @@ def det(self, x: "Tensor") -> "Tensor": def reshape(self, x: "Tensor", shape: int | tuple[int, ...]) -> "Tensor": return torch.reshape(x, shape) + + def all_integer(self, x: "Tensor") -> bool: + return torch.all(x % 1 == 0).item() diff --git a/scoringrules/core/crps/_approx.py b/scoringrules/core/crps/_approx.py index 3f282a2..04c99eb 100644 --- a/scoringrules/core/crps/_approx.py +++ b/scoringrules/core/crps/_approx.py @@ -34,66 +34,84 @@ def ensemble( return out -def _crps_ensemble_fair( +def _crps_ensemble_akr( obs: "Array", fct: "Array", backend: "Backend" = None ) -> "Array": - """Fair version of the CRPS estimator based on the energy form.""" + """CRPS estimator based on the approximate kernel representation.""" B = backends.active if backend is None else backends[backend] M: int = fct.shape[-1] e_1 = B.sum(B.abs(obs[..., None] - fct), axis=-1) / M - e_2 = B.sum( - B.abs(fct[..., None] - fct[..., None, :]), - axis=(-1, -2), - ) / (M * (M - 1)) + e_2 = B.sum(B.abs(fct - B.roll(fct, shift=1, axis=-1)), -1) / M return e_1 - 0.5 * e_2 -def _crps_ensemble_nrg( +def _crps_ensemble_akr_circperm( obs: "Array", fct: "Array", backend: "Backend" = None ) -> "Array": - """CRPS estimator based on the energy form.""" + """CRPS estimator based on the AKR with cyclic permutation.""" B = backends.active if backend is None else backends[backend] M: int = fct.shape[-1] e_1 = B.sum(B.abs(obs[..., None] - fct), axis=-1) / M - e_2 = 
B.sum(B.abs(fct[..., None] - fct[..., None, :]), (-1, -2)) / (M**2) + shift = M // 2 + e_2 = B.sum(B.abs(fct - B.roll(fct, shift=shift, axis=-1)), -1) / M return e_1 - 0.5 * e_2 -def _crps_ensemble_pwm( +def _crps_ensemble_int( obs: "Array", fct: "Array", backend: "Backend" = None ) -> "Array": - """CRPS estimator based on the probability weighted moment (PWM) form.""" + """CRPS estimator based on the integral representation.""" B = backends.active if backend is None else backends[backend] M: int = fct.shape[-1] - expected_diff = B.sum(B.abs(obs[..., None] - fct), axis=-1) / M - β_0 = B.sum(fct, axis=-1) / M - β_1 = B.sum(fct * B.arange(0, M), axis=-1) / (M * (M - 1.0)) - return expected_diff + β_0 - 2.0 * β_1 + y_pos = B.mean((fct <= obs[..., None]) * 1.0, axis=-1, keepdims=True) + fct_cdf = B.zeros(fct.shape) + B.arange(1, M + 1) / M + fct_cdf = B.concat((fct_cdf, y_pos), axis=-1) + fct_cdf = B.sort(fct_cdf, axis=-1) + fct_exp = B.concat((fct, obs[..., None]), axis=-1) + fct_exp = B.sort(fct_exp, axis=-1) + fct_dif = fct_exp[..., 1:] - fct_exp[..., :M] + obs_cdf = (obs[..., None] <= fct_exp) * 1.0 + out = fct_dif * (fct_cdf[..., :M] - obs_cdf[..., :M]) ** 2 + return B.sum(out, axis=-1) -def _crps_ensemble_akr( +def _crps_ensemble_fair( obs: "Array", fct: "Array", backend: "Backend" = None ) -> "Array": - """CRPS estimator based on the approximate kernel representation.""" + """Fair version of the CRPS estimator based on the energy form.""" B = backends.active if backend is None else backends[backend] M: int = fct.shape[-1] e_1 = B.sum(B.abs(obs[..., None] - fct), axis=-1) / M - e_2 = B.sum(B.abs(fct - B.roll(fct, shift=1, axis=-1)), -1) / M + e_2 = B.sum( + B.abs(fct[..., None] - fct[..., None, :]), + axis=(-1, -2), + ) / (M * (M - 1)) return e_1 - 0.5 * e_2 -def _crps_ensemble_akr_circperm( +def _crps_ensemble_nrg( obs: "Array", fct: "Array", backend: "Backend" = None ) -> "Array": - """CRPS estimator based on the AKR with cyclic permutation.""" + """CRPS 
estimator based on the energy form.""" B = backends.active if backend is None else backends[backend] M: int = fct.shape[-1] e_1 = B.sum(B.abs(obs[..., None] - fct), axis=-1) / M - shift = M // 2 - e_2 = B.sum(B.abs(fct - B.roll(fct, shift=shift, axis=-1)), -1) / M + e_2 = B.sum(B.abs(fct[..., None] - fct[..., None, :]), (-1, -2)) / (M**2) return e_1 - 0.5 * e_2 +def _crps_ensemble_pwm( + obs: "Array", fct: "Array", backend: "Backend" = None +) -> "Array": + """CRPS estimator based on the probability weighted moment (PWM) form.""" + B = backends.active if backend is None else backends[backend] + M: int = fct.shape[-1] + expected_diff = B.sum(B.abs(obs[..., None] - fct), axis=-1) / M + β_0 = B.sum(fct, axis=-1) / M + β_1 = B.sum(fct * B.arange(0, M), axis=-1) / (M * (M - 1.0)) + return expected_diff + β_0 - 2.0 * β_1 + + def _crps_ensemble_qd(obs: "Array", fct: "Array", backend: "Backend" = None) -> "Array": """CRPS estimator based on the quantile decomposition form.""" B = backends.active if backend is None else backends[backend] @@ -105,28 +123,10 @@ def _crps_ensemble_qd(obs: "Array", fct: "Array", backend: "Backend" = None) -> return 2 * out -def _crps_ensemble_int( - obs: "Array", fct: "Array", backend: "Backend" = None -) -> "Array": - """CRPS estimator based on the integral representation.""" - B = backends.active if backend is None else backends[backend] - M: int = fct.shape[-1] - y_pos = B.mean((fct <= obs[..., None]) * 1.0, axis=-1, keepdims=True) - fct_cdf = B.zeros(fct.shape) + B.arange(1, M + 1) / M - fct_cdf = B.concat((fct_cdf, y_pos), axis=-1) - fct_cdf = B.sort(fct_cdf, axis=-1) - fct_exp = B.concat((fct, obs[..., None]), axis=-1) - fct_exp = B.sort(fct_exp, axis=-1) - fct_dif = fct_exp[..., 1:] - fct_exp[..., :M] - obs_cdf = (obs[..., None] <= fct_exp) * 1.0 - out = fct_dif * (fct_cdf[..., :M] - obs_cdf[..., :M]) ** 2 - return B.sum(out, axis=-1) - - def quantile_pinball( obs: "Array", fct: "Array", alpha: "Array", backend: "Backend" = None ) -> 
"Array": - """CRPS approximation via Pinball Loss.""" + """CRPS estimator based on a sample of quantiles and the quantile/pinball loss.""" B = backends.active if backend is None else backends[backend] below = (fct <= obs[..., None]) * alpha * (obs[..., None] - fct) above = (fct > obs[..., None]) * (1 - alpha) * (fct - obs[..., None]) @@ -140,16 +140,15 @@ def ow_ensemble( fw: "Array", backend: "Backend" = None, ) -> "Array": - """Outcome-Weighted CRPS estimator based on the energy form.""" + """Outcome-weighted CRPS for an ensemble forecast.""" B = backends.active if backend is None else backends[backend] - M: int = fct.shape[-1] wbar = B.mean(fw, axis=-1) - e_1 = B.sum(B.abs(obs[..., None] - fct) * fw, axis=-1) * ow / (M * wbar) - e_2 = B.sum( + e_1 = B.mean(B.abs(obs[..., None] - fct) * fw, axis=-1) * ow / wbar + e_2 = B.mean( B.abs(fct[..., None] - fct[..., None, :]) * fw[..., None] * fw[..., None, :], axis=(-1, -2), ) - e_2 *= ow / (M**2 * wbar**2) + e_2 *= ow / (wbar**2) return e_1 - 0.5 * e_2 @@ -160,15 +159,14 @@ def vr_ensemble( fw: "Array", backend: "Backend" = None, ) -> "Array": - """Vertically Re-scaled CRPS estimator based on the energy form.""" + """Vertically re-scaled CRPS for an ensemble forecast.""" B = backends.active if backend is None else backends[backend] - M: int = fct.shape[-1] - e_1 = B.sum(B.abs(obs[..., None] - fct) * fw, axis=-1) * ow / M - e_2 = B.sum( + e_1 = B.mean(B.abs(obs[..., None] - fct) * fw, axis=-1) * ow + e_2 = B.mean( B.abs(B.expand_dims(fct, axis=-1) - B.expand_dims(fct, axis=-2)) * (B.expand_dims(fw, axis=-1) * B.expand_dims(fw, axis=-2)), axis=(-1, -2), - ) / (M**2) + ) e_3 = B.mean(B.abs(fct) * fw, axis=-1) - B.abs(obs) * ow e_3 *= B.mean(fw, axis=1) - ow return e_1 - 0.5 * e_2 + e_3 diff --git a/scoringrules/core/crps/_closed.py b/scoringrules/core/crps/_closed.py index 713c6aa..24ee6e7 100644 --- a/scoringrules/core/crps/_closed.py +++ b/scoringrules/core/crps/_closed.py @@ -35,28 +35,20 @@ def beta( """Compute the 
CRPS for the beta distribution.""" B = backends.active if backend is None else backends[backend] obs, a, b, lower, upper = map(B.asarray, (obs, a, b, lower, upper)) + a = B.where(a > 0.0, a, B.nan) + b = B.where(b > 0.0, b, B.nan) + lower = B.where(lower < upper, lower, B.nan) - if _is_scalar_value(lower, 0.0) and _is_scalar_value(upper, 1.0): - special_limits = False - else: - if B.any(lower >= upper): - raise ValueError("lower must be less than upper") - special_limits = True + obs = (obs - lower) / (upper - lower) + obs_std = B.minimum(B.maximum(obs, 0.0), 1.0) - if special_limits: - obs = (obs - lower) / (upper - lower) + F_ab = B.betainc(a, b, obs_std) + F_a1b = B.betainc(a + 1, b, obs_std) - I_ab = B.betainc(a, b, obs) - I_a1b = B.betainc(a + 1, b, obs) - F_ab = B.minimum(B.maximum(I_ab, 0), 1) - F_a1b = B.minimum(B.maximum(I_a1b, 0), 1) bet_rat = 2 * B.beta(2 * a, 2 * b) / (a * B.beta(a, b) ** 2) s = obs * (2 * F_ab - 1) + (a / (a + b)) * (1 - 2 * F_a1b - bet_rat) - if special_limits: - s = s * (upper - lower) - - return s + return s * (upper - lower) def binomial( @@ -133,26 +125,18 @@ def exponential( def exponentialM( obs: "ArrayLike", - mass: "ArrayLike", location: "ArrayLike", scale: "ArrayLike", + mass: "ArrayLike", backend: "Backend" = None, ) -> "Array": """Compute the CRPS for the standard exponential distribution with a point mass at the boundary.""" B = backends.active if backend is None else backends[backend] obs, location, scale, mass = map(B.asarray, (obs, location, scale, mass)) - - if not _is_scalar_value(location, 0.0): - obs -= location - - a = 1.0 if _is_scalar_value(mass, 0.0) else 1 - mass + obs -= location + a = 1.0 - mass s = B.abs(obs) - - if _is_scalar_value(scale, 1.0): - s -= a * (2 * _exp_cdf(obs, 1.0, backend=backend) - 0.5 * a) - else: - s -= scale * a * (2 * _exp_cdf(obs, 1 / scale, backend=backend) - 0.5 * a) - + s -= scale * a * (2 * _exp_cdf(obs, 1 / scale, backend=backend) - 0.5 * a) return s @@ -291,6 +275,7 @@ def gpd( ) 
shape = B.where(shape < 1.0, shape, B.nan) mass = B.where((mass >= 0.0) & (mass <= 1.0), mass, B.nan) + scale = B.where(scale > 0.0, scale, B.nan) ω = (obs - location) / scale F_xi = _gpd_cdf(ω, shape, backend=backend) s = ( @@ -317,38 +302,40 @@ def gtclogistic( B.asarray, (obs, location, scale, lower, upper, lmass, umass) ) ω = (obs - mu) / sigma - u = (upper - mu) / sigma l = (lower - mu) / sigma + u = (upper - mu) / sigma z = B.minimum(B.maximum(ω, l), u) + + u_inf = upper == float("inf") + l_inf = lower == float("-inf") + U_pos = umass > 0.0 + L_pos = lmass > 0.0 + F_u = _logis_cdf(u, backend=backend) F_l = _logis_cdf(l, backend=backend) F_mu = _logis_cdf(-u, backend=backend) F_ml = _logis_cdf(-l, backend=backend) F_mz = _logis_cdf(-z, backend=backend) - u_inf = u == float("inf") - l_inf = l == float("-inf") - - F_mu = B.where(u_inf | l_inf, B.nan, F_mu) - F_ml = B.where(u_inf | l_inf, B.nan, F_ml) u = B.where(u_inf, B.nan, u) l = B.where(l_inf, B.nan, l) - G_u = B.where(u_inf, 0.0, u * F_u + B.log(F_mu)) - G_l = B.where(l_inf, 0.0, l * F_l + B.log(F_ml)) - H_u = B.where(u_inf, 1.0, F_u - u * F_u**2 + (1 - 2 * F_u) * B.log(F_mu)) - H_l = B.where(l_inf, 0.0, F_l - l * F_l**2 + (1 - 2 * F_l) * B.log(F_ml)) - - c = (1 - lmass - umass) / (F_u - F_l) + uU2 = B.where(~u_inf | U_pos, u * umass**2, 0.0) + lL2 = B.where(~l_inf | L_pos, l * lmass**2, 0.0) + GuU = B.where(~u_inf, (u * F_u + B.log(F_mu)) * umass, 0.0) + GlL = B.where(~l_inf, (l * F_l + B.log(F_ml)) * lmass, 0.0) + Hu = B.where(~u_inf, F_u - u * F_u**2 + (1 - 2 * F_u) * B.log(F_mu), 1.0) + Hl = B.where(~l_inf, F_l - l * F_l**2 + (1 - 2 * F_l) * B.log(F_ml), 0.0) - s1_u = B.where(u_inf & (umass == 0.0), 0.0, u * umass**2) - s1_l = B.where(l_inf & (lmass == 0.0), 0.0, l * lmass**2) + a1 = F_u - F_l + a2 = 1 - lmass - umass - s1 = B.abs(ω - z) + s1_u - s1_l - s2 = c * z * ((1 - 2 * lmass) * F_u + (1 - 2 * umass) * F_l) / (1 - lmass - umass) - s3 = c * (2 * B.log(F_mz) - 2 * G_u * umass - 2 * G_l * lmass) - s4 
= c**2 * (H_u - H_l) - return sigma * (s1 - s2 - s3 - s4) + s1 = B.abs(ω - z) + uU2 - lL2 + s2 = z * ((1 - 2 * lmass) * F_u + (1 - 2 * umass) * F_l) + s3 = 2 * (B.log(F_mz) - GuU - GlL) + s4 = Hu - Hl + s = s1 - (s2 / a1) - (s3 * a2 / a1) - (s4 * (a2 / a1) ** 2) + return sigma * s def gtcnormal( @@ -390,17 +377,11 @@ def gtcnormal( c = (1 - lmass - umass) / (F_u - F_l) s1 = B.abs(ω - z) + s1_u - s1_l - s2 = ( - c - * z - * ( - 2 * F_z - - ((1 - 2 * lmass) * F_u + (1 - 2 * umass) * F_l) / (1 - lmass - umass) - ) - ) - s3 = c * (2 * f_z - 2 * f_u * umass - 2 * f_l * lmass) - s4 = c**2 * (F_u2 - F_l2) / B.sqrt(B.pi) - return sigma * (s1 + s2 + s3 - s4) + s2 = c * z * 2 * F_z + s3 = z * ((1 - 2 * lmass) * F_u + (1 - 2 * umass) * F_l) / (F_u - F_l) + s4 = c * (2 * f_z - 2 * f_u * umass - 2 * f_l * lmass) + s5 = c**2 * (F_u2 - F_l2) / B.sqrt(B.pi) + return sigma * (s1 + s2 - s3 + s4 - s5) def gtct( @@ -458,17 +439,11 @@ def gtct( c = (1 - lmass - umass) / (F_u - F_l) s1 = B.abs(ω - z) + s1_u - s1_l - s2 = ( - c - * z - * ( - 2 * F_z - - ((1 - 2 * lmass) * F_u + (1 - 2 * umass) * F_l) / (1 - lmass - umass) - ) - ) - s3 = 2 * c * (G_z - G_u * umass - G_l * lmass) - s4 = c**2 * Bbar * (H_u - H_l) - return sigma * (s1 + s2 - s3 - s4) + s2 = c * z * 2 * F_z + s3 = z * ((1 - 2 * lmass) * F_u + (1 - 2 * umass) * F_l) / (F_u - F_l) + s4 = 2 * c * (G_z - G_u * umass - G_l * lmass) + s5 = c**2 * Bbar * (H_u - H_l) + return sigma * (s1 + s2 - s3 - s4 - s5) def hypergeometric( @@ -561,7 +536,7 @@ def logistic( sigma: "ArrayLike", backend: "Backend" = None, ) -> "Array": - """Compute the CRPS for the normal distribution.""" + """Compute the CRPS for the logistic distribution.""" B = backends.active if backend is None else backends[backend] mu, sigma, obs = map(B.asarray, (mu, sigma, obs)) ω = (obs - mu) / sigma @@ -579,25 +554,25 @@ def loglaplace( obs, mulog, sigmalog = map(B.asarray, (obs, locationlog, scalelog)) obs, mulog, sigmalog = B.broadcast_arrays(obs, mulog, sigmalog) - 
logx_norm = (B.log(obs) - mulog) / sigmalog - cond_0 = obs <= 0.0 cond_1 = obs < B.exp(mulog) - F_case_0 = B.asarray(cond_0, dtype=int) - F_case_1 = B.asarray(~cond_0 & cond_1, dtype=int) + obs_pos = B.where(cond_0, B.nan, obs) + logx_norm = (B.log(obs_pos) - mulog) / sigmalog + + F_case_1 = B.asarray(cond_1 & ~cond_0, dtype=int) F_case_2 = B.asarray(~cond_1, dtype=int) - F = ( - F_case_0 * 0.0 - + F_case_1 * (0.5 * B.exp(logx_norm)) - + F_case_2 * (1 - 0.5 * B.exp(-logx_norm)) + F = B.where( + obs <= 0.0, + 0.0, + F_case_1 * (0.5 * B.exp(logx_norm)) + F_case_2 * (1 - 0.5 * B.exp(-logx_norm)), ) - A_case_0 = B.asarray(cond_1, dtype=int) - A_case_1 = B.asarray(~cond_1, dtype=int) - A = A_case_0 * 1 / (1 + sigmalog) * ( - 1 - (2 * F) ** (1 + sigmalog) - ) + A_case_1 * -1 / (1 - sigmalog) * (1 - (2 * (1 - F)) ** (1 - sigmalog)) + A = B.where( + cond_1, + 1 / (1 + sigmalog) * (1 - (2 * F) ** (1 + sigmalog)), + -1 / (1 - sigmalog) * (1 - (2 * (1 - F)) ** (1 - sigmalog)), + ) s = obs * (2 * F - 1) + B.exp(mulog) * (A + sigmalog / (4 - sigmalog**2)) return s @@ -612,7 +587,9 @@ def loglogistic( """Compute the CRPS for the log-logistic distribution.""" B = backends.active if backend is None else backends[backend] mulog, sigmalog, obs = map(B.asarray, (mulog, sigmalog, obs)) - F_ms = 1 / (1 + B.exp(-(B.log(obs) - mulog) / sigmalog)) + cond_0 = obs <= 0.0 + obs_pos = B.where(cond_0, B.nan, obs) + F_ms = B.where(cond_0, 0, 1 / (1 + B.exp(-(B.log(obs_pos) - mulog) / sigmalog))) b = B.beta(1 + sigmalog, 1 - sigmalog) I_B = B.betainc(1 + sigmalog, 1 - sigmalog, F_ms) s = obs * (2 * F_ms - 1) - B.exp(mulog) * b * (2 * I_B + sigmalog - 1) @@ -628,13 +605,15 @@ def lognormal( """Compute the CRPS for the lognormal distribution.""" B = backends.active if backend is None else backends[backend] mulog, sigmalog, obs = map(B.asarray, (mulog, sigmalog, obs)) - ω = (B.log(obs) - mulog) / sigmalog + cond_0 = obs <= 0.0 + obs_pos = B.where(cond_0, B.nan, obs) + F_s = _norm_cdf(sigmalog / 
B.sqrt(B.asarray(2.0)), backend=backend) + w_ms = (B.log(obs_pos) - mulog) / sigmalog + F_ms = B.where(cond_0, 0, _norm_cdf(w_ms, backend=backend)) + w_mss = (B.log(obs_pos) - mulog - sigmalog**2) / sigmalog + F_mss = B.where(cond_0, 0, _norm_cdf(w_mss, backend=backend)) ex = 2 * B.exp(mulog + sigmalog**2 / 2) - return obs * (2.0 * _norm_cdf(ω, backend=backend) - 1) - ex * ( - _norm_cdf(ω - sigmalog, backend=backend) - + _norm_cdf(sigmalog / B.sqrt(B.asarray(2.0)), backend=backend) - - 1 - ) + return obs * (2.0 * F_ms - 1) - ex * (F_mss + F_s - 1) def mixnorm( @@ -690,7 +669,7 @@ def normal( sigma: "ArrayLike", backend: "Backend" = None, ) -> "Array": - """Compute the CRPS for the logistic distribution.""" + """Compute the CRPS for the normal distribution.""" B = backends.active if backend is None else backends[backend] mu, sigma, obs = map(B.asarray, (mu, sigma, obs)) ω = (obs - mu) / sigma diff --git a/scoringrules/core/crps/_gufuncs.py b/scoringrules/core/crps/_gufuncs.py index e3ecc2b..6b46957 100644 --- a/scoringrules/core/crps/_gufuncs.py +++ b/scoringrules/core/crps/_gufuncs.py @@ -135,9 +135,9 @@ def _crps_ensemble_fair_gufunc(obs: np.ndarray, fct: np.ndarray, out: np.ndarray for i in range(M): e_1 += abs(fct[i] - obs) for j in range(i + 1, M): - e_2 += 2 * abs(fct[j] - fct[i]) + e_2 += abs(fct[j] - fct[i]) - out[0] = e_1 / M - 0.5 * e_2 / (M * (M - 1)) + out[0] = e_1 / M - e_2 / (M * (M - 1)) @guvectorize("(),(n)->()") @@ -172,6 +172,7 @@ def _crps_ensemble_akr_gufunc(obs: np.ndarray, fct: np.ndarray, out: np.ndarray) i = M - 1 e_1 += abs(forecast - obs) e_2 += abs(forecast - fct[i - 1]) + out[0] = e_1 / M - 0.5 * 1 / M * e_2 @@ -200,11 +201,9 @@ def _owcrps_ensemble_nrg_gufunc( ): """Outcome-weighted CRPS estimator based on the energy form.""" M = fct.shape[-1] - if np.isnan(obs): out[0] = np.nan return - e_1 = 0.0 e_2 = 0.0 @@ -228,11 +227,9 @@ def _vrcrps_ensemble_nrg_gufunc( ): """Vertically re-scaled CRPS estimator based on the energy form.""" M = 
fct.shape[-1] - if np.isnan(obs): out[0] = np.nan return - e_1 = 0.0 e_2 = 0.0 diff --git a/scoringrules/core/logarithmic.py b/scoringrules/core/logarithmic.py index cd4d7dc..42bb671 100644 --- a/scoringrules/core/logarithmic.py +++ b/scoringrules/core/logarithmic.py @@ -95,6 +95,8 @@ def binomial( obs = B.where(zind, B.nan, obs) prob = _binom_pdf(obs, n, prob, backend=backend) s = B.where(zind, float("inf"), -B.log(prob)) + ints = obs % 1 == 0 + s = B.where(ints, s, float("inf")) return s @@ -209,8 +211,13 @@ def hypergeometric( obs, m, n, k = map(B.asarray, (obs, m, n, k)) M = m + n N = k + zind = obs < 0.0 + obs = B.where(zind, B.nan, obs) prob = _hypergeo_pdf(obs, M, m, N, backend=backend) - return -B.log(prob) + s = B.where(zind, float("inf"), -B.log(prob)) + ints = obs % 1 == 0 + s = B.where(ints, s, float("inf")) + return s def laplace( @@ -329,17 +336,14 @@ def normal( obs: "ArrayLike", mu: "ArrayLike", sigma: "ArrayLike", - negative: bool = True, backend: "Backend" = None, ) -> "Array": """Compute the logarithmic score for the normal distribution.""" B = backends.active if backend is None else backends[backend] mu, sigma, obs = map(B.asarray, (mu, sigma, obs)) - - constant = -1.0 if negative else 1.0 ω = (obs - mu) / sigma prob = _norm_pdf(ω, backend=backend) / sigma - return constant * B.log(prob) + return -B.log(prob) def twopnormal( diff --git a/tests/test_crps.py b/tests/test_crps.py index 439a3cc..5c850a8 100644 --- a/tests/test_crps.py +++ b/tests/test_crps.py @@ -21,7 +21,7 @@ def test_crps_ensemble(estimator, backend): est = "undefined_estimator" sr.crps_ensemble(obs, fct, estimator=est, backend=backend) - # test shapes + # test shape res = sr.crps_ensemble(obs, fct, estimator=estimator, backend=backend) assert res.shape == (N,) res = sr.crps_ensemble( @@ -106,7 +106,7 @@ def test_crps_ensemble_corr(backend): def test_crps_quantile(backend): - # test shapes + # test shape obs = np.random.randn(N) fct = np.random.randn(N, ENSEMBLE_SIZE) alpha = 
np.linspace(0.1, 0.9, ENSEMBLE_SIZE) @@ -148,38 +148,86 @@ def test_crps_beta(backend): if backend == "torch": pytest.skip("Not implemented in torch backend") + ## test shape + + # one observation, multiple forecasts res = sr.crps_beta( - np.random.uniform(0, 1, (3, 3)), - np.random.uniform(0, 3, (3, 3)), - 1.1, + 0.1, + np.random.uniform(0, 3, (4, 3)), + np.random.uniform(0, 3, (4, 3)), backend=backend, ) - assert res.shape == (3, 3) - assert not np.any(np.isnan(res)) + assert res.shape == (4, 3) - # test exceptions + # multiple observations, one forecast + res = sr.crps_beta( + np.random.uniform(0, 1, (4, 3)), + 2.4, + 0.5, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_beta( + np.random.uniform(0, 1, (4, 3)), + np.random.uniform(0, 3, (4, 3)), + np.random.uniform(0, 3, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_beta( + 0.3, 0.7, 1.1, lower=1.0, upper=0.0, backend=backend, check_pars=True + ) + return + + # lower bound equal to upper bound with pytest.raises(ValueError): - sr.crps_beta(0.3, 0.7, 1.1, lower=1.0, upper=0.0, backend=backend) + sr.crps_beta( + 0.3, 0.7, 1.1, lower=0.5, upper=0.5, backend=backend, check_pars=True + ) + return + + # negative shape parameters + with pytest.raises(ValueError): + sr.crps_beta(0.3, -0.7, 1.1, backend=backend, check_pars=True) return - # correctness tests + with pytest.raises(ValueError): + sr.crps_beta(0.3, 0.7, -1.1, backend=backend, check_pars=True) + return + + ## correctness tests + + # single forecast res = sr.crps_beta(0.3, 0.7, 1.1, backend=backend) expected = 0.0850102437 assert np.isclose(res, expected) + # custom bounds res = sr.crps_beta(-3.0, 0.7, 1.1, lower=-5.0, upper=4.0, backend=backend) expected = 0.883206751 assert np.isclose(res, expected) - # test when lower and upper are arrays + # observation outside bounds
+ res = sr.crps_beta(-3.0, 0.7, 1.1, lower=-1.3, upper=4.0, backend=backend) + expected = 2.880094 + assert np.isclose(res, expected) + + # multiple forecasts res = sr.crps_beta( -3.0, - 0.7, - 1.1, + np.array([0.7, 2.0]), + np.array([1.1, 4.8]), lower=np.array([-5.0, -5.0]), upper=np.array([4.0, 4.0]), ) - expected = np.array([0.883206751, 0.883206751]) + expected = np.array([0.8832068, 0.4149867]) assert np.allclose(res, expected) @@ -187,20 +235,17 @@ def test_crps_binomial(backend): if backend == "torch": pytest.skip("Not implemented in torch backend") - # test correctness - res = sr.crps_binomial(8, 10, 0.9, backend=backend) - expected = 0.6685115 - assert np.isclose(res, expected) - - res = sr.crps_binomial(-8, 10, 0.9, backend=backend) - expected = 16.49896 - assert np.isclose(res, expected) + ## test shape - res = sr.crps_binomial(18, 10, 0.9, backend=backend) - expected = 8.498957 - assert np.isclose(res, expected) + # res = sr.crps_binomial( + # np.random.randint(0, 10, size=(4, 3)), + # np.full((4, 3), 10), + # np.random.uniform(0, 1, (4, 3)), + # backend=backend, + # ) + # assert res.shape == (4, 3) + # - # test broadcasting ones = np.ones(2) k, n, p = 8, 10, 0.9 s = sr.crps_binomial(k * ones, n, p, backend=backend) @@ -214,403 +259,2218 @@ def test_crps_binomial(backend): s = sr.crps_binomial(k * ones, n, p * ones, backend=backend) assert np.isclose(s, np.array([0.6685115, 0.6685115])).all() + ## test exceptions -def test_crps_exponential(backend): - # TODO: add and test exception handling - - # test correctness - obs, rate = 3, 0.7 - res = sr.crps_exponential(obs, rate, backend=backend) - expected = 1.20701837 - assert np.isclose(res, expected) + # negative size parameter + with pytest.raises(ValueError): + sr.crps_binomial(7, -1, 0.9, backend=backend, check_pars=True) + return + # zero size parameter + with pytest.raises(ValueError): + sr.crps_binomial(2.1, 0, 0.1, backend=backend, check_pars=True) + return -def test_crps_exponentialM(backend): - 
obs, mass, location, scale = 0.3, 0.1, 0.0, 1.0 - res = sr.crps_exponentialM(obs, mass, location, scale, backend=backend) - expected = 0.2384728 - assert np.isclose(res, expected) + # negative prob parameter + with pytest.raises(ValueError): + sr.crps_binomial(7, 15, -0.1, backend=backend, check_pars=True) + return - obs, mass, location, scale = 0.3, 0.1, -2.0, 3.0 - res = sr.crps_exponentialM(obs, mass, location, scale, backend=backend) - expected = 0.6236187 - assert np.isclose(res, expected) + # prob parameter greater than one + with pytest.raises(ValueError): + sr.crps_binomial(1, 10, 1.1, backend=backend, check_pars=True) + return - obs, mass, location, scale = -1.2, 0.1, -2.0, 3.0 - res = sr.crps_exponentialM(obs, mass, location, scale, backend=backend) - expected = 0.751013 - assert np.isclose(res, expected) + # non-integer size parameter + with pytest.raises(ValueError): + sr.crps_binomial(4, 8.99, 0.5, backend=backend, check_pars=True) + return + ## test correctness -def test_crps_2pexponential(backend): - obs, scale1, scale2, location = 0.3, 0.1, 4.3, 0.0 - res = sr.crps_2pexponential(obs, scale1, scale2, location, backend=backend) - expected = 1.787032 + # single observation + res = sr.crps_binomial(8, 10, 0.9, backend=backend) + expected = 0.6685115 assert np.isclose(res, expected) - obs, scale1, scale2, location = -20.8, 7.1, 2.0, -25.4 - res = sr.crps_2pexponential(obs, scale1, scale2, location, backend=backend) - expected = 6.018359 + # negative observation + res = sr.crps_binomial(-8, 10, 0.9, backend=backend) + expected = 16.49896 assert np.isclose(res, expected) + # zero prob parameter + res = sr.crps_binomial(71, 212, 0, backend=backend, check_pars=True) + expected = 71 + assert np.isclose(res, expected) -def test_crps_gamma(backend): - obs, shape, rate = 0.2, 1.1, 0.7 - expected = 0.6343718 - - res = sr.crps_gamma(obs, shape, rate, backend=backend) + # observation larger than size + res = sr.crps_binomial(18, 10, 0.9, backend=backend) + expected 
= 8.498957 assert np.isclose(res, expected) - res = sr.crps_gamma(obs, shape, scale=1 / rate, backend=backend) + # non-integer observation + res = sr.crps_binomial(5.6, 10, 0.9, backend=backend) + expected = 2.901231 assert np.isclose(res, expected) - with pytest.raises(ValueError): - sr.crps_gamma(obs, shape, rate, scale=1 / rate, backend=backend) - return + # multiple observations + res = sr.crps_binomial(np.array([5.6, 17.2]), 10, 0.9, backend=backend) + expected = np.array([2.901231, 7.698957]) + assert np.allclose(res, expected) - with pytest.raises(ValueError): - sr.crps_gamma(obs, shape, backend=backend) - return +def test_crps_exponential(backend): + ## test shape -def test_crps_csg0(backend): - obs, shape, rate, shift = 0.7, 0.5, 2.0, 0.3 - expected = 0.5411044 + # one observation, multiple forecasts + res = sr.crps_exponential( + 7.2, + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - expected_gamma = sr.crps_gamma(obs, shape, rate, backend=backend) - res_gamma = sr.crps_csg0(obs, shape=shape, rate=rate, shift=0.0, backend=backend) - assert np.isclose(res_gamma, expected_gamma) + # multiple observations, one forecast + res = sr.crps_exponential( + np.random.uniform(0, 10, (4, 3)), + 7.2, + backend=backend, + ) + assert res.shape == (4, 3) - res = sr.crps_csg0(obs, shape=shape, rate=rate, shift=shift, backend=backend) - assert np.isclose(res, expected) + # multiple observations, multiple forecasts + res = sr.crps_exponential( + np.random.uniform(0, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - res = sr.crps_csg0(obs, shape=shape, scale=1.0 / rate, shift=shift, backend=backend) - assert np.isclose(res, expected) + ## test exceptions + # negative rate with pytest.raises(ValueError): - sr.crps_csg0( - obs, shape=shape, rate=rate, scale=1.0 / rate, shift=shift, backend=backend - ) + sr.crps_exponential(3.2, -1, backend=backend, check_pars=True) return + # zero rate 
with pytest.raises(ValueError): - sr.crps_csg0(obs, shape=shape, shift=shift, backend=backend) + sr.crps_exponential(3.2, 0, backend=backend, check_pars=True) return + ## test correctness -def test_crps_gev(backend): - if backend == "torch": - pytest.skip("`expi` not implemented in torch backend") + # single observation + res = sr.crps_exponential(3, 0.7, backend=backend) + expected = 1.20701837 + assert np.isclose(res, expected) - obs, xi, mu, sigma = 0.3, 0.0, 0.0, 1.0 - assert np.isclose(sr.crps_gev(obs, xi, backend=backend), 0.276440963) - mu = 0.1 - assert np.isclose( - sr.crps_gev(obs + mu, xi, location=mu, backend=backend), 0.276440963 - ) - sigma = 0.9 - mu = 0.0 - assert np.isclose( - sr.crps_gev(obs * sigma, xi, scale=sigma, backend=backend), - 0.276440963 * sigma, - ) + # negative observation + res = sr.crps_exponential(-3, 0.7, backend=backend) + expected = 3.714286 + assert np.isclose(res, expected) - obs, xi, mu, sigma = 0.3, 0.7, 0.0, 1.0 - assert np.isclose(sr.crps_gev(obs, xi, backend=backend), 0.458044365) - mu = 0.1 - assert np.isclose( - sr.crps_gev(obs + mu, xi, location=mu, backend=backend), 0.458044365 - ) - sigma = 0.9 - mu = 0.0 - assert np.isclose( - sr.crps_gev(obs * sigma, xi, scale=sigma, backend=backend), - 0.458044365 * sigma, - ) + # multiple observations + res = sr.crps_exponential(np.array([5.6, 17.2]), 10.1, backend=backend) + expected = np.array([5.451485, 17.051485]) + assert np.allclose(res, expected) - obs, xi, mu, sigma = 0.3, -0.7, 0.0, 1.0 - assert np.isclose(sr.crps_gev(obs, xi, backend=backend), 0.207621488) - mu = 0.1 - assert np.isclose( - sr.crps_gev(obs + mu, xi, location=mu, backend=backend), 0.207621488 - ) - sigma = 0.9 - mu = 0.0 - assert np.isclose( - sr.crps_gev(obs * sigma, xi, scale=sigma, backend=backend), - 0.207621488 * sigma, - ) +def test_crps_exponentialM(backend): + ## test shape + + # one observation, multiple forecasts + # res = sr.crps_exponentialM( + # 6.2, + # np.random.uniform(0, 5, (4, 3)), + # 
np.random.uniform(0, 5, (4, 3)), + # np.random.uniform(0, 1, (4, 3)), + # backend=backend, + # ) + # assert res.shape == (4, 3) + # + + # multiple observations, one forecast + res = sr.crps_exponentialM( + np.random.uniform(0, 10, (4, 3)), + 2.4, + 3.5, + 0.1, + backend=backend, + ) -def test_crps_gpd(backend): - assert np.isclose(sr.crps_gpd(0.3, 0.9, backend=backend), 0.6849332) - assert np.isclose(sr.crps_gpd(-0.3, 0.9, backend=backend), 1.209091) - assert np.isclose(sr.crps_gpd(0.3, -0.9, backend=backend), 0.1338672) - assert np.isclose(sr.crps_gpd(-0.3, -0.9, backend=backend), 0.6448276) + # multiple observations, multiple forecasts + res = sr.crps_exponentialM( + np.random.uniform(0, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - assert np.isnan(sr.crps_gpd(0.3, 1.0, backend=backend)) - assert np.isnan(sr.crps_gpd(0.3, 1.2, backend=backend)) - assert np.isnan(sr.crps_gpd(0.3, 0.9, mass=-0.1, backend=backend)) - assert np.isnan(sr.crps_gpd(0.3, 0.9, mass=1.1, backend=backend)) + ## test exceptions - res = 0.281636441 - assert np.isclose(sr.crps_gpd(0.3 + 0.1, 0.0, location=0.1, backend=backend), res) - assert np.isclose( - sr.crps_gpd(0.3 * 0.9, 0.0, scale=0.9, backend=backend), res * 0.9 - ) + # negative scale + with pytest.raises(ValueError): + sr.crps_exponentialM(3.2, -1, -1, 0.0, backend=backend, check_pars=True) + return + # zero scale + with pytest.raises(ValueError): + sr.crps_exponentialM(3.2, -1, 0.0, 0.0, backend=backend, check_pars=True) + return -def test_crps_gtclogis(backend): - obs, location, scale, lower, upper, lmass, umass = ( - 1.8, - -3.0, - 3.3, - -5.0, - 4.7, - 0.1, - 0.15, - ) - expected = 1.599721 - res = sr.crps_gtclogistic( - obs, location, scale, lower, upper, lmass, umass, backend=backend - ) - assert np.isclose(res, expected) + # negative mass + with pytest.raises(ValueError): + sr.crps_exponentialM(3.2, -1, 
2, -0.1, backend=backend, check_pars=True) + return - # aligns with crps_logistic - res0 = sr.crps_logistic(obs, location, scale, backend=backend) - res = sr.crps_gtclogistic(obs, location, scale, backend=backend) - assert np.isclose(res, res0) + # mass larger than one + with pytest.raises(ValueError): + sr.crps_exponentialM(3.2, -1, 2, 2.1, backend=backend, check_pars=True) + return - # aligns with crps_tlogistic - res0 = sr.crps_tlogistic(obs, location, scale, lower, upper, backend=backend) - res = sr.crps_gtclogistic(obs, location, scale, lower, upper, backend=backend) - assert np.isclose(res, res0) + ## test correctness + # single observation + res = sr.crps_exponentialM(0.3, 0.0, 1.0, 0.1, backend=backend) + expected = 0.2384728 + assert np.isclose(res, expected) -def test_crps_tlogis(backend): - obs, location, scale, lower, upper = 4.9, 3.5, 2.3, 0.0, 20.0 - expected = 0.7658979 - res = sr.crps_tlogistic(obs, location, scale, lower, upper, backend=backend) + # negative location + res = sr.crps_exponentialM(0.3, -2.0, 3.0, 0.1, backend=backend) + expected = 0.6236187 assert np.isclose(res, expected) - # aligns with crps_logistic - res0 = sr.crps_logistic(obs, location, scale, backend=backend) - res = sr.crps_tlogistic(obs, location, scale, backend=backend) - assert np.isclose(res, res0) + # negative observation + res = sr.crps_exponentialM(-1.2, -2.0, 3.0, 0.1, backend=backend) + expected = 0.751013 + assert np.isclose(res, expected) + # observation smaller than location + res = sr.crps_exponentialM(-1.2, 2.0, 3.0, 0.1, backend=backend) + expected = 4.415 + assert np.isclose(res, expected) -def test_crps_clogis(backend): - obs, location, scale, lower, upper = -0.9, 0.4, 1.1, 0.0, 1.0 - expected = 1.13237 - res = sr.crps_clogistic(obs, location, scale, lower, upper, backend=backend) + # zero mass + res = sr.crps_exponentialM(-1.2, -2.0, 3.0, 0, backend=backend) + expected = 0.89557 assert np.isclose(res, expected) - # aligns with crps_logistic - res0 = 
sr.crps_logistic(obs, location, scale, backend=backend) - res = sr.crps_clogistic(obs, location, scale, backend=backend) + # multiple observations + res = sr.crps_exponentialM( + np.array([6.4, 2.2, 17.2]), + 10.1, + 4.1, + np.array([0.1, 0.2, 0.3]), + backend=backend, + ) + expected = np.array([5.360500, 9.212000, 3.380377]) + assert np.allclose(res, expected) + + # check equivalence with crps_exponential + res0 = sr.crps_exponential(8.2, 3, backend=backend) + res = sr.crps_exponentialM(8.2, 0, 1 / 3, 0, backend=backend) assert np.isclose(res, res0) -def test_crps_gtcnormal(backend): - obs, location, scale, lower, upper, lmass, umass = ( - 0.9, - -2.3, - 4.1, - -7.3, - 1.7, - 0.0, - 0.21, +def test_crps_2pexponential(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_2pexponential( + 6.2, + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(-2, 2, (4, 3)), + backend=backend, ) - expected = 1.422805 - res = sr.crps_gtcnormal( - obs, location, scale, lower, upper, lmass, umass, backend=backend + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_2pexponential( + np.random.uniform(-5, 5, (4, 3)), + 2.4, + 10.1, + np.random.uniform(-2, 2, (4, 3)), + backend=backend, ) - assert np.isclose(res, expected) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_2pexponential( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(-2, 2, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - # aligns with crps_normal - res0 = sr.crps_normal(obs, location, scale, backend=backend) - res = sr.crps_gtcnormal(obs, location, scale, backend=backend) - assert np.isclose(res, res0) + ## test exceptions - # aligns with crps_tnormal - res0 = sr.crps_tnormal(obs, location, scale, lower, upper, backend=backend) - res = sr.crps_gtcnormal(obs, location, scale, lower, 
upper, backend=backend) - assert np.isclose(res, res0) + # negative scale parameters + with pytest.raises(ValueError): + sr.crps_2pexponential(3.2, -1, 2, 2.1, backend=backend, check_pars=True) + return + with pytest.raises(ValueError): + sr.crps_2pexponential(3.2, 1, -2, 2.1, backend=backend, check_pars=True) + return -def test_crps_tnormal(backend): - obs, location, scale, lower, upper = -1.0, 2.9, 2.2, 1.5, 17.3 - expected = 3.982434 - res = sr.crps_tnormal(obs, location, scale, lower, upper, backend=backend) - assert np.isclose(res, expected) + # zero scale parameters + with pytest.raises(ValueError): + sr.crps_2pexponential(3.2, 0, 2, 2.1, backend=backend, check_pars=True) + return - # aligns with crps_normal - res0 = sr.crps_normal(obs, location, scale, backend=backend) - res = sr.crps_tnormal(obs, location, scale, backend=backend) - assert np.isclose(res, res0) + with pytest.raises(ValueError): + sr.crps_2pexponential(3.2, 1, 0, 2.1, backend=backend, check_pars=True) + return + ## test correctness -def test_crps_cnormal(backend): - obs, location, scale, lower, upper = 1.8, 0.4, 1.1, 0.0, 2.0 - expected = 0.8296078 - res = sr.crps_cnormal(obs, location, scale, lower, upper, backend=backend) + # single observation + res = sr.crps_2pexponential(0.3, 0.1, 4.3, 0.0, backend=backend) + expected = 1.787032 assert np.isclose(res, expected) - # aligns with crps_normal - res0 = sr.crps_normal(obs, location, scale, backend=backend) - res = sr.crps_cnormal(obs, location, scale, backend=backend) - assert np.isclose(res, res0) - + # negative location + res = sr.crps_2pexponential(-20.8, 7.1, 2.0, -25.4, backend=backend) + expected = 6.018359 + assert np.isclose(res, expected) -def test_crps_gtct(backend): - if backend in ["jax", "torch", "tensorflow"]: - pytest.skip("Not implemented in jax, torch or tensorflow backends") - obs, df, location, scale, lower, upper, lmass, umass = ( - 0.9, - 20.1, - -2.3, - 4.1, - -7.3, - 1.7, - 0.0, - 0.21, - ) - expected = 1.423042 - res = 
sr.crps_gtct( - obs, df, location, scale, lower, upper, lmass, umass, backend=backend + # multiple observations + res = sr.crps_2pexponential( + np.array([-1.2, 3.7]), 0.1, 4.3, np.array([-0.1, 0.1]), backend=backend ) - assert np.isclose(res, expected) + expected = np.array([3.1488637, 0.8873341]) + assert np.allclose(res, expected) - # aligns with crps_t - res0 = sr.crps_t(obs, df, location, scale, backend=backend) - res = sr.crps_gtct(obs, df, location, scale, backend=backend) + # check equivalence with laplace distribution + res = sr.crps_2pexponential(-20.8, 2.0, 2.0, -25.4, backend=backend) + res0 = sr.crps_laplace(-20.8, -25.4, 2.0, backend=backend) assert np.isclose(res, res0) - # aligns with crps_tnormal - res0 = sr.crps_tt(obs, df, location, scale, lower, upper, backend=backend) - res = sr.crps_gtct(obs, df, location, scale, lower, upper, backend=backend) - assert np.isclose(res, res0) +def test_crps_gamma(backend): + ## test shape -def test_crps_tt(backend): - if backend in ["jax", "torch", "tensorflow"]: - pytest.skip("Not implemented in jax, torch or tensorflow backends") + # one observation, multiple forecasts + res = sr.crps_gamma( + 6.2, + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - obs, df, location, scale, lower, upper = -1.0, 2.9, 3.1, 4.2, 1.5, 17.3 - expected = 5.084272 - res = sr.crps_tt(obs, df, location, scale, lower, upper, backend=backend) - assert np.isclose(res, expected) + # multiple observations, one forecast + res = sr.crps_gamma( + np.random.uniform(0, 10, (4, 3)), + 4.0, + 2.1, + backend=backend, + ) + assert res.shape == (4, 3) - # aligns with crps_t - res0 = sr.crps_t(obs, df, location, scale, backend=backend) - res = sr.crps_tt(obs, df, location, scale, backend=backend) - assert np.isclose(res, res0) + # multiple observations, multiple forecasts + res = sr.crps_gamma( + np.random.uniform(0, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + 
np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + ## test exceptions -def test_crps_ct(backend): - if backend in ["jax", "torch", "tensorflow"]: - pytest.skip("Not implemented in jax, torch or tensorflow backends") + # rate and scale provided + with pytest.raises(ValueError): + sr.crps_gamma(3.2, -1, rate=2, scale=0.4, backend=backend, check_pars=True) + return - obs, df, location, scale, lower, upper = 1.8, 5.4, 0.4, 1.1, 0.0, 2.0 - expected = 0.8028996 - res = sr.crps_ct(obs, df, location, scale, lower, upper, backend=backend) - assert np.isclose(res, expected) + # neither rate nor scale provided + with pytest.raises(ValueError): + sr.crps_gamma(3.2, -1, backend=backend, check_pars=True) + return - # aligns with crps_t - res0 = sr.crps_t(obs, df, location, scale, backend=backend) - res = sr.crps_ct(obs, df, location, scale, backend=backend) - assert np.isclose(res, res0) + # negative shape parameter + with pytest.raises(ValueError): + sr.crps_gamma(3.2, -1, 2, backend=backend, check_pars=True) + return + # negative rate/scale parameter + with pytest.raises(ValueError): + sr.crps_gamma(3.2, 1, -2, backend=backend, check_pars=True) + return -def test_crps_hypergeometric(backend): - if backend == "torch": - pytest.skip("Currently not working in torch backend") + # zero shape parameter + with pytest.raises(ValueError): + sr.crps_gamma(3.2, 0.0, 2, backend=backend, check_pars=True) + return - # test shapes - res = sr.crps_hypergeometric(5 * np.ones((2, 2)), 7, 13, 12, backend=backend) - assert res.shape == (2, 2) + ## test correctness - res = sr.crps_hypergeometric(5, 7 * np.ones((2, 2)), 13, 12, backend=backend) - assert res.shape == (2, 2) + # single observation + res = sr.crps_gamma(0.2, 1.1, 0.7, backend=backend) + expected = 0.6343718 + assert np.isclose(res, expected) - res = sr.crps_hypergeometric(5, 7, 13 * np.ones((2, 2)), 12, backend=backend) - assert res.shape == (2, 2) + # scale instead of rate + res = 
sr.crps_gamma(0.2, 1.1, scale=1 / 0.7, backend=backend) + assert np.isclose(res, expected) - # test correctness - assert np.isclose(sr.crps_hypergeometric(5, 7, 13, 12), 0.4469742) + # negative observation + res = sr.crps_gamma(-4.2, 1.1, rate=0.7, backend=backend) + expected = 5.014442 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_gamma(np.array([-1.2, 2.3]), 1.1, 0.7, backend=backend) + expected = np.array([2.0144417, 0.6500017]) + assert np.allclose(res, expected) + # check equivalence with exponential distribution + res0 = sr.crps_exponential(3.1, 2, backend=backend) + res = sr.crps_gamma(3.1, 1, 2, backend=backend) + assert np.isclose(res0, res) -def test_crps_laplace(backend): - assert np.isclose(sr.crps_laplace(-3, backend=backend), 2.29978707) - assert np.isclose( - sr.crps_laplace(-3 + 0.1, location=0.1, backend=backend), 2.29978707 + +def test_crps_csg0(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_csg0( + 6.2, + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + shift=np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_csg0( + np.random.uniform(0, 5, (4, 3)), + 9.8, + 8.8, + shift=1.1, + backend=backend, ) - assert np.isclose( - sr.crps_laplace(-3 * 0.9, scale=0.9, backend=backend), 0.9 * 2.29978707 + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_csg0( + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + shift=np.random.uniform(0, 5, (4, 3)), + backend=backend, ) + assert res.shape == (4, 3) + ## test exceptions -def test_crps_logis(backend): - obs, mu, sigma = 17.1, 13.8, 3.3 - expected = 2.067527 - res = sr.crps_logistic(obs, mu, sigma, backend=backend) - assert np.isclose(res, expected) - - obs, mu, sigma = 3.1, 4.0, 0.5 - expected = 0.5529776 - res = sr.crps_logistic(obs, mu, 
sigma, backend=backend) - assert np.isclose(res, expected) + # rate and scale provided + with pytest.raises(ValueError): + sr.crps_csg0(0.7, 0.5, rate=2.0, scale=0.5, shift=0.3, backend=backend) + return + + # neither rate nor scale provided + with pytest.raises(ValueError): + sr.crps_csg0(0.7, 0.5, shift=0.3, backend=backend) + return + + # negative shape parameter + with pytest.raises(ValueError): + sr.crps_csg0(0.7, -0.5, 2.0, shift=2, backend=backend, check_pars=True) + return + + # negative rate/scale parameter + with pytest.raises(ValueError): + sr.crps_csg0(0.7, 0.5, -2.0, shift=2, backend=backend, check_pars=True) + return + + # zero shape parameter + with pytest.raises(ValueError): + sr.crps_csg0(0.7, 0, 2.0, shift=2, backend=backend, check_pars=True) + return + + ## test correctness + + obs, shape, rate, shift = 0.7, 0.5, 2.0, 0.3 + expected = 0.5411044 + + # single observation + res = sr.crps_csg0(obs, shape=shape, rate=rate, shift=shift, backend=backend) + assert np.isclose(res, expected) + + # scale instead of rate + res = sr.crps_csg0(obs, shape=shape, scale=1.0 / rate, shift=shift, backend=backend) + assert np.isclose(res, expected) + + # with and without shift parameter + res0 = sr.crps_csg0(obs, shape, rate, backend=backend) + res = sr.crps_csg0(obs, shape, rate, shift=0.0, backend=backend) + assert np.isclose(res0, res) + + # check equivalence with gamma distribution + res0 = sr.crps_gamma(obs, shape, rate, backend=backend) + assert np.isclose(res0, res) + + +def test_crps_gev(backend): + if backend == "torch": + pytest.skip("Not implemented in torch backend") + + ## test shape + + # one observation, multiple forecasts + res = sr.crps_gev( + 6.2, + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + 0.5, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_gev( + np.random.uniform(-5, 5, (4, 3)), + -4.9, + 2.3, + 0.7, + backend=backend, + ) + assert res.shape == (4, 3) + 
+ # multiple observations, multiple forecasts + res = sr.crps_gev( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + 0.7, + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_gev(0.7, 0.5, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_gev(0.7, 0.5, 2.0, 0.0, backend=backend, check_pars=True) + return + + # shape parameter greater than one + with pytest.raises(ValueError): + sr.crps_gev(0.7, 1.5, 0.0, 0.5, backend=backend, check_pars=True) + return + + ## test correctness + + # test default location + res0 = sr.crps_gev(0.3, 0.0, backend=backend) + res = sr.crps_gev(0.3, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # test default scale + res = sr.crps_gev(0.3, 0.0, 0.0, 1.0, backend=backend) + assert np.isclose(res0, res) + + # test invariance to shift + mu = 0.1 + res = sr.crps_gev(0.3 + mu, 0.0, mu, 1.0, backend=backend) + assert np.isclose(res0, res) + + # test invariance to rescaling + sigma = 0.9 + res = sr.crps_gev(0.3 * sigma, 0.0, scale=sigma, backend=backend) + assert np.isclose(res0 * sigma, res) + + # positive shape parameter + res = sr.crps_gev(0.3, 0.7, backend=backend) + expected = 0.458044365 + assert np.isclose(res, expected) + + # negative shape parameter + res = sr.crps_gev(0.3, -0.7, backend=backend) + expected = 0.207621488 + assert np.isclose(res, expected) + + # zero shape parameter + res = sr.crps_gev(0.3, 0.0, backend=backend) + expected = 0.276441 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_gev(np.array([0.9, -2.1]), -0.7, 0, 1.4, backend=backend) + expected = np.array([0.3575414, 1.7008711]) + assert np.allclose(res, expected) + + +def test_crps_gpd(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_gpd( + 6.2, + 
np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_gpd( + np.random.uniform(-5, 5, (4, 3)), + -4.9, + 2.3, + 0.7, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_gpd( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_gpd(0.7, 0.5, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_gpd(0.7, 0.5, 2.0, 0.0, backend=backend, check_pars=True) + return + + # shape parameter greater than one + with pytest.raises(ValueError): + sr.crps_gpd(0.7, 1.5, 0.0, 0.5, backend=backend, check_pars=True) + return + + # mass parameter smaller than zero + with pytest.raises(ValueError): + sr.crps_gpd(0.7, 0.5, 0.0, 0.5, mass=-0.1, backend=backend, check_pars=True) + return + + # mass parameter smaller than zero + with pytest.raises(ValueError): + sr.crps_gpd(0.7, 0.5, 0.0, 0.5, mass=-0.1, backend=backend, check_pars=True) + return + + # mass parameter greater than one + with pytest.raises(ValueError): + sr.crps_gpd(0.7, 0.5, 0.0, 0.5, mass=1.1, backend=backend, check_pars=True) + return + + ## test correctness + + # test default location + res0 = sr.crps_gpd(0.3, 0.0, backend=backend) + res = sr.crps_gpd(0.3, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # test default scale + res = sr.crps_gpd(0.3, 0.0, 0.0, 1.0, backend=backend) + assert np.isclose(res0, res) + + # test default mass + res = sr.crps_gpd(0.3, 0.0, 0.0, 1.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # test invariance to shift + res = sr.crps_gpd(0.3 
+ 0.1, 0.0, 0.1, 1.0, backend=backend) + assert np.isclose(res0, res) + + # test invariance to rescaling + res0 = sr.crps_gpd((0.3 - 0.1) / 0.8, 0.4, backend=backend) + res = sr.crps_gpd(0.3, 0.4, 0.1, 0.8, backend=backend) + assert np.isclose(res0 * 0.8, res) + + # single observation + res = sr.crps_gpd(0.3, 0.9, backend=backend) + expected = 0.6849332 + assert np.isclose(res, expected) + + # negative observation + res = sr.crps_gpd(-0.3, 0.9, backend=backend) + expected = 1.209091 + assert np.isclose(res, expected) + + # negative shape parameter + res = sr.crps_gpd(0.3, -0.9, backend=backend) + expected = 0.1338672 + assert np.isclose(res, expected) + + # non-zero mass parameter + res = sr.crps_gpd(-0.3, -0.9, -2.1, 10.0, mass=0.3, backend=backend) + expected = 1.195042 + assert np.isclose(res, expected) + + # mass parameter equal to one + res = sr.crps_gpd(-0.3, -0.9, -2.1, 10.0, mass=1.0, backend=backend) + expected = 1.8 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_gpd(np.array([0.9, -2.1]), -0.7, 0, 1.4, backend=backend) + expected = np.array([0.1570813, 2.6185185]) + assert np.allclose(res, expected) + + +def test_crps_gtclogis(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_gtclogistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_gtclogistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_gtclogistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_gtclogistic(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return + 
+ # zero scale parameter + with pytest.raises(ValueError): + sr.crps_gtclogistic(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # negative lower mass parameter + with pytest.raises(ValueError): + sr.crps_gtclogistic( + 0.7, 2.0, 0.4, lower=-1, lmass=-0.4, backend=backend, check_pars=True + ) + return + + # lower mass parameter larger than one + with pytest.raises(ValueError): + sr.crps_gtclogistic( + 0.7, 2.0, 0.4, lower=-1, lmass=1.4, backend=backend, check_pars=True + ) + return + + # negative upper mass parameter + with pytest.raises(ValueError): + sr.crps_gtclogistic( + 0.7, 2.0, 0.4, upper=4, umass=-0.4, backend=backend, check_pars=True + ) + return + + # upper mass parameter larger than one + with pytest.raises(ValueError): + sr.crps_gtclogistic( + 0.7, 2.0, 0.4, upper=4, umass=1.4, backend=backend, check_pars=True + ) + return + + # sum of lower and upper mass parameters larger than one + with pytest.raises(ValueError): + sr.crps_gtclogistic( + 0.7, + 2.0, + 0.4, + lower=-1, + upper=4, + lmass=0.7, + umass=0.4, + backend=backend, + check_pars=True, + ) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_gtclogistic( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_gtclogistic(1.8, backend=backend) + res = sr.crps_gtclogistic(1.8, 0.0, 1.0, -np.inf, np.inf, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_gtclogistic(1.8, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 1.599721 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_gtclogistic(-5.8, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 3.393796 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_gtclogistic(7.8, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 6.378633 + assert 
np.isclose(res, expected) + + # mass on lower bound equal to one + res = sr.crps_gtclogistic(7.8, -3.0, 3.3, -5.0, 4.7, 1.0, 0.0, backend=backend) + expected = 12.8 + assert np.isclose(res, expected) + + # mass on upper bound equal to one + res = sr.crps_gtclogistic(7.8, -3.0, 3.3, -5.0, 4.7, 0.0, 1.0, backend=backend) + expected = 3.1 + assert np.isclose(res, expected) + + # lower bound equal to infinite + res = sr.crps_gtclogistic( + 7.8, -3.0, 3.3, float("-inf"), 4.7, 0.0, 0.15, backend=backend + ) + expected = 7.444611 + assert np.isclose(res, expected) + + # upper bound equal to infinite + res = sr.crps_gtclogistic( + 7.8, -3.0, 3.3, -5.0, float("inf"), 0.15, 0.0, backend=backend + ) + expected = 6.339197 + assert np.isclose(res, expected) + + # aligns with crps_logistic + res0 = sr.crps_logistic(1.8, -3.0, 3.3, backend=backend) + res = sr.crps_gtclogistic(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # aligns with crps_tlogistic + res0 = sr.crps_tlogistic(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + res = sr.crps_gtclogistic(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_gtclogistic( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, 0.5, 0.0, backend=backend + ) + expected = np.array([3.627106, 3.270710]) + assert np.allclose(res, expected) + + +def test_crps_tlogis(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_tlogistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_tlogistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_tlogistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + 
assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_tlogistic(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_tlogistic(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_tlogistic( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_tlogistic(1.8, backend=backend) + res = sr.crps_tlogistic(1.8, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_tlogistic(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 1.72305 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_tlogistic(-5.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 3.395149 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_tlogistic(7.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 7.254933 + assert np.isclose(res, expected) + + # aligns with crps_logistic + res0 = sr.crps_logistic(1.8, -3.0, 3.3, backend=backend) + res = sr.crps_tlogistic(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_tlogistic( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([7.932799, 11.320006]) + assert np.allclose(res, expected) + + +def test_crps_clogis(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_clogistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_clogistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 
3) + + # multiple observations, multiple forecasts + res = sr.crps_clogistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_clogistic(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_clogistic(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_clogistic( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_clogistic(1.8, backend=backend) + res = sr.crps_clogistic(1.8, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_clogistic(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.599499 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_clogistic(-5.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.087692 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_clogistic(7.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 7.825271 + assert np.isclose(res, expected) + + # aligns with crps_logistic + res0 = sr.crps_logistic(1.8, -3.0, 3.3, backend=backend) + res = sr.crps_clogistic(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_clogistic( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([5.102037, 6.729603]) + assert np.allclose(res, expected) + + +def test_crps_gtcnormal(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_gtcnormal( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + 
backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_gtcnormal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_gtcnormal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_gtcnormal(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_gtcnormal(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # negative lower mass parameter + with pytest.raises(ValueError): + sr.crps_gtcnormal( + 0.7, 2.0, 0.4, lower=-1, lmass=-0.4, backend=backend, check_pars=True + ) + return + + # lower mass parameter larger than one + with pytest.raises(ValueError): + sr.crps_gtcnormal( + 0.7, 2.0, 0.4, lower=-1, lmass=1.4, backend=backend, check_pars=True + ) + return + + # negative upper mass parameter + with pytest.raises(ValueError): + sr.crps_gtcnormal( + 0.7, 2.0, 0.4, upper=4, umass=-0.4, backend=backend, check_pars=True + ) + return + + # upper mass parameter larger than one + with pytest.raises(ValueError): + sr.crps_gtcnormal( + 0.7, 2.0, 0.4, upper=4, umass=1.4, backend=backend, check_pars=True + ) + return + + # sum of lower and upper mass parameters larger than one + with pytest.raises(ValueError): + sr.crps_gtcnormal( + 0.7, + 2.0, + 0.4, + lower=-1, + upper=4, + lmass=0.7, + umass=0.4, + backend=backend, + check_pars=True, + ) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_gtcnormal( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_gtcnormal(1.8, backend=backend) + res = 
sr.crps_gtcnormal(1.8, 0.0, 1.0, -np.inf, np.inf, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_gtcnormal(1.8, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 1.986411 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_gtcnormal(-5.8, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 2.993124 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_gtcnormal(7.8, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 6.974827 + assert np.isclose(res, expected) + + # mass on lower bound equal to one + res = sr.crps_gtcnormal(7.8, -3.0, 3.3, -5.0, 4.7, 1.0, 0.0, backend=backend) + expected = 12.8 + assert np.isclose(res, expected) + + # mass on upper bound equal to one + res = sr.crps_gtcnormal(7.8, -3.0, 3.3, -5.0, 4.7, 0.0, 1.0, backend=backend) + expected = 3.1 + assert np.isclose(res, expected) + + # aligns with crps_normal + res0 = sr.crps_normal(1.8, -3.0, 3.3, backend=backend) + res = sr.crps_gtcnormal(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # aligns with crps_tnormal + res0 = sr.crps_tnormal(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + res = sr.crps_gtcnormal(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_gtcnormal( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, 0.5, 0.0, backend=backend + ) + expected = np.array([3.019942, 2.637561]) + assert np.allclose(res, expected) + + +def test_crps_tnormal(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_tnormal( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_tnormal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple 
observations, multiple forecasts + res = sr.crps_tnormal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_tnormal(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_tnormal(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_tnormal( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_tnormal(1.8, backend=backend) + res = sr.crps_tnormal(1.8, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_tnormal(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.326391 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_tnormal(-5.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.948674 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_tnormal(7.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 8.137612 + assert np.isclose(res, expected) + + # aligns with crps_normal + res0 = sr.crps_normal(1.8, -3.0, 3.3, backend=backend) + res = sr.crps_tnormal(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_tnormal( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([5.452673, 8.787913]) + assert np.allclose(res, expected) + + +def test_crps_cnormal(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_cnormal( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + 
+ # multiple observations, one forecast + res = sr.crps_cnormal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_cnormal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_cnormal(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_cnormal(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_cnormal( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_cnormal(1.8, backend=backend) + res = sr.crps_cnormal(1.8, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_cnormal(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 3.068522 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_cnormal(-5.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 1.956462 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_cnormal(7.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 8.87606 + assert np.isclose(res, expected) + + # aligns with crps_normal + res0 = sr.crps_cnormal(1.8, -3.0, 3.3, backend=backend) + res = sr.crps_cnormal(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_cnormal( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([4.401269, 6.851981]) + assert np.allclose(res, expected) + + +def test_crps_gtct(backend): + if backend in ["jax", "torch", 
"tensorflow"]: + pytest.skip("Not implemented in jax, torch or tensorflow backends") + + ## test shape + + # one observation, multiple forecasts + res = sr.crps_gtct( + 17, + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_gtct( + np.random.uniform(-10, 10, (4, 3)), + 11.1, + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_gtct( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative df parameter + with pytest.raises(ValueError): + sr.crps_gtct(0.7, -1.7, 2.0, 0.5, backend=backend, check_pars=True) + return + + # zero df parameter + with pytest.raises(ValueError): + sr.crps_gtct(0.7, 0.0, 2.0, 0.5, backend=backend, check_pars=True) + return + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_gtct(0.7, 4.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_gtct(0.7, 4.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # negative lower mass parameter + with pytest.raises(ValueError): + sr.crps_gtct( + 0.7, 4.7, 2.0, 0.4, lower=-1, lmass=-0.4, backend=backend, check_pars=True + ) + return + + # lower mass parameter larger than one + with pytest.raises(ValueError): + sr.crps_gtct( + 0.7, 4.7, 2.0, 0.4, lower=-1, lmass=1.4, backend=backend, check_pars=True + ) + return + + # negative upper mass parameter + with pytest.raises(ValueError): + sr.crps_gtct( + 0.7, 4.7, 2.0, 0.4, upper=4, umass=-0.4, backend=backend, check_pars=True + ) + return + + # upper mass parameter larger than one + with pytest.raises(ValueError): + sr.crps_gtct( + 0.7, 4.7, 
2.0, 0.4, upper=4, umass=1.4, backend=backend, check_pars=True + ) + return + + # sum of lower and upper mass parameters larger than one + with pytest.raises(ValueError): + sr.crps_gtct( + 0.7, + 4.7, + 2.0, + 0.4, + lower=-1, + upper=4, + lmass=0.7, + umass=0.4, + backend=backend, + check_pars=True, + ) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_gtct( + 0.7, 4.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_gtct(1.8, 6.9, backend=backend) + res = sr.crps_gtct(1.8, 6.9, 0.0, 1.0, -np.inf, np.inf, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_gtct(1.8, 6.9, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 1.963135 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_gtct(-5.8, 6.9, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 3.016045 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_gtct(7.8, 6.9, -3.0, 3.3, -5.0, 4.7, 0.1, 0.15, backend=backend) + expected = 6.921271 + assert np.isclose(res, expected) + + # mass on lower bound equal to one + res = sr.crps_gtct(7.8, 6.9, -3.0, 3.3, -5.0, 4.7, 1.0, 0.0, backend=backend) + expected = 12.8 + assert np.isclose(res, expected) + + # mass on upper bound equal to one + res = sr.crps_gtct(7.8, 6.9, -3.0, 3.3, -5.0, 4.7, 0.0, 1.0, backend=backend) + expected = 3.1 + assert np.isclose(res, expected) + + # aligns with crps_t + res0 = sr.crps_t(1.8, 6.9, -3.0, 3.3, backend=backend) + res = sr.crps_gtct(1.8, 6.9, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # aligns with crps_tt + res0 = sr.crps_tt(1.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + res = sr.crps_gtct(1.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_gtct( + np.array([1.8, 
-2.3]), 6.9, 9.1, 11.1, -4.0, np.inf, 0.5, 0.0, backend=backend + ) + expected = np.array([3.085881, 2.720847]) + assert np.allclose(res, expected) + + +def test_crps_tt(backend): + if backend in ["jax", "torch", "tensorflow"]: + pytest.skip("Not implemented in jax, torch or tensorflow backends") + + ## test shape + + # one observation, multiple forecasts + res = sr.crps_tt( + 17, + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_tt( + np.random.uniform(-10, 10, (4, 3)), + 11.1, + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_tt( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative df parameter + with pytest.raises(ValueError): + sr.crps_tt(0.7, -1.7, 2.0, 0.5, backend=backend, check_pars=True) + return + + # zero df parameter + with pytest.raises(ValueError): + sr.crps_tt(0.7, 0.0, 2.0, 0.5, backend=backend, check_pars=True) + return + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_tt(0.7, 4.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_tt(0.7, 4.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_tt( + 0.7, 4.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_tt(1.8, 6.9, backend=backend) + res = sr.crps_tt(1.8, 6.9, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_tt(1.8, 6.9, -3.0, 3.3, 
-5.0, 4.7, backend=backend) + expected = 2.285149 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_tt(-5.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.969029 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_tt(7.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 8.055997 + assert np.isclose(res, expected) + + # aligns with crps_t + res0 = sr.crps_t(1.8, 6.9, -3.0, 3.3, backend=backend) + res = sr.crps_tt(1.8, 6.9, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_tt( + np.array([1.8, -2.3]), 6.9, 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([5.753912, 9.123845]) + assert np.allclose(res, expected) + + +def test_crps_ct(backend): + if backend in ["jax", "torch", "tensorflow"]: + pytest.skip("Not implemented in jax, torch or tensorflow backends") + + ## test shape + + # one observation, multiple forecasts + res = sr.crps_ct( + 17, + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_ct( + np.random.uniform(-10, 10, (4, 3)), + 11.1, + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_ct( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative df parameter + with pytest.raises(ValueError): + sr.crps_ct(0.7, -1.7, 2.0, 0.5, backend=backend, check_pars=True) + return + + # zero df parameter + with pytest.raises(ValueError): + sr.crps_ct(0.7, 0.0, 2.0, 0.5, backend=backend, check_pars=True) + return + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_ct(0.7, 
4.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_ct(0.7, 4.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.crps_ct( + 0.7, 4.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.crps_ct(1.8, 6.9, backend=backend) + res = sr.crps_ct(1.8, 6.9, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_ct(1.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.988432 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.crps_ct(-5.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 1.970734 + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.crps_ct(7.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 8.676605 + assert np.isclose(res, expected) + + # aligns with crps_t + res0 = sr.crps_t(1.8, 6.9, -3.0, 3.3, backend=backend) + res = sr.crps_ct(1.8, 6.9, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.crps_ct( + np.array([1.8, -2.3]), 6.9, 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([4.475883, 6.811192]) + assert np.allclose(res, expected) + + +def test_crps_hypergeometric(backend): + if backend == "torch": + pytest.skip("Currently not working in torch backend") + + ## test shape + + # one observation, multiple forecasts + res = sr.crps_hypergeometric( + 17, + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(1, 10, size=(4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_hypergeometric( + np.random.randint(0, 20, size=(4, 3)), + 13, + 4, + 7, + backend=backend, + ) + assert res.shape == (4, 3) 
+ + # multiple observations, multiple forecasts + res = sr.crps_hypergeometric( + np.random.randint(0, 20, size=(4, 3)), + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(1, 10, size=(4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative m parameter + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=-1, n=10, k=5, backend=backend, check_pars=True) + return + + # negative n parameter + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=11, n=-2, k=5, backend=backend, check_pars=True) + return + + # negative k parameter + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=11, n=10, k=-5, backend=backend, check_pars=True) + return + + # k > m + n + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=11, n=10, k=25, backend=backend, check_pars=True) + return + + # non-integer m parameter + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=11.1, n=10, k=5, backend=backend, check_pars=True) + return + + # non-integer n parameter + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=11, n=10.8, k=5, backend=backend, check_pars=True) + return + + # non-integer k parameter + with pytest.raises(ValueError): + sr.crps_hypergeometric(7, m=11, n=10, k=4.3, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.crps_hypergeometric(5, 7, 13, 12, backend=backend) + expected = 0.4469742 + assert np.isclose(res, expected) + + # negative observation + res = sr.crps_hypergeometric(-5, 7, 13, 12, backend=backend) + expected = 8.615395 + assert np.isclose(res, expected) + + # non-integer observation + res = sr.crps_hypergeometric(5.6, 7, 13, 12, backend=backend) + expected = 0.9202868 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_hypergeometric(np.array([2, 7.6, -2.1]), 7, 13, 3, backend=backend) + expected = np.array([0.5965259, 6.1351223, 
2.7351223]) + assert np.allclose(res, expected) + + +def test_crps_laplace(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_laplace( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_laplace( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_laplace( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_laplace(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_laplace(7, 4, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default location + res0 = sr.crps_laplace(-3, backend=backend) + res = sr.crps_laplace(-3, location=0, backend=backend) + assert np.isclose(res0, res) + + # default scale + res = sr.crps_laplace(-3, location=0, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.crps_laplace(-3 + 0.1, location=0.1, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to rescaling + res0 = sr.crps_laplace((-3 - 0.1) / 0.8, backend=backend) + res = sr.crps_laplace(-3, location=0.1, scale=0.8, backend=backend) + assert np.isclose(res0 * 0.8, res) + + # single observation + res = sr.crps_laplace(-2.9, 0.1, backend=backend) + expected = 2.29978707 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_laplace( + np.array([-2.9, 4.1]), + np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + backend=backend, + ) + expected = np.array([3.222098, 1.576516]) + assert np.allclose(res, 
expected) + + +def test_crps_logis(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_logistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_logistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_logistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_logistic(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_logistic(7, 4, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default location + res0 = sr.crps_logistic(-3, backend=backend) + res = sr.crps_logistic(-3, location=0, backend=backend) + assert np.isclose(res0, res) + + # default scale + res = sr.crps_logistic(-3, location=0, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.crps_logistic(-3 + 0.1, location=0.1, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to rescaling + res0 = sr.crps_logistic((-3 - 0.1) / 0.8, backend=backend) + res = sr.crps_logistic(-3, location=0.1, scale=0.8, backend=backend) + assert np.isclose(res0 * 0.8, res) + + # single observation + res = sr.crps_logistic(17.1, 13.8, 3.3, backend=backend) + expected = 2.067527 + assert np.isclose(res, expected) + + res = sr.crps_logistic(3.1, 4.0, 0.5, backend=backend) + expected = 0.5529776 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_logistic( + np.array([-2.9, 4.1]), + np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + 
backend=backend, + ) + expected = np.array([4.295051, 1.429361]) + assert np.allclose(res, expected) def test_crps_loglaplace(backend): - assert np.isclose(sr.crps_loglaplace(3.0, 0.1, 0.9, backend=backend), 1.16202051) + ## test shape + + # one observation, multiple forecasts + res = sr.crps_loglaplace( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_loglaplace( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 0.9, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_loglaplace( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_loglaplace(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_loglaplace(7, 4, 0.0, backend=backend, check_pars=True) + return + + # scale parameter larger than one + with pytest.raises(ValueError): + sr.crps_loglaplace(7, 4, 1.1, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.crps_loglaplace(7.1, 3.8, 0.3, backend=backend) + expected = 30.71884 + assert np.isclose(res, expected) + + # negative observation + res = sr.crps_loglaplace(-4.1, -3.8, 0.3, backend=backend) + expected = 4.118925 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_loglaplace( + np.array([2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([0.1, 0.9]), + backend=backend, + ) + expected = np.array([0.09159719, 1.95400976]) + assert np.allclose(res, expected) + + +def test_crps_loglogistic(backend): + if backend == "torch": + pytest.skip("Not implemented in torch backend") + + ## test shape + + # one observation, 
multiple forecasts + res = sr.crps_loglogistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_loglogistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 0.9, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_loglogistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_loglogistic(7, 4, -0.1, backend=backend, check_pars=True) + return -def test_crps_loglogistic(backend): - if backend == "torch": - pytest.skip("Not implemented in torch backend") + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_loglogistic(7, 4, 0.0, backend=backend, check_pars=True) + return + + # scale parameter larger than one + with pytest.raises(ValueError): + sr.crps_loglogistic(7, 4, 1.1, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.crps_loglogistic(7.1, 3.8, 0.3, backend=backend) + expected = 29.35987 + assert np.isclose(res, expected) + + # negative observation + res = sr.crps_loglogistic(-4.1, -3.8, 0.3, backend=backend) + expected = 4.118243 + assert np.isclose(res, expected) - # TODO: investigate why JAX results are different from other backends - # (would fail test with smaller tolerance) - assert np.isclose( - sr.crps_loglogistic(3.0, 0.1, 0.9, backend=backend), 1.13295277, atol=1e-4 + # multiple observations + res = sr.crps_loglogistic( + np.array([2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([0.1, 0.9]), + backend=backend, ) + expected = np.array([0.1256149, 3.1692104]) + assert np.allclose(res, expected) def test_crps_lognormal(backend): - obs = np.exp(np.random.randn(N)) - mulog = np.log(obs) 
+ np.random.randn(N) * 0.1 - sigmalog = abs(np.random.randn(N)) * 0.3 + ## test shape - # non-negative values - res = sr.crps_lognormal(obs, mulog, sigmalog, backend=backend) - res = np.asarray(res) - assert not np.any(np.isnan(res)) - assert not np.any(res < 0.0) + # one observation, multiple forecasts + res = sr.crps_lognormal( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 10, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - # approx zero when perfect forecast - mulog = np.log(obs) + np.random.randn(N) * 1e-6 - sigmalog = abs(np.random.randn(N)) * 1e-6 - res = sr.crps_lognormal(obs, mulog, sigmalog, backend=backend) - res = np.asarray(res) - assert not np.any(np.isnan(res)) - assert not np.any(res - 0.0 > 0.0001) + # multiple observations, one forecast + res = sr.crps_lognormal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 2.9, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_lognormal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_lognormal(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_lognormal(7, 4, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.crps_lognormal(7.1, 3.8, 0.3, backend=backend) + expected = 31.80341 + assert np.isclose(res, expected) + + # negative observation + res = sr.crps_lognormal(-4.1, -3.8, 0.3, backend=backend) + expected = 4.119469 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_lognormal( + np.array([2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([0.1, 0.9]), + backend=backend, + ) + expected = np.array([0.08472861, 1.63688414]) + assert np.allclose(res, 
expected) def test_crps_mixnorm(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.crps_mixnorm( + 17, + np.random.uniform(-5, 5, (4, 3, 5)), + np.random.uniform(0, 5, (4, 3, 5)), + np.random.uniform(0, 1, (4, 3, 5)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_mixnorm( + np.random.uniform(-5, 5, (4, 3)), + np.array([0.0, -2.9, 0.9, 3.2, 7.0]), + np.array([0.6, 2.9, 0.9, 3.2, 7.0]), + np.array([0.6, 2.9, 0.9, 0.2, 0.1]), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_mixnorm( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(-5, 5, (4, 3, 5)), + np.random.uniform(0, 5, (4, 3, 5)), + np.random.uniform(0, 1, (4, 3, 5)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_mixnorm( + 7, np.array([4, 2]), np.array([-1, 2]), backend=backend, check_pars=True + ) + return + + # negative weight parameter + with pytest.raises(ValueError): + sr.crps_mixnorm( + 7, + np.array([4, 2]), + np.array([1, 2]), + np.array([-1, 2]), + backend=backend, + check_pars=True, + ) + return + + ## test correctness + + # single observation obs, m, s, w = 0.3, [0.0, -2.9, 0.9], [0.5, 1.4, 0.7], [1 / 3, 1 / 3, 1 / 3] res = sr.crps_mixnorm(obs, m, s, w, backend=backend) expected = 0.4510451 assert np.isclose(res, expected) + # default weights res0 = sr.crps_mixnorm(obs, m, s, backend=backend) assert np.isclose(res, res0) + # non-constant weights w = [0.3, 0.1, 0.6] res = sr.crps_mixnorm(obs, m, s, w, backend=backend) expected = 0.2354619 assert np.isclose(res, expected) + # weights that do not sum to one + w = [8.3, 0.1, 4.6] + res = sr.crps_mixnorm(obs, m, s, w, backend=backend) + expected = 0.1678256 + assert np.isclose(res, expected) + + # m-axis argument obs = [-1.6, 0.3] m = [[0.0, -2.9], [0.6, 0.0], [-1.1, -2.3]] s = [[0.5, 1.7], 
[1.1, 0.7], [1.4, 1.5]] @@ -621,114 +2481,535 @@ def test_crps_mixnorm(backend): res2 = sr.crps_mixnorm(obs, m, s, backend=backend) assert np.allclose(res1, res2) + # check equality with normal distribution + obs = 1.6 + m, s, w = [1.8, 1.8], [2.3, 2.3], [0.6, 0.8] + res0 = sr.crps_normal(obs, m[0], s[0], backend=backend) + res = sr.crps_mixnorm(obs, m, s, w, backend=backend) + assert np.isclose(res0, res) + def test_crps_negbinom(backend): if backend in ["jax", "torch", "tensorflow"]: pytest.skip("Not implemented in jax, torch or tensorflow backends") - # test exceptions + ## test shape + + # one observation, multiple forecasts + res = sr.crps_negbinom( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_negbinom( + np.random.uniform(0, 20, (4, 3)), + 12, + 0.2, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_negbinom( + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # prob and mu are both provided with pytest.raises(ValueError): sr.crps_negbinom(0.3, 7.0, 0.8, mu=7.3, backend=backend) - # test correctness - obs, n, prob = 2.0, 7.0, 0.8 - res = sr.crps_negbinom(obs, n, prob, backend=backend) + # neither prob nor mu are provided + with pytest.raises(ValueError): + sr.crps_negbinom(0.3, 8, backend=backend) + + # negative size parameter + with pytest.raises(ValueError): + sr.crps_negbinom(4.3, -8, 0.8, backend=backend, check_pars=True) + + # negative mu parameter + with pytest.raises(ValueError): + sr.crps_negbinom(4.3, 8, mu=-0.8, backend=backend, check_pars=True) + + # negative prob parameter + with pytest.raises(ValueError): + sr.crps_negbinom(4.3, 8, -0.8, backend=backend, check_pars=True) + + # prob parameter larger than one + 
with pytest.raises(ValueError): + sr.crps_negbinom(4.3, 8, 1.8, backend=backend, check_pars=True) + + ## test correctness + + # single observation + res = sr.crps_negbinom(2.0, 7.0, 0.8, backend=backend) expected = 0.3834322 assert np.isclose(res, expected) - obs, n, prob = 1.5, 2.0, 0.5 - res = sr.crps_negbinom(obs, n, prob, backend=backend) - expected = 0.462963 + # non-integer size + res = sr.crps_negbinom(1.5, 2.4, 0.5, backend=backend) + expected = 0.5423484 assert np.isclose(res, expected) - obs, n, prob = -1.0, 17.0, 0.1 - res = sr.crps_negbinom(obs, n, prob, backend=backend) + # negative observation + res = sr.crps_negbinom(-1.0, 17.0, 0.1, backend=backend) expected = 132.0942 assert np.isclose(res, expected) - obs, n, mu = 2.3, 11.0, 7.3 - res = sr.crps_negbinom(obs, n, mu=mu, backend=backend) + # mu given instead of prob + res = sr.crps_negbinom(2.3, 11.0, mu=7.3, backend=backend) expected = 3.149218 assert np.isclose(res, expected) + # multiple observations + res = sr.crps_negbinom(np.array([1.9, 7.3]), 7.0, 0.8, backend=backend) + expected = np.array([0.3827689, 4.7630042]) + assert np.allclose(res, expected) + def test_crps_normal(backend): - obs = np.random.randn(N) - mu = obs + np.random.randn(N) * 0.1 - sigma = abs(np.random.randn(N)) * 0.3 + ## test shape - # non-negative values - res = sr.crps_normal(obs, mu, sigma, backend=backend) - res = np.asarray(res) - assert not np.any(np.isnan(res)) - assert not np.any(res < 0.0) + # one observation, multiple forecasts + res = sr.crps_normal( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - # approx zero when perfect forecast - mu = obs + np.random.randn(N) * 1e-6 - sigma = abs(np.random.randn(N)) * 1e-6 - res = sr.crps_normal(obs, mu, sigma, backend=backend) - res = np.asarray(res) + # multiple observations, one forecast + res = sr.crps_normal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + 
assert res.shape == (4, 3) - assert not np.any(np.isnan(res)) - assert not np.any(res - 0.0 > 0.0001) + # multiple observations, multiple forecasts + res = sr.crps_normal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_normal(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_normal(7, 4, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default mu + res0 = sr.crps_normal(-3, backend=backend) + res = sr.crps_normal(-3, mu=0, backend=backend) + assert np.isclose(res0, res) + + # default sigma + res = sr.crps_normal(-3, mu=0, sigma=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.crps_normal(-3 + 0.1, mu=0.1, sigma=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to rescaling + res0 = sr.crps_normal((-3 - 0.1) / 0.8, backend=backend) + res = sr.crps_normal(-3, mu=0.1, sigma=0.8, backend=backend) + assert np.isclose(res0 * 0.8, res) + + # single observation + res = sr.crps_normal(7.1, 3.8, 0.3, backend=backend) + expected = 3.130743 + assert np.isclose(res, expected) + + res = sr.crps_normal(3.1, 4.0, 0.5, backend=backend) + expected = 0.6321808 + assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_normal( + np.array([-2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + backend=backend, + ) + expected = np.array([2.984174, 1.935862]) + assert np.allclose(res, expected) def test_crps_2pnormal(backend): - obs = np.random.randn(N) - mu = obs + np.random.randn(N) * 0.1 - sigma1 = abs(np.random.randn(N)) * 0.3 - sigma2 = abs(np.random.randn(N)) * 0.2 + ## test shape + + # one observation, multiple forecasts + res = sr.crps_2pnormal( + 17, + 
np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - res = sr.crps_2pnormal(obs, sigma1, sigma2, mu, backend=backend) - res = np.asarray(res) - assert not np.any(np.isnan(res)) - assert not np.any(res < 0.0) + # multiple observations, one forecast + res = sr.crps_2pnormal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + 2.8, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_2pnormal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale1 parameter + with pytest.raises(ValueError): + sr.crps_2pnormal(7, -0.1, 0.1, backend=backend, check_pars=True) + return + + # zero scale1 parameter + with pytest.raises(ValueError): + sr.crps_2pnormal(7, 0.0, backend=backend, check_pars=True) + return + + # negative scale2 parameter + with pytest.raises(ValueError): + sr.crps_2pnormal(7, 0.1, -2.1, backend=backend, check_pars=True) + return + + # zero scale2 parameter + with pytest.raises(ValueError): + sr.crps_2pnormal(7, 1.1, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default scale1 + res0 = sr.crps_2pnormal(-3, backend=backend) + res = sr.crps_2pnormal(-3, scale1=1.0, backend=backend) + assert np.isclose(res0, res) + + # default scale2 + res = sr.crps_2pnormal(-3, scale1=1.0, scale2=1.0, backend=backend) + assert np.isclose(res0, res) + + # default location + res = sr.crps_2pnormal(-3, scale1=1.0, scale2=1.0, location=0.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.crps_2pnormal(-3 + 0.1, 1.0, 1.0, location=0.1, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_2pnormal(7.1, 3.8, 0.3, -2.0, backend=backend) + expected = 10.5914 + 
assert np.isclose(res, expected) + + # check equivalence with standard normal distribution + res0 = sr.crps_normal(3.1, 4.0, 0.5, backend=backend) + res = sr.crps_2pnormal(3.1, 0.5, 0.5, 4.0, backend=backend) + assert np.isclose(res0, res) + + # multiple observations + res = sr.crps_2pnormal( + np.array([-2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + backend=backend, + ) + expected = np.array([6.572028, 4.026696]) + assert np.allclose(res, expected) def test_crps_poisson(backend): - obs, mean = 1.0, 3.0 - res = sr.crps_poisson(obs, mean, backend=backend) + ## test shape + + # one observation, multiple forecasts + res = sr.crps_poisson( + 17, + np.random.uniform(0, 20, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_poisson( + np.random.uniform(-5, 10, (4, 3)), + 3.2, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_poisson( + np.random.uniform(-5, 10, (4, 3)), + np.random.uniform(0, 20, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative mean parameter + with pytest.raises(ValueError): + sr.crps_poisson(7, -0.1, backend=backend, check_pars=True) + return + + # zero mean parameter + with pytest.raises(ValueError): + sr.crps_poisson(7, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.crps_poisson(1.0, 3.0, backend=backend) expected = 1.143447 assert np.isclose(res, expected) - obs, mean = 1.5, 2.3 - res = sr.crps_poisson(obs, mean, backend=backend) + res = sr.crps_poisson(1.5, 2.3, backend=backend) expected = 0.5001159 assert np.isclose(res, expected) - obs, mean = -1.0, 1.5 - res = sr.crps_poisson(obs, mean, backend=backend) + # negative observation + res = sr.crps_poisson(-1.0, 1.5, backend=backend) expected = 1.840259 assert np.isclose(res, expected) + # multiple observations + res = 
sr.crps_poisson(np.array([-2.9, 4.4]), np.array([10.1, 1.2]), backend=backend) + expected = np.array([11.218179, 2.630758]) + assert np.allclose(res, expected) + def test_crps_t(backend): - if backend in ["jax", "torch", "tensorflow"]: - pytest.skip("Not implemented in jax, torch or tensorflow backends") + if backend == "torch": + pytest.skip("Not implemented in torch backend") + + ## test shape + + # one observation, multiple forecasts + res = sr.crps_t( + 17, + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_t( + np.random.uniform(-5, 20, (4, 3)), + 12, + 12, + 12, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_t( + np.random.uniform(-5, 20, (4, 3)), + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative df parameter + with pytest.raises(ValueError): + sr.crps_t(7, -4, backend=backend, check_pars=True) + return + + # zero df parameter + with pytest.raises(ValueError): + sr.crps_t(7, 0, backend=backend, check_pars=True) + return + + # negative scale parameter + with pytest.raises(ValueError): + sr.crps_t(7, 4, scale=-0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.crps_t(7, 4, scale=0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default location + res0 = sr.crps_t(-3, 2.1, backend=backend) + res = sr.crps_t(-3, 2.1, location=0, backend=backend) + assert np.isclose(res0, res) - obs, df, mu, sigma = 11.1, 5.2, 13.8, 2.3 - expected = 1.658226 - res = sr.crps_t(obs, df, mu, sigma, backend=backend) + # default scale + res = sr.crps_t(-3, 2.1, location=0, scale=1.0, backend=backend) + assert 
np.isclose(res0, res) + + # invariance to location shifts + res = sr.crps_t(-3 + 0.1, 2.1, location=0.1, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to rescaling + res0 = sr.crps_t((-3 - 0.1) / 0.8, 2.1, backend=backend) + res = sr.crps_t(-3, 2.1, location=0.1, scale=0.8, backend=backend) + assert np.isclose(res0 * 0.8, res) + + # single observation + res = sr.crps_t(7.1, 4.2, 3.8, 0.3, backend=backend) + expected = 3.082766 assert np.isclose(res, expected) - obs, df = 0.7, 4.0 - expected = 0.4387929 - res = sr.crps_t(obs, df, backend=backend) + # negative observation + res = sr.crps_t(-3.1, 2.9, 4.0, 0.5, backend=backend) + expected = 6.682638 assert np.isclose(res, expected) + # multiple observations + res = sr.crps_t( + np.array([-2.9, 4.4]), + np.array([5.1, 19.4]), + np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + backend=backend, + ) + expected = np.array([3.183587, 1.914685]) + assert np.allclose(res, expected) + def test_crps_uniform(backend): - obs, min, max, lmass, umass = 0.3, -1.0, 2.1, 0.3, 0.1 - res = sr.crps_uniform(obs, min, max, lmass, umass, backend=backend) + ## test shape + + # one observation, multiple forecasts + res = sr.crps_uniform( + 7, + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(5, 10, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.crps_uniform( + np.random.uniform(0, 10, (4, 3)), + 1, + 7, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.crps_uniform( + np.random.uniform(0, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(5, 10, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative lmass parameter + with pytest.raises(ValueError): + sr.crps_uniform(0.7, lmass=-0.1, backend=backend, check_pars=True) + return + + # lmass parameter greater than one + with pytest.raises(ValueError): + sr.crps_uniform(0.7, 
lmass=4, backend=backend, check_pars=True) + return + + # negative umass parameter + with pytest.raises(ValueError): + sr.crps_uniform(0.7, umass=-0.1, backend=backend, check_pars=True) + return + + # umass parameter greater than one + with pytest.raises(ValueError): + sr.crps_uniform(0.7, umass=4, backend=backend, check_pars=True) + return + + # lmass + umass >= 1 + with pytest.raises(ValueError): + sr.crps_uniform(0.7, lmass=0.2, umass=0.9, backend=backend, check_pars=True) + return + + # min >= max + with pytest.raises(ValueError): + sr.crps_uniform(0.7, min=2, max=1, backend=backend, check_pars=True) + return + + ## test correctness + + # default min + res0 = sr.crps_uniform(0.7, backend=backend) + res = sr.crps_uniform(0.7, min=0, backend=backend) + assert np.isclose(res0, res) + + # default max + res = sr.crps_uniform(0.7, min=0, max=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.crps_uniform(0.7 + 0.1, min=0.1, max=1.1, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.crps_uniform(0.3, -1.0, 2.1, 0.3, 0.1, backend=backend) expected = 0.3960968 assert np.isclose(res, expected) - obs, min, max, lmass = -17.9, -15.2, -8.7, 0.2 - res = sr.crps_uniform(obs, min, max, lmass, backend=backend) - expected = 4.086667 + res = sr.crps_uniform(2.2, 0.1, 3.1, backend=backend) + expected = 0.37 assert np.isclose(res, expected) - obs, min, max = 2.2, 0.1, 3.1 - res = sr.crps_uniform(obs, min, max, backend=backend) - expected = 0.37 + # observation outside of interval range + res = sr.crps_uniform(-17.9, -15.2, -8.7, 0.2, backend=backend) + expected = 4.086667 assert np.isclose(res, expected) + + # multiple observations + res = sr.crps_uniform( + np.array([-2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([10.1, 2.1]), + backend=backend, + ) + expected = np.array([7.0, 2.4]) + assert np.allclose(res, expected) diff --git a/tests/test_logs.py b/tests/test_logs.py index 53857ea..321d9b4 
100644 --- a/tests/test_logs.py +++ b/tests/test_logs.py @@ -68,452 +68,1872 @@ def test_clogs(backend): assert np.isclose(res, expected) -def test_beta(backend): - if backend == "torch": - pytest.skip("Not implemented in torch backend") +def test_logs_beta(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.logs_beta( + 0.1, + np.random.uniform(0, 3, (4, 3)), + np.random.uniform(0, 3, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_beta( + np.random.uniform(0, 1, (4, 3)), + 2.4, + 0.5, + backend=backend, + ) + assert res.shape == (4, 3) + # multiple observations, multiple forecasts res = sr.logs_beta( - np.random.uniform(0, 1, (3, 3)), - np.random.uniform(0, 3, (3, 3)), - 1.1, + np.random.uniform(0, 1, (4, 3)), + np.random.uniform(0, 3, (4, 3)), + np.random.uniform(0, 3, (4, 3)), backend=backend, ) - assert res.shape == (3, 3) - assert not np.any(np.isnan(res)) + assert res.shape == (4, 3) + + ## test exceptions + + # lower bound smaller than upper bound + with pytest.raises(ValueError): + sr.logs_beta( + 0.3, 0.7, 1.1, lower=1.0, upper=0.0, backend=backend, check_pars=True + ) + return + + # lower bound equal to upper bound + with pytest.raises(ValueError): + sr.logs_beta( + 0.3, 0.7, 1.1, lower=0.5, upper=0.5, backend=backend, check_pars=True + ) + return + + # negative shape parameters + with pytest.raises(ValueError): + sr.logs_beta(0.3, -0.7, 1.1, backend=backend, check_pars=True) + return + + with pytest.raises(ValueError): + sr.logs_beta(0.3, 0.7, -1.1, backend=backend, check_pars=True) + return + + ## correctness tests - # TODO: investigate why JAX results are different from other backends - # (would fail test with smaller tolerance) - obs, shape1, shape2, lower, upper = 0.3, 0.7, 1.1, 0.0, 1.0 - res = sr.logs_beta(obs, shape1, shape2, lower, upper, backend=backend) + # single forecast + res = sr.logs_beta(0.3, 0.7, 1.1, backend=backend) expected = -0.04344567 
- assert np.isclose(res, expected, atol=1e-4) + assert np.isclose(res, expected) - obs, shape1, shape2, lower, upper = -3.0, 0.9, 2.1, -5.0, 4.0 - res = sr.logs_beta(obs, shape1, shape2, lower, upper, backend=backend) - expected = 1.74193 + # custom bounds + res = sr.logs_beta(-3.0, 0.7, 1.1, lower=-5.0, upper=4.0, backend=backend) + expected = 2.053211 assert np.isclose(res, expected) - obs, shape1, shape2, lower, upper = -3.0, 0.9, 2.1, -2.0, 4.0 - res = sr.logs_beta(obs, shape1, shape2, lower, upper, backend=backend) + # observation outside bounds + res = sr.logs_beta(-3.0, 0.7, 1.1, lower=-1.3, upper=4.0, backend=backend) expected = float("inf") assert np.isclose(res, expected) + # multiple forecasts + res = sr.logs_beta( + -3.0, + np.array([0.7, 2.0]), + np.array([1.1, 4.8]), + lower=np.array([-5.0, -5.0]), + upper=np.array([4.0, 4.0]), + ) + expected = np.array([2.053211, 1.329823]) + assert np.allclose(res, expected) + + +def test_logs_binomial(backend): + if backend == "torch": + pytest.skip("Not implemented in torch backend") + + ## test shape + + # res = sr.logs_binomial( + # np.random.randint(0, 10, size=(4, 3)), + # np.full((4, 3), 10), + # np.random.uniform(0, 1, (4, 3)), + # backend=backend, + # ) + # assert res.shape == (4, 3) + # + + ones = np.ones(2) + k, n, p = 8, 10, 0.9 + s = sr.logs_binomial(k * ones, n, p, backend=backend) + assert np.isclose(s, np.array([1.64139182, 1.64139182])).all() + s = sr.logs_binomial(k * ones, n * ones, p, backend=backend) + assert np.isclose(s, np.array([1.64139182, 1.64139182])).all() + s = sr.logs_binomial(k * ones, n * ones, p * ones, backend=backend) + assert np.isclose(s, np.array([1.64139182, 1.64139182])).all() + s = sr.logs_binomial(k, n * ones, p * ones, backend=backend) + assert np.isclose(s, np.array([1.64139182, 1.64139182])).all() + s = sr.logs_binomial(k * ones, n, p * ones, backend=backend) + assert np.isclose(s, np.array([1.64139182, 1.64139182])).all() + + ## test exceptions + + # negative size 
parameter + with pytest.raises(ValueError): + sr.logs_binomial(7, -1, 0.9, backend=backend, check_pars=True) + return + + # zero size parameter + with pytest.raises(ValueError): + sr.logs_binomial(2.1, 0, 0.1, backend=backend, check_pars=True) + return + + # negative prob parameter + with pytest.raises(ValueError): + sr.logs_binomial(7, 15, -0.1, backend=backend, check_pars=True) + return -def test_binomial(backend): - # test correctness + # prob parameter greater than one + with pytest.raises(ValueError): + sr.logs_binomial(1, 10, 1.1, backend=backend, check_pars=True) + return + + # non-integer size parameter + with pytest.raises(ValueError): + sr.logs_binomial(4, 8.99, 0.5, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation res = sr.logs_binomial(8, 10, 0.9, backend=backend) expected = 1.641392 assert np.isclose(res, expected) + # negative observation res = sr.logs_binomial(-8, 10, 0.9, backend=backend) expected = float("inf") assert np.isclose(res, expected) - res = sr.logs_binomial(18, 30, 0.5, backend=backend) - expected = 2.518839 + # zero prob parameter + res = sr.logs_binomial(71, 212, 0, backend=backend) + expected = float("inf") assert np.isclose(res, expected) - -def test_exponential(backend): - obs, rate = 0.3, 0.1 - res = sr.logs_exponential(obs, rate, backend=backend) - expected = 2.332585 + # observation larger than size + res = sr.logs_binomial(18, 10, 0.9, backend=backend) + expected = float("inf") assert np.isclose(res, expected) - obs, rate = -1.3, 2.4 - res = sr.logs_exponential(obs, rate, backend=backend) + # non-integer observation + res = sr.logs_binomial(5.6, 10, 0.9, backend=backend) expected = float("inf") assert np.isclose(res, expected) - obs, rate = 0.0, 0.9 - res = sr.logs_exponential(obs, rate, backend=backend) - expected = 0.1053605 - assert np.isclose(res, expected) + # multiple observations + res = sr.logs_binomial(np.array([5.0, 17.2]), 10, 0.9, backend=backend) + expected = 
np.array([6.510299, float("inf")]) + assert np.allclose(res, expected) -def test_gamma(backend): - obs, shape, rate = 0.2, 1.1, 0.7 - expected = 0.6434138 +def test_logs_exponential(backend): + ## test shape - res = sr.logs_gamma(obs, shape, rate, backend=backend) - assert np.isclose(res, expected) + # one observation, multiple forecasts + res = sr.logs_exponential( + 7.2, + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - res = sr.logs_gamma(obs, shape, scale=1 / rate, backend=backend) - assert np.isclose(res, expected) + # multiple observations, one forecast + res = sr.logs_exponential( + np.random.uniform(0, 10, (4, 3)), + 7.2, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_exponential( + np.random.uniform(0, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + # negative rate with pytest.raises(ValueError): - sr.logs_gamma(obs, shape, rate, scale=1 / rate, backend=backend) + sr.logs_exponential(3.2, -1, backend=backend, check_pars=True) return + # zero rate with pytest.raises(ValueError): - sr.logs_gamma(obs, shape, backend=backend) + sr.logs_exponential(3.2, 0, backend=backend, check_pars=True) return + ## test correctness -def test_gev(backend): - obs, xi, mu, sigma = 0.3, 0.7, 0.0, 1.0 - res0 = sr.logs_gev(obs, xi, backend=backend) - expected = 1.22455 - assert np.isclose(res0, expected) - - res = sr.logs_gev(obs, xi, mu, sigma, backend=backend) - assert np.isclose(res, res0) - - obs, xi, mu, sigma = 0.3, -0.7, 0.0, 1.0 - res = sr.logs_gev(obs, xi, mu, sigma, backend=backend) - expected = 0.8151139 - assert np.isclose(res, expected) - - obs, xi, mu, sigma = 0.3, 0.0, 0.0, 1.0 - res = sr.logs_gev(obs, xi, mu, sigma, backend=backend) - expected = 1.040818 + # single observation + res = sr.logs_exponential(3, 0.7, backend=backend) + expected = 2.456675 assert np.isclose(res, expected) 
- obs, xi, mu, sigma = -3.6, 0.7, 0.0, 1.0 - res = sr.logs_gev(obs, xi, mu, sigma, backend=backend) + # negative observation + res = sr.logs_exponential(-3, 0.7, backend=backend) expected = float("inf") assert np.isclose(res, expected) - obs, xi, mu, sigma = -3.6, 1.7, -4.0, 2.7 - res = sr.logs_gev(obs, xi, mu, sigma, backend=backend) - expected = 2.226233 - assert np.isclose(res, expected) + # multiple observations + res = sr.logs_exponential(np.array([5.6, 17.2]), 10.1, backend=backend) + expected = np.array([54.24746, 171.40746]) + assert np.allclose(res, expected) -def test_gpd(backend): - obs, shape, location, scale = 0.8, 0.9, 0.0, 1.0 - res0 = sr.logs_gpd(obs, shape, backend=backend) - expected = 1.144907 - assert np.isclose(res0, expected) +def test_logs_2pexponential(backend): + ## test shape - res = sr.logs_gpd(obs, shape, location, scale, backend=backend) - assert np.isclose(res0, res) + # one observation, multiple forecasts + res = sr.logs_2pexponential( + 6.2, + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(-2, 2, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_2pexponential( + np.random.uniform(-5, 5, (4, 3)), + 2.4, + 10.1, + np.random.uniform(-2, 2, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_2pexponential( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(-2, 2, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - obs = -0.8 - res = sr.logs_gpd(obs, shape, location, scale, backend=backend) - expected = float("inf") - assert np.isclose(res, expected) + ## test exceptions - obs, shape = 0.8, -0.9 - res = sr.logs_gpd(obs, shape, location, scale, backend=backend) - expected = 0.1414406 - assert np.isclose(res, expected) + # negative scale parameters + with 
pytest.raises(ValueError): + sr.logs_2pexponential(3.2, -1, 2, 2.1, backend=backend, check_pars=True) + return + + with pytest.raises(ValueError): + sr.logs_2pexponential(3.2, 1, -2, 2.1, backend=backend, check_pars=True) + return + + # zero scale parameters + with pytest.raises(ValueError): + sr.logs_2pexponential(3.2, 0, 2, 2.1, backend=backend, check_pars=True) + return + + with pytest.raises(ValueError): + sr.logs_2pexponential(3.2, 1, 0, 2.1, backend=backend, check_pars=True) + return - shape, scale = 0.0, 2.1 - res = sr.logs_gpd(obs, shape, location, scale, backend=backend) - expected = 1.12289 + ## test correctness + + # single observation + res = sr.logs_2pexponential(0.3, 0.1, 4.3, 0.0, backend=backend) + expected = 1.551372 assert np.isclose(res, expected) - obs, shape, location, scale = -17.4, 2.3, -21.0, 4.3 - res = sr.logs_gpd(obs, shape, location, scale, backend=backend) - expected = 2.998844 + # negative location + res = sr.logs_2pexponential(-20.8, 7.1, 2.0, -25.4, backend=backend) + expected = 4.508274 assert np.isclose(res, expected) + # multiple observations + res = sr.logs_2pexponential( + np.array([-1.2, 3.7]), 0.1, 4.3, np.array([-0.1, 0.1]), backend=backend + ) + expected = np.array([12.481605, 2.318814]) + assert np.allclose(res, expected) -def test_hypergeometric(backend): - res = sr.logs_hypergeometric(5, 7, 13, 12) - expected = 1.251525 - assert np.isclose(res, expected) + # check equivalence with laplace distribution + res = sr.logs_2pexponential(-20.8, 2.0, 2.0, -25.4, backend=backend) + res0 = sr.logs_laplace(-20.8, -25.4, 2.0, backend=backend) + assert np.isclose(res, res0) - res = sr.logs_hypergeometric(5 * np.ones((2, 2)), 7, 13, 12, backend=backend) - assert res.shape == (2, 2) - res = sr.logs_hypergeometric(5, 7 * np.ones((2, 2)), 13, 12, backend=backend) - assert res.shape == (2, 2) +def test_logs_gamma(backend): + ## test shape + # one observation, multiple forecasts + res = sr.logs_gamma( + 6.2, + np.random.uniform(0, 5, (4, 
3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) -def test_logis(backend): - obs, mu, sigma = 17.1, 13.8, 3.3 - res = sr.logs_logistic(obs, mu, sigma, backend=backend) - expected = 2.820446 - assert np.isclose(res, expected) + # multiple observations, one forecast + res = sr.logs_gamma( + np.random.uniform(0, 10, (4, 3)), + 4.0, + 2.1, + backend=backend, + ) + assert res.shape == (4, 3) - obs, mu, sigma = 3.1, 4.0, 0.5 - res = sr.logs_logistic(obs, mu, sigma, backend=backend) - expected = 1.412808 - assert np.isclose(res, expected) + # multiple observations, multiple forecasts + res = sr.logs_gamma( + np.random.uniform(0, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + ## test exceptions -def test_loglogistic(backend): - obs, mulog, sigmalog = 3.0, 0.1, 0.9 - res = sr.logs_loglogistic(obs, mulog, sigmalog, backend=backend) - expected = 2.672729 - assert np.isclose(res, expected) + # rate and scale provided + with pytest.raises(ValueError): + sr.logs_gamma(3.2, -1, rate=2, scale=0.4, backend=backend, check_pars=True) + return - obs, mulog, sigmalog = 0.0, 0.1, 0.9 - res = sr.logs_loglogistic(obs, mulog, sigmalog, backend=backend) - expected = float("inf") - assert np.isclose(res, expected) + # neither rate nor scale provided + with pytest.raises(ValueError): + sr.logs_gamma(3.2, -1, backend=backend, check_pars=True) + return - obs, mulog, sigmalog = 12.0, 12.1, 4.9 - res = sr.logs_loglogistic(obs, mulog, sigmalog, backend=backend) - expected = 6.299409 - assert np.isclose(res, expected) + # negative shape parameter + with pytest.raises(ValueError): + sr.logs_gamma(3.2, -1, 2, backend=backend, check_pars=True) + return + # negative rate/scale parameter + with pytest.raises(ValueError): + sr.logs_gamma(3.2, 1, -2, backend=backend, check_pars=True) + return -def test_lognormal(backend): - obs, mulog, sigmalog = 3.0, 0.1, 0.9 - res = 
sr.logs_lognormal(obs, mulog, sigmalog, backend=backend) - expected = 2.527762 - assert np.isclose(res, expected) + # zero shape parameter + with pytest.raises(ValueError): + sr.logs_gamma(3.2, 0.0, 2, backend=backend, check_pars=True) + return - obs, mulog, sigmalog = 0.0, 0.1, 0.9 - res = sr.logs_lognormal(obs, mulog, sigmalog, backend=backend) - expected = float("inf") - assert np.isclose(res, expected) + ## test correctness - obs, mulog, sigmalog = 12.0, 12.1, 4.9 - res = sr.logs_lognormal(obs, mulog, sigmalog, backend=backend) - expected = 6.91832 + # single observation + res = sr.logs_gamma(0.2, 1.1, 0.7, backend=backend) + expected = 0.6434138 assert np.isclose(res, expected) - -def test_exponential2(backend): - obs, location, scale = 8.3, 7.0, 1.2 - res = sr.logs_exponential2(obs, location, scale, backend=backend) - expected = 1.265655 + # scale instead of rate + res = sr.logs_gamma(0.2, 1.1, scale=1 / 0.7, backend=backend) assert np.isclose(res, expected) - obs, location, scale = -1.3, 2.4, 4.5 - res = sr.logs_exponential2(obs, location, scale, backend=backend) + # negative observation + res = sr.logs_gamma(-4.2, 1.1, rate=0.7, backend=backend) expected = float("inf") assert np.isclose(res, expected) - obs, location, scale = 0.9, 0.9, 2.0 - res = sr.logs_exponential2(obs, location, scale, backend=backend) - expected = 0.6931472 - assert np.isclose(res, expected) + # multiple observations + res = sr.logs_gamma(np.array([-1.2, 2.3]), 1.1, 0.7, backend=backend) + expected = np.array([float("inf"), 1.869179]) + assert np.allclose(res, expected) - res0 = sr.logs_exponential(obs, 1 / scale, backend=backend) - res = sr.logs_exponential2(obs, 0.0, scale, backend=backend) - assert np.isclose(res, res0) + # check equivalence with exponential distribution + res0 = sr.logs_exponential(3.1, 2, backend=backend) + res = sr.logs_gamma(3.1, 1, 2, backend=backend) + assert np.isclose(res0, res) -def test_2pexponential(backend): - obs, scale1, scale2, location = 0.3, 0.1, 
4.3, 0.0 - res = sr.logs_2pexponential(obs, scale1, scale2, location, backend=backend) - expected = 1.551372 - assert np.isclose(res, expected) +def test_logs_gev(backend): + ## test shape - obs, scale1, scale2, location = -20.8, 7.1, 2.0, -15.4 - res = sr.logs_2pexponential(obs, scale1, scale2, location, backend=backend) - expected = 2.968838 - assert np.isclose(res, expected) + # one observation, multiple forecasts + res = sr.logs_gev( + 6.2, + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + 0.5, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_gev( + np.random.uniform(-5, 5, (4, 3)), + -4.9, + 2.3, + 0.7, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_gev( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + 0.7, + backend=backend, + ) + assert res.shape == (4, 3) + ## test exceptions -def test_laplace(backend): - obs, location, scale = -3.0, 0.1, 0.9 - res = sr.logs_laplace(obs, backend=backend) - expected = 3.693147 - assert np.isclose(res, expected) + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_gev(0.7, 0.5, 2.0, -0.5, backend=backend, check_pars=True) + return - res = sr.logs_laplace(obs, location, backend=backend) - expected = 3.793147 - assert np.isclose(res, expected) + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_gev(0.7, 0.5, 2.0, 0.0, backend=backend, check_pars=True) + return - res = sr.logs_laplace(obs, location, scale, backend=backend) - expected = 4.032231 - assert np.isclose(res, expected) + ## test correctness + # test default location + res0 = sr.logs_gev(0.3, 0.0, backend=backend) + res = sr.logs_gev(0.3, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) -def test_loglaplace(backend): - obs, locationlog, scalelog = 3.0, 0.1, 0.9 - res = sr.logs_loglaplace(obs, locationlog, 
scalelog, backend=backend) - expected = 2.795968 - assert np.isclose(res, expected) + # test default scale + res = sr.logs_gev(0.3, 0.0, 0.0, 1.0, backend=backend) + assert np.isclose(res0, res) - obs, locationlog, scalelog = 0.0, 0.1, 0.9 - res = sr.logs_loglaplace(obs, locationlog, scalelog, backend=backend) - expected = float("inf") + # test invariance to shift + mu = 0.1 + res = sr.logs_gev(0.3 + mu, 0.0, mu, 1.0, backend=backend) + assert np.isclose(res0, res) + + # positive shape parameter + res = sr.logs_gev(0.3, 0.7, backend=backend) + expected = 1.22455 assert np.isclose(res, expected) - obs, locationlog, scalelog = 12.0, 12.1, 4.9 - res = sr.logs_loglaplace(obs, locationlog, scalelog, backend=backend) - expected = 6.729553 + # negative shape parameter + res = sr.logs_gev(0.3, -0.7, backend=backend) + expected = 0.8151139 assert np.isclose(res, expected) + # zero shape parameter + res = sr.logs_gev(0.3, 0.0, backend=backend) + expected = 1.040818 + assert np.isclose(res, expected) -def test_mixnorm(backend): - obs, m, s, w = 0.3, [0.0, -2.9, 0.9], [0.5, 1.4, 0.7], [1 / 3, 1 / 3, 1 / 3] - res = sr.logs_mixnorm(obs, m, s, w, backend=backend) - expected = 1.019742 + # shape parameter greater than one + res = sr.logs_gev(0.7, 1.5, 0.0, 0.5, backend=backend) + expected = 1.662878 assert np.isclose(res, expected) - res0 = sr.logs_mixnorm(obs, m, s, backend=backend) - assert np.isclose(res, res0) + # multiple observations + res = sr.logs_gev(np.array([0.9, -2.1]), -0.7, 0, 1.4, backend=backend) + expected = np.array([1.018374, 2.817275]) + assert np.allclose(res, expected) - w = [0.3, 0.1, 0.6] - res = sr.logs_mixnorm(obs, m, s, w, backend=backend) - expected = 0.8235977 - assert np.isclose(res, expected) - obs = [-1.6, 0.3] - m = [[0.0, -2.9], [0.6, 0.0], [-1.1, -2.3]] - s = [[0.5, 1.7], [1.1, 0.7], [1.4, 1.5]] - res1 = sr.logs_mixnorm(obs, m, s, mc_axis=0, backend=backend) +def test_logs_gpd(backend): + ## test shape - m = [[0.0, 0.6, -1.1], [-2.9, 0.0, -2.3]] 
- s = [[0.5, 1.1, 1.4], [1.7, 0.7, 1.5]] - res2 = sr.logs_mixnorm(obs, m, s, backend=backend) - assert np.allclose(res1, res2) + # one observation, multiple forecasts + res = sr.logs_gpd( + 6.2, + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_gpd( + np.random.uniform(-5, 5, (4, 3)), + -4.9, + 2.3, + 0.7, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_gpd( + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(-10, 1, (4, 3)), + np.random.uniform(-5, 5, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + ## test exceptions -def test_negbinom(backend): - if backend in ["jax", "torch"]: - pytest.skip("Not implemented in jax or torch backends") + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_gpd(0.7, 0.5, 2.0, -0.5, backend=backend, check_pars=True) + return - # test exceptions + # zero scale parameter with pytest.raises(ValueError): - sr.logs_negbinom(1, 1.1, 0.1, mu=0.1, backend=backend) + sr.logs_gpd(0.7, 0.5, 2.0, 0.0, backend=backend, check_pars=True) return - # TODO: investigate why JAX and torch results are different from other backends - # (they fail the test) - obs, n, prob = 2.0, 7.0, 0.8 - res = sr.logs_negbinom(obs, n, prob, backend=backend) - expected = 1.448676 + ## test correctness + + # test default location + res0 = sr.logs_gpd(0.3, 0.0, backend=backend) + res = sr.logs_gpd(0.3, 0.0, 0.0, backend=backend) + assert np.isclose(res0, res) + + # test default scale + res = sr.logs_gpd(0.3, 0.0, 0.0, 1.0, backend=backend) + assert np.isclose(res0, res) + + # test invariance to shift + res = sr.logs_gpd(0.3 + 0.1, 0.0, 0.1, 1.0, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.logs_gpd(0.3, 0.9, backend=backend) + 
expected = 0.5045912 assert np.isclose(res, expected) - obs, n, prob = 1.5, 2.0, 0.5 - res = sr.logs_negbinom(obs, n, prob, backend=backend) + # negative observation + res = sr.logs_gpd(-0.3, 0.9, backend=backend) expected = float("inf") assert np.isclose(res, expected) - obs, n, prob = -1.0, 17.0, 0.1 - res = sr.logs_negbinom(obs, n, prob, backend=backend) - expected = float("inf") + # negative shape parameter + res = sr.logs_gpd(0.3, -0.9, backend=backend) + expected = 0.03496786 assert np.isclose(res, expected) - obs, n, mu = 2.0, 11.0, 7.3 - res = sr.logs_negbinom(obs, n, mu=mu, backend=backend) - expected = 3.247462 + # shape parameter greater than one + res = sr.logs_gpd(0.7, 1.5, 0.0, 0.5, backend=backend) + expected = 1.192523 assert np.isclose(res, expected) + # multiple observations + res = sr.logs_gpd(np.array([0.9, -2.1]), -0.7, 0, 1.4, backend=backend) + expected = np.array([0.5926881, float("inf")]) + assert np.allclose(res, expected) -def test_normal(backend): - res = sr.logs_normal(0.0, 0.1, 0.1, backend=backend) - assert np.isclose(res, -0.8836466, rtol=1e-5) +def test_logs_tlogis(backend): + ## test shape -def test_2pnormal(backend): - obs, scale1, scale2, location = 29.1, 4.6, 1.3, 27.9 - res = sr.logs_2pnormal(obs, scale1, scale2, location, backend=backend) - expected = 2.426779 - assert np.isclose(res, expected) + # one observation, multiple forecasts + res = sr.logs_tlogistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - obs, scale1, scale2, location = -2.2, 1.6, 3.3, -1.9 - res = sr.logs_2pnormal(obs, scale1, scale2, location, backend=backend) - expected = 1.832605 - assert np.isclose(res, expected) + # multiple observations, one forecast + res = sr.logs_tlogistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) - obs, scale, location = 1.5, 4.5, 5.4 - res0 = sr.logs_normal(obs, location, scale, 
backend=backend) - res = sr.logs_2pnormal(obs, scale, scale, location, backend=backend) - assert np.isclose(res, res0) - obs, mu, sigma = 17.1, 13.8, 3.3 - res = sr.logs_normal(obs, mu, sigma, backend=backend) - expected = 2.612861 - assert np.isclose(res, expected) + # multiple observations, multiple forecasts + res = sr.logs_tlogistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - obs, mu, sigma = 3.1, 4.0, 0.5 - res = sr.logs_normal(obs, mu, sigma, backend=backend) - expected = 1.845791 - assert np.isclose(res, expected) + ## test exceptions + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_tlogistic(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return -def test_poisson(backend): - obs, mean = 1.0, 3.0 - res = sr.logs_poisson(obs, mean, backend=backend) - expected = 1.901388 + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_tlogistic(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.logs_tlogistic( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.logs_tlogistic(1.8, backend=backend) + res = sr.logs_tlogistic(1.8, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.logs_tlogistic(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.485943 assert np.isclose(res, expected) - obs, mean = 1.5, 2.3 - res = sr.logs_poisson(obs, mean, backend=backend) + # observation below lower bound + res = sr.logs_tlogistic(-5.8, -3.0, 3.3, -5.0, 4.7, backend=backend) expected = float("inf") assert np.isclose(res, expected) - obs, mean = -1.0, 1.5 - res = sr.logs_poisson(obs, mean, backend=backend) + # observation above upper bound + res = sr.logs_tlogistic(7.8, -3.0, 3.3, -5.0, 
4.7, backend=backend) expected = float("inf") assert np.isclose(res, expected) + # aligns with logs_logistic + res0 = sr.logs_logistic(1.8, -3.0, 3.3, backend=backend) + res = sr.logs_tlogistic(1.8, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.logs_tlogistic( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([3.631568, 3.778196]) + assert np.allclose(res, expected) -def test_t(backend): - if backend in ["jax", "torch", "tensorflow"]: - pytest.skip("Not implemented in jax, torch or tensorflow backends") - obs, df, mu, sigma = 11.1, 5.2, 13.8, 2.3 - res = sr.logs_t(obs, df, mu, sigma, backend=backend) - expected = 2.528398 - assert np.isclose(res, expected) +def test_logs_tnormal(backend): + ## test shape - obs, df = 0.7, 4.0 - res = sr.logs_t(obs, df, backend=backend) - expected = 1.269725 - assert np.isclose(res, expected) + # one observation, multiple forecasts + res = sr.logs_tnormal( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 10, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + # multiple observations, one forecast + res = sr.logs_tnormal( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) -def test_tlogis(backend): - obs, location, scale, lower, upper = 4.9, 3.5, 2.3, 0.0, 20.0 - res = sr.logs_tlogistic(obs, location, scale, lower, upper, backend=backend) - expected = 2.11202 - assert np.isclose(res, expected) + # multiple observations, multiple forecasts + res = sr.logs_tnormal( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 10, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) - # aligns with logs_logistic - res0 = sr.logs_logistic(obs, location, scale, backend=backend) - res = sr.logs_tlogistic(obs, location, scale, backend=backend) - assert np.isclose(res, res0) + ## test exceptions + # negative scale parameter + 
with pytest.raises(ValueError): + sr.logs_tnormal(0.7, 2.0, -0.5, backend=backend, check_pars=True) + return -def test_tnormal(backend): - obs, location, scale, lower, upper = 4.2, 2.9, 2.2, 1.5, 17.3 - res = sr.logs_tnormal(obs, location, scale, lower, upper, backend=backend) - expected = 1.577806 - assert np.isclose(res, expected) + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_tnormal(0.7, 2.0, 0.0, backend=backend, check_pars=True) + return - obs, location, scale, lower, upper = -1.0, 2.9, 2.2, 1.5, 17.3 - res = sr.logs_tnormal(obs, location, scale, lower, upper, backend=backend) - expected = float("inf") - assert np.isclose(res, expected) + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.logs_tnormal( + 0.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return - # aligns with logs_normal - res0 = sr.logs_normal(obs, location, scale, backend=backend) - res = sr.logs_tnormal(obs, location, scale, backend=backend) - assert np.isclose(res, res0) + ## test correctness + # default values + res0 = sr.logs_tnormal(1.8, backend=backend) + res = sr.logs_tnormal(1.8, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) -def test_tt(backend): - if backend in ["jax", "torch", "tensorflow"]: - pytest.skip("Not implemented in jax, torch or tensorflow backends") + # single observation + res = sr.logs_tnormal(1.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.839353 + assert np.isclose(res, expected) - obs, df, location, scale, lower, upper = 1.9, 2.9, 3.1, 4.2, 1.5, 17.3 - res = sr.logs_tt(obs, df, location, scale, lower, upper, backend=backend) - expected = 2.002856 + # observation below lower bound + res = sr.logs_tnormal(-5.8, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = float("inf") assert np.isclose(res, expected) - obs, df, location, scale, lower, upper = -1.0, 2.9, 3.1, 4.2, 1.5, 17.3 - res = sr.logs_tt(obs, df, location, scale, lower, upper, backend=backend) + 
# observation above upper bound + res = sr.logs_tnormal(7.8, -3.0, 3.3, -5.0, 4.7, backend=backend) expected = float("inf") assert np.isclose(res, expected) - # aligns with logs_t - res0 = sr.logs_t(obs, df, location, scale, backend=backend) - res = sr.logs_tt(obs, df, location, scale, backend=backend) + # aligns with logs_normal + res0 = sr.logs_normal(1.8, -3.0, 3.3, backend=backend) + res = sr.logs_tnormal(1.8, -3.0, 3.3, backend=backend) assert np.isclose(res, res0) + # multiple observations + res = sr.logs_tnormal( + np.array([1.8, -2.3]), 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([3.415483, 3.726619]) + assert np.allclose(res, expected) -def test_uniform(backend): - obs, min, max = 0.3, -1.0, 2.1 - res = sr.logs_uniform(obs, min, max, backend=backend) - expected = 1.131402 - assert np.isclose(res, expected) - - obs, min, max = -17.9, -15.2, -8.7 - res = sr.logs_uniform(obs, min, max, backend=backend) - expected = float("inf") - assert np.isclose(res, expected) - obs, min, max = 0.1, 0.1, 3.1 - res = sr.logs_uniform(obs, min, max, backend=backend) +def test_logs_tt(backend): + if backend in ["jax", "torch", "tensorflow"]: + pytest.skip("Not implemented in jax, torch or tensorflow backends") + + ## test shape + + # one observation, multiple forecasts + res = sr.logs_tt( + 17, + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_tt( + np.random.uniform(-10, 10, (4, 3)), + 11.1, + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_tt( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 20, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative df parameter + with 
pytest.raises(ValueError): + sr.logs_tt(0.7, -1.7, 2.0, 0.5, backend=backend, check_pars=True) + return + + # zero df parameter + with pytest.raises(ValueError): + sr.logs_tt(0.7, 0.0, 2.0, 0.5, backend=backend, check_pars=True) + return + + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_tt(0.7, 4.7, 2.0, -0.5, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_tt(0.7, 4.7, 2.0, 0.0, backend=backend, check_pars=True) + return + + # lower bound larger than upper bound + with pytest.raises(ValueError): + sr.logs_tt( + 0.7, 4.7, 2.0, 0.4, lower=-1, upper=-4, backend=backend, check_pars=True + ) + return + + ## test correctness + + # default values + res0 = sr.logs_tt(1.8, 6.9, backend=backend) + res = sr.logs_tt(1.8, 6.9, 0.0, 1.0, -np.inf, np.inf, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.logs_tt(1.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = 2.836673 + assert np.isclose(res, expected) + + # observation below lower bound + res = sr.logs_tt(-5.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = float("inf") + assert np.isclose(res, expected) + + # observation above upper bound + res = sr.logs_tt(7.8, 6.9, -3.0, 3.3, -5.0, 4.7, backend=backend) + expected = float("inf") + assert np.isclose(res, expected) + + # aligns with logs_t + res0 = sr.logs_t(1.8, 6.9, -3.0, 3.3, backend=backend) + res = sr.logs_tt(1.8, 6.9, -3.0, 3.3, backend=backend) + assert np.isclose(res, res0) + + # multiple observations + res = sr.logs_tt( + np.array([1.8, -2.3]), 6.9, 9.1, 11.1, -4.0, np.inf, backend=backend + ) + expected = np.array([3.453054, 3.774802]) + assert np.allclose(res, expected) + + +def test_logs_hypergeometric(backend): + if backend == "torch": + pytest.skip("Currently not working in torch backend") + + ## test shape + + # one observation, multiple forecasts + res = sr.logs_hypergeometric( + 17, + np.random.randint(10, 20, 
size=(4, 3)), + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(1, 10, size=(4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_hypergeometric( + np.random.randint(0, 20, size=(4, 3)), + 13, + 4, + 7, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_hypergeometric( + np.random.randint(0, 20, size=(4, 3)), + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(10, 20, size=(4, 3)), + np.random.randint(1, 10, size=(4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative m parameter + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=-1, n=10, k=5, backend=backend, check_pars=True) + return + + # negative n parameter + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=11, n=-2, k=5, backend=backend, check_pars=True) + return + + # negative k parameter + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=11, n=10, k=-5, backend=backend, check_pars=True) + return + + # k > m + n + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=11, n=10, k=25, backend=backend, check_pars=True) + return + + # non-integer m parameter + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=11.1, n=10, k=5, backend=backend, check_pars=True) + return + + # non-integer n parameter + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=11, n=10.8, k=5, backend=backend, check_pars=True) + return + + # non-integer k parameter + with pytest.raises(ValueError): + sr.logs_hypergeometric(7, m=11, n=10, k=4.3, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.logs_hypergeometric(5, 7, 13, 12, backend=backend) + expected = 1.251525 + assert np.isclose(res, expected) + + # negative observation + res = sr.logs_hypergeometric(-5, 7, 13, 12, backend=backend) + expected = float("inf") + assert 
np.isclose(res, expected) + + # non-integer observation + res = sr.logs_hypergeometric(5.6, 7, 13, 12, backend=backend) + expected = float("inf") + assert np.isclose(res, expected) + + # multiple observations + res = sr.logs_hypergeometric(np.array([2, 7.6, -2.1]), 7, 13, 3, backend=backend) + expected = np.array([1.429312, float("inf"), float("inf")]) + assert np.allclose(res, expected) + + +def test_logs_laplace(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.logs_laplace( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_laplace( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_laplace( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_laplace(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_laplace(7, 4, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default location + res0 = sr.logs_laplace(-3, backend=backend) + res = sr.logs_laplace(-3, location=0, backend=backend) + assert np.isclose(res0, res) + + # default scale + res = sr.logs_laplace(-3, location=0, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.logs_laplace(-3 + 0.1, location=0.1, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.logs_laplace(-2.9, 0.1, backend=backend) + expected = 3.693147 + assert np.isclose(res, expected) + + # multiple observations + res = sr.logs_laplace( + np.array([-2.9, 4.1]), + 
np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + backend=backend, + ) + expected = np.array([3.401722, 2.792135]) + assert np.allclose(res, expected) + + +def test_logs_logis(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.logs_logistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_logistic( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 4.1, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_logistic( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 5, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_logistic(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_logistic(7, 4, 0.0, backend=backend, check_pars=True) + return + + ## test correctness + + # default location + res0 = sr.logs_logistic(-3, backend=backend) + res = sr.logs_logistic(-3, location=0, backend=backend) + assert np.isclose(res0, res) + + # default scale + res = sr.logs_logistic(-3, location=0, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # invariance to location shifts + res = sr.logs_logistic(-3 + 0.1, location=0.1, scale=1.0, backend=backend) + assert np.isclose(res0, res) + + # single observation + res = sr.logs_logistic(17.1, 13.8, 3.3, backend=backend) + expected = 2.820446 + assert np.isclose(res, expected) + + res = sr.logs_logistic(3.1, 4.0, 0.5, backend=backend) + expected = 1.412808 + assert np.isclose(res, expected) + + # multiple observations + res = sr.logs_logistic( + np.array([-2.9, 4.1]), + np.array([1.1, 1.8]), + np.array([10.1, 1.2]), + backend=backend, + ) + expected = np.array([3.737788, 
2.373456]) + assert np.allclose(res, expected) + + +def test_logs_loglaplace(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.logs_loglaplace( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, one forecast + res = sr.logs_loglaplace( + np.random.uniform(-10, 10, (4, 3)), + 3.2, + 0.9, + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, multiple forecasts + res = sr.logs_loglaplace( + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(-10, 10, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + ## test exceptions + + # negative scale parameter + with pytest.raises(ValueError): + sr.logs_loglaplace(7, 4, -0.1, backend=backend, check_pars=True) + return + + # zero scale parameter + with pytest.raises(ValueError): + sr.logs_loglaplace(7, 4, 0.0, backend=backend, check_pars=True) + return + + # scale parameter larger than one + with pytest.raises(ValueError): + sr.logs_loglaplace(7, 4, 1.1, backend=backend, check_pars=True) + return + + ## test correctness + + # single observation + res = sr.logs_loglaplace(7.1, 3.8, 0.3, backend=backend) + expected = 7.582287 + assert np.isclose(res, expected) + + # negative observation + res = sr.logs_loglaplace(-4.1, -3.8, 0.3, backend=backend) + expected = float("inf") + assert np.isclose(res, expected) + + # multiple observations + res = sr.logs_loglaplace( + np.array([2.9, 4.4]), + np.array([1.1, 1.8]), + np.array([0.1, 0.9]), + backend=backend, + ) + expected = np.array([-0.1918345, 2.4231639]) + assert np.allclose(res, expected) + + +def test_logs_loglogistic(backend): + ## test shape + + # one observation, multiple forecasts + res = sr.logs_loglogistic( + 17, + np.random.uniform(10, 20, (4, 3)), + np.random.uniform(0, 1, (4, 3)), + backend=backend, + ) + assert res.shape == (4, 3) + + # multiple observations, 
def test_logs_loglogistic(backend):
    # NOTE(review): the opening of this test (the def line and the
    # "one observation, multiple forecasts" stanza) was truncated in the
    # reviewed chunk; reconstructed from the sibling tests' pattern — confirm
    # against the repository version.

    # multiple observations, one forecast
    res = sr.logs_loglogistic(
        np.random.uniform(-10, 10, (4, 3)),
        3.2,
        0.9,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_loglogistic(
        np.random.uniform(-10, 10, (4, 3)),
        np.random.uniform(-10, 10, (4, 3)),
        np.random.uniform(0, 1, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions
    # NOTE(review): stray unconditional `return` statements previously followed
    # the first raises-check in these tests, leaving everything after them dead;
    # removed so all checks actually run (test_logs_negbinom never had them).

    # negative scale parameter
    with pytest.raises(ValueError):
        sr.logs_loglogistic(7, 4, -0.1, backend=backend, check_pars=True)

    # zero scale parameter
    with pytest.raises(ValueError):
        sr.logs_loglogistic(7, 4, 0.0, backend=backend, check_pars=True)

    # scale parameter larger than one
    with pytest.raises(ValueError):
        sr.logs_loglogistic(7, 4, 1.1, backend=backend, check_pars=True)

    ## test correctness

    # single observation
    res = sr.logs_loglogistic(7.1, 3.8, 0.3, backend=backend)
    expected = 6.893475
    assert np.isclose(res, expected)

    # negative observation
    res = sr.logs_loglogistic(-4.1, -3.8, 0.3, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_loglogistic(
        np.array([2.9, 4.4]),
        np.array([1.1, 1.8]),
        np.array([0.1, 0.9]),
        backend=backend,
    )
    expected = np.array([0.1793931, 2.7936654])
    assert np.allclose(res, expected)


def test_logs_lognormal(backend):
    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_lognormal(
        17,
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 10, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_lognormal(
        np.random.uniform(0, 10, (4, 3)),
        3.2,
        2.9,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_lognormal(
        np.random.uniform(0, 10, (4, 3)),
        np.random.uniform(0, 10, (4, 3)),
        np.random.uniform(0, 1, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # negative scale parameter
    with pytest.raises(ValueError):
        sr.logs_lognormal(7, 4, -0.1, backend=backend, check_pars=True)

    # zero scale parameter
    with pytest.raises(ValueError):
        sr.logs_lognormal(7, 4, 0.0, backend=backend, check_pars=True)

    ## test correctness

    # single observation
    res = sr.logs_lognormal(7.1, 3.8, 0.3, backend=backend)
    expected = 20.48201
    assert np.isclose(res, expected)

    # negative observation
    res = sr.logs_lognormal(-4.1, -3.8, 0.3, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_lognormal(
        np.array([2.9, 4.4]),
        np.array([1.1, 1.8]),
        np.array([0.1, 0.9]),
        backend=backend,
    )
    expected = np.array([-0.2566692, 2.3577601])
    assert np.allclose(res, expected)


def test_logs_mixnorm(backend):
    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_mixnorm(
        17,
        np.random.uniform(-5, 5, (4, 3, 5)),
        np.random.uniform(0, 5, (4, 3, 5)),
        np.random.uniform(0, 1, (4, 3, 5)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_mixnorm(
        np.random.uniform(-5, 5, (4, 3)),
        np.array([0.0, -2.9, 0.9, 3.2, 7.0]),
        np.array([0.6, 2.9, 0.9, 3.2, 7.0]),
        np.array([0.6, 2.9, 0.9, 0.2, 0.1]),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_mixnorm(
        np.random.uniform(-5, 5, (4, 3)),
        np.random.uniform(-5, 5, (4, 3, 5)),
        np.random.uniform(0, 5, (4, 3, 5)),
        np.random.uniform(0, 1, (4, 3, 5)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # negative scale parameter
    with pytest.raises(ValueError):
        sr.logs_mixnorm(
            7, np.array([4, 2]), np.array([-1, 2]), backend=backend, check_pars=True
        )

    # negative weight parameter
    with pytest.raises(ValueError):
        sr.logs_mixnorm(
            7,
            np.array([4, 2]),
            np.array([1, 2]),
            np.array([-1, 2]),
            backend=backend,
            check_pars=True,
        )

    ## test correctness

    # single observation
    obs, m, s, w = 0.3, [0.0, -2.9, 0.9], [0.5, 1.4, 0.7], [1 / 3, 1 / 3, 1 / 3]
    res = sr.logs_mixnorm(obs, m, s, w, backend=backend)
    expected = 1.019742
    assert np.isclose(res, expected)

    # default weights
    res0 = sr.logs_mixnorm(obs, m, s, backend=backend)
    assert np.isclose(res, res0)

    # non-constant weights
    w = [0.3, 0.1, 0.6]
    res = sr.logs_mixnorm(obs, m, s, w, backend=backend)
    expected = 0.8235977
    assert np.isclose(res, expected)

    # weights that do not sum to one
    w = [8.3, 0.1, 4.6]
    res = sr.logs_mixnorm(obs, m, s, w, backend=backend)
    expected = 0.5703479
    assert np.isclose(res, expected)

    # m-axis argument
    obs = [-1.6, 0.3]
    m = [[0.0, -2.9], [0.6, 0.0], [-1.1, -2.3]]
    s = [[0.5, 1.7], [1.1, 0.7], [1.4, 1.5]]
    res1 = sr.logs_mixnorm(obs, m, s, m_axis=0, backend=backend)

    m = [[0.0, 0.6, -1.1], [-2.9, 0.0, -2.3]]
    s = [[0.5, 1.1, 1.4], [1.7, 0.7, 1.5]]
    res2 = sr.logs_mixnorm(obs, m, s, backend=backend)
    assert np.allclose(res1, res2)

    # check equality with normal distribution
    obs = 1.6
    m, s, w = [1.8, 1.8], [2.3, 2.3], [0.6, 0.8]
    res0 = sr.logs_normal(obs, m[0], s[0], backend=backend)
    res = sr.logs_mixnorm(obs, m, s, w, backend=backend)
    assert np.isclose(res0, res)


def test_logs_negbinom(backend):
    if backend in ["jax", "torch", "tensorflow"]:
        pytest.skip("Not implemented in jax, torch or tensorflow backends")

    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_negbinom(
        17,
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 1, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_negbinom(
        np.random.uniform(0, 20, (4, 3)),
        12,
        0.2,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_negbinom(
        np.random.uniform(0, 20, (4, 3)),
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 1, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # prob and mu are both provided
    with pytest.raises(ValueError):
        sr.logs_negbinom(0.3, 7.0, 0.8, mu=7.3, backend=backend)

    # neither prob nor mu are provided
    with pytest.raises(ValueError):
        sr.logs_negbinom(0.3, 8, backend=backend)

    # negative size parameter
    with pytest.raises(ValueError):
        sr.logs_negbinom(4.3, -8, 0.8, backend=backend, check_pars=True)

    # negative mu parameter
    with pytest.raises(ValueError):
        sr.logs_negbinom(4.3, 8, mu=-0.8, backend=backend, check_pars=True)

    # negative prob parameter
    with pytest.raises(ValueError):
        sr.logs_negbinom(4.3, 8, -0.8, backend=backend, check_pars=True)

    # prob parameter larger than one
    with pytest.raises(ValueError):
        sr.logs_negbinom(4.3, 8, 1.8, backend=backend, check_pars=True)

    ## test correctness

    # single observation
    res = sr.logs_negbinom(2.0, 7.0, 0.8, backend=backend)
    expected = 1.448676
    assert np.isclose(res, expected)

    # non-integer observation
    res = sr.logs_negbinom(1.5, 2.4, 0.5, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # negative observation
    res = sr.logs_negbinom(-1.0, 17.0, 0.1, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # mu given instead of prob
    res = sr.logs_negbinom(2.0, 11.0, mu=7.3, backend=backend)
    expected = 3.247462
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_negbinom(np.array([2.0, 7.0]), 7.0, 0.8, backend=backend)
    expected = np.array([1.448676, 5.380319])
    assert np.allclose(res, expected)


def test_logs_normal(backend):
    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_normal(
        17,
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 5, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_normal(
        np.random.uniform(-10, 10, (4, 3)),
        3.2,
        4.1,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_normal(
        np.random.uniform(-10, 10, (4, 3)),
        np.random.uniform(-10, 10, (4, 3)),
        np.random.uniform(0, 10, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # negative scale parameter
    with pytest.raises(ValueError):
        sr.logs_normal(7, 4, -0.1, backend=backend, check_pars=True)

    # zero scale parameter
    with pytest.raises(ValueError):
        sr.logs_normal(7, 4, 0.0, backend=backend, check_pars=True)

    ## test correctness

    # default mu
    res0 = sr.logs_normal(-3, backend=backend)
    res = sr.logs_normal(-3, mu=0, backend=backend)
    assert np.isclose(res0, res)

    # default sigma
    res = sr.logs_normal(-3, mu=0, sigma=1.0, backend=backend)
    assert np.isclose(res0, res)

    # invariance to location shifts
    res = sr.logs_normal(-3 + 0.1, mu=0.1, sigma=1.0, backend=backend)
    assert np.isclose(res0, res)

    # single observation
    res = sr.logs_normal(7.1, 3.8, 0.3, backend=backend)
    expected = 60.21497
    assert np.isclose(res, expected)

    res = sr.logs_normal(3.1, 4.0, 0.5, backend=backend)
    expected = 1.845791
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_normal(
        np.array([-2.9, 4.4]),
        np.array([1.1, 1.8]),
        np.array([10.1, 1.2]),
        backend=backend,
    )
    expected = np.array([3.309898, 3.448482])
    assert np.allclose(res, expected)


def test_logs_2pnormal(backend):
    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_2pnormal(
        17,
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 10, (4, 3)),
        np.random.uniform(0, 10, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_2pnormal(
        np.random.uniform(-5, 5, (4, 3)),
        3.2,
        4.1,
        2.8,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_2pnormal(
        np.random.uniform(-5, 5, (4, 3)),
        np.random.uniform(-5, 5, (4, 3)),
        np.random.uniform(0, 10, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # negative scale1 parameter
    with pytest.raises(ValueError):
        sr.logs_2pnormal(7, -0.1, 0.1, backend=backend, check_pars=True)

    # zero scale1 parameter
    with pytest.raises(ValueError):
        sr.logs_2pnormal(7, 0.0, backend=backend, check_pars=True)

    # negative scale2 parameter
    with pytest.raises(ValueError):
        sr.logs_2pnormal(7, 0.1, -2.1, backend=backend, check_pars=True)

    # zero scale2 parameter
    with pytest.raises(ValueError):
        sr.logs_2pnormal(7, 1.1, 0.0, backend=backend, check_pars=True)

    ## test correctness

    # default scale1
    res0 = sr.logs_2pnormal(-3, backend=backend)
    res = sr.logs_2pnormal(-3, scale1=1.0, backend=backend)
    assert np.isclose(res0, res)

    # default scale2
    res = sr.logs_2pnormal(-3, scale1=1.0, scale2=1.0, backend=backend)
    assert np.isclose(res0, res)

    # default location
    res = sr.logs_2pnormal(-3, scale1=1.0, scale2=1.0, location=0.0, backend=backend)
    assert np.isclose(res0, res)

    # invariance to location shifts
    res = sr.logs_2pnormal(-3 + 0.1, 1.0, 1.0, location=0.1, backend=backend)
    assert np.isclose(res0, res)

    # single observation
    res = sr.logs_2pnormal(7.1, 3.8, 1.3, 2.0, backend=backend)
    expected = 9.550298
    assert np.isclose(res, expected)

    # check equivalence with standard normal distribution
    res0 = sr.logs_normal(3.1, 4.0, 0.5, backend=backend)
    res = sr.logs_2pnormal(3.1, 0.5, 0.5, 4.0, backend=backend)
    assert np.isclose(res0, res)

    # multiple observations
    res = sr.logs_2pnormal(
        np.array([-2.9, 4.4]),
        np.array([1.1, 1.8]),
        np.array([10.1, 1.2]),
        backend=backend,
    )
    expected = np.array([6.116912, 8.046626])
    assert np.allclose(res, expected)


def test_logs_poisson(backend):
    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_poisson(
        17,
        np.random.uniform(0, 20, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_poisson(
        np.random.uniform(-5, 10, (4, 3)),
        3.2,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_poisson(
        np.random.uniform(-5, 10, (4, 3)),
        np.random.uniform(0, 20, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # negative mean parameter
    with pytest.raises(ValueError):
        sr.logs_poisson(7, -0.1, backend=backend, check_pars=True)

    # zero mean parameter
    with pytest.raises(ValueError):
        sr.logs_poisson(7, 0.0, backend=backend, check_pars=True)

    ## test correctness

    # single observation
    res = sr.logs_poisson(1.0, 3.0, backend=backend)
    expected = 1.901388
    assert np.isclose(res, expected)

    # non-integer observation
    res = sr.logs_poisson(1.5, 2.3, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # negative observation
    res = sr.logs_poisson(-1.0, 1.5, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_poisson(np.array([2.0, 4.0]), np.array([10.1, 1.2]), backend=backend)
    expected = np.array([6.168076, 3.648768])
    assert np.allclose(res, expected)


def test_logs_t(backend):
    if backend == "torch":
        pytest.skip("Not implemented in torch backend")

    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_t(
        17,
        np.random.uniform(0, 20, (4, 3)),
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 5, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_t(
        np.random.uniform(-5, 20, (4, 3)),
        12,
        12,
        12,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_t(
        np.random.uniform(-5, 20, (4, 3)),
        np.random.uniform(0, 20, (4, 3)),
        np.random.uniform(10, 20, (4, 3)),
        np.random.uniform(0, 5, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # negative df parameter
    with pytest.raises(ValueError):
        sr.logs_t(7, -4, backend=backend, check_pars=True)

    # zero df parameter
    with pytest.raises(ValueError):
        sr.logs_t(7, 0, backend=backend, check_pars=True)

    # negative scale parameter
    with pytest.raises(ValueError):
        sr.logs_t(7, 4, scale=-0.1, backend=backend, check_pars=True)

    # zero scale parameter
    with pytest.raises(ValueError):
        sr.logs_t(7, 4, scale=0.0, backend=backend, check_pars=True)

    ## test correctness

    # default location
    res0 = sr.logs_t(-3, 2.1, backend=backend)
    res = sr.logs_t(-3, 2.1, location=0, backend=backend)
    assert np.isclose(res0, res)

    # default scale
    res = sr.logs_t(-3, 2.1, location=0, scale=1.0, backend=backend)
    assert np.isclose(res0, res)

    # invariance to location shifts
    res = sr.logs_t(-3 + 0.1, 2.1, location=0.1, scale=1.0, backend=backend)
    assert np.isclose(res0, res)

    # single observation
    res = sr.logs_t(7.1, 4.2, 3.8, 0.3, backend=backend)
    expected = 8.600513
    assert np.isclose(res, expected)

    # negative observation
    res = sr.logs_t(-3.1, 2.9, 4.0, 0.5, backend=backend)
    expected = 8.60978
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_t(
        np.array([-2.9, 4.4]),
        np.array([5.1, 19.4]),
        np.array([1.1, 1.8]),
        np.array([10.1, 1.2]),
        backend=backend,
    )
    expected = np.array([3.372580, 3.324565])
    assert np.allclose(res, expected)


def test_logs_uniform(backend):
    ## test shape

    # one observation, multiple forecasts
    res = sr.logs_uniform(
        7,
        np.random.uniform(0, 5, (4, 3)),
        np.random.uniform(5, 10, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, one forecast
    res = sr.logs_uniform(
        np.random.uniform(0, 10, (4, 3)),
        1,
        7,
        backend=backend,
    )
    assert res.shape == (4, 3)

    # multiple observations, multiple forecasts
    res = sr.logs_uniform(
        np.random.uniform(0, 10, (4, 3)),
        np.random.uniform(0, 5, (4, 3)),
        np.random.uniform(5, 10, (4, 3)),
        backend=backend,
    )
    assert res.shape == (4, 3)

    ## test exceptions

    # min >= max
    with pytest.raises(ValueError):
        sr.logs_uniform(0.7, min=2, max=1, backend=backend, check_pars=True)

    ## test correctness

    # default min
    res0 = sr.logs_uniform(0.7, backend=backend)
    res = sr.logs_uniform(0.7, min=0, backend=backend)
    assert np.isclose(res0, res)

    # default max
    res = sr.logs_uniform(0.7, min=0, max=1.0, backend=backend)
    assert np.isclose(res0, res)

    # invariance to location shifts
    res = sr.logs_uniform(0.7 + 0.1, min=0.1, max=1.1, backend=backend)
    assert np.isclose(res0, res)

    # single observation
    res = sr.logs_uniform(0.3, -1.0, 2.1, backend=backend)
    expected = 1.131402
    assert np.isclose(res, expected)

    res = sr.logs_uniform(2.2, 0.1, 3.1, backend=backend)
    expected = 1.098612
    assert np.isclose(res, expected)

    # observation outside of interval range
    res = sr.logs_uniform(-17.9, -15.2, -8.7, backend=backend)
    expected = float("inf")
    assert np.isclose(res, expected)

    # multiple observations
    res = sr.logs_uniform(
        np.array([2.9, 4.4]),
        np.array([1.1, 1.8]),
        np.array([10.1, 5.1]),
        backend=backend,
    )
    expected = np.array([2.197225, 1.193922])
    assert np.allclose(res, expected)