Skip to content

Commit 95c8544

Browse files
committed
[Math] Don't leak Minuit 2 implementation details to IFunction classes
The option to return external gradients in Minuit 2's internal parameter space is only used in RooFit, and these flags should not be present in the math function base classes. Refactor the code so that they are no longer needed.
1 parent 8b3836d commit 95c8544

File tree

8 files changed

+27
-31
lines changed

8 files changed

+27
-31
lines changed

math/mathcore/inc/Math/IFunction.h

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -91,8 +91,6 @@ namespace ROOT {
9191
// if it inherits from ROOT::Math::IGradientFunctionMultiDim.
9292
virtual bool HasGradient() const { return false; }
9393

94-
virtual bool returnsInMinuit2ParameterSpace() const { return false; }
95-
9694
/// Evaluate all the vector of function derivatives (gradient) at a point x.
9795
/// Derived classes must re-implement it if more efficient than evaluating one at a time
9896
virtual void Gradient(const T *x, T *grad) const
@@ -103,17 +101,6 @@ namespace ROOT {
103101
}
104102
}
105103

106-
/// In some cases, the gradient algorithm will use information from the previous step, these can be passed
107-
/// in with this overload. The `previous_*` arrays can also be used to return second derivative and step size
108-
/// so that these can be passed forward again as well at the call site, if necessary.
109-
virtual void GradientWithPrevResult(const T *x, T *grad, T *previous_grad, T *previous_g2, T *previous_gstep) const
110-
{
111-
unsigned int ndim = NDim();
112-
for (unsigned int icoord = 0; icoord < ndim; ++icoord) {
113-
grad[icoord] = Derivative(x, icoord, previous_grad, previous_g2, previous_gstep);
114-
}
115-
}
116-
117104
/// Optimized method to evaluate at the same time the function value and derivative at a point x.
118105
/// Often both value and derivatives are needed and it is often more efficient to compute them at the same time.
119106
/// Derived class should implement this method if performances play an important role and if it is faster to

math/minuit2/inc/Minuit2/FCNBase.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,10 @@ class FCNBase {
113113
virtual bool HasGradient() const { return false; }
114114

115115
virtual std::vector<double> Gradient(std::vector<double> const&) const { return {}; }
116+
117+
/// In some cases, the gradient algorithm will use information from the previous step, these can be passed
118+
/// in with this overload. The `previous_*` arrays can also be used to return second derivative and step size
119+
/// so that these can be passed forward again as well at the call site, if necessary.
116120
virtual std::vector<double> GradientWithPrevResult(std::vector<double> const& parameters, double * /*previous_grad*/,
117121
double * /*previous_g2*/, double * /*previous_gstep*/) const
118122
{

math/minuit2/inc/Minuit2/Minuit2Minimizer.h

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -279,6 +279,9 @@ class Minuit2Minimizer : public ROOT::Math::Minimizer {
279279
/// return the minimizer state (containing values, step size , etc..)
280280
const ROOT::Minuit2::MnUserParameterState &State() { return fState; }
281281

282+
const ROOT::Minuit2::FCNBase *GetFCN() const { return fMinuitFCN; }
283+
ROOT::Minuit2::FCNBase *GetFCN() { return fMinuitFCN; }
284+
282285
protected:
283286
// protected function for accessing the internal Minuit2 object. Needed for derived classes
284287

@@ -288,8 +291,6 @@ class Minuit2Minimizer : public ROOT::Math::Minimizer {
288291

289292
void SetMinimizerType(ROOT::Minuit2::EMinimizerType type);
290293

291-
virtual const ROOT::Minuit2::FCNBase *GetFCN() const { return fMinuitFCN; }
292-
293294
/// examine the minimum result
294295
bool ExamineMinimum(const ROOT::Minuit2::FunctionMinimum &min);
295296

math/minuit2/src/Minuit2Minimizer.cxx

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -399,13 +399,7 @@ void Minuit2Minimizer::SetFunction(const ROOT::Math::IMultiGenFunction &func)
399399
if (hasGrad) {
400400
auto const &gradFunc = dynamic_cast<ROOT::Math::IMultiGradFunction const &>(func);
401401
auto lambdaGrad = [&gradFunc](double const *params, double *grad) { return gradFunc.Gradient(params, grad); };
402-
auto lambdaGradWithPrevResult = [&gradFunc](const double *params, double *grad, double *previous_grad,
403-
double *previous_g2, double *previous_gstep) {
404-
gradFunc.GradientWithPrevResult(params, grad, previous_grad, previous_g2, previous_gstep);
405-
};
406402
adapter->SetGradientFunction(lambdaGrad);
407-
adapter->SetGradWithPrevResultFunction(lambdaGradWithPrevResult);
408-
adapter->SetReturnsInMinuit2ParameterSpace(gradFunc.returnsInMinuit2ParameterSpace());
409403
}
410404
fMinuitFCN = adapter;
411405
} else {

roofit/roofitcore/src/RooAbsMinimizerFcn.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,8 @@ class RooAbsMinimizerFcn {
3737
RooAbsMinimizerFcn(RooArgList paramList, RooMinimizer *context);
3838
virtual ~RooAbsMinimizerFcn() = default;
3939

40+
virtual bool returnsInMinuit2ParameterSpace() const { return false; }
41+
4042
/// Informs Minuit through its parameter_settings vector of RooFit parameter properties.
4143
bool synchronizeParameterSettings(std::vector<ROOT::Fit::ParameterSettings> &parameters, bool optConst);
4244
/// Like synchronizeParameterSettings, Synchronize informs Minuit through

roofit/roofitcore/src/RooMinimizer.cxx

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,11 @@ automatic PDF optimization.
6767
#include <TGraph.h>
6868
#include <TMarker.h>
6969

70+
#ifdef ROOFIT_MULTIPROCESS
71+
#include <Minuit2/Minuit2Minimizer.h>
72+
#include <Minuit2/FCNAdapter.h>
73+
#endif
74+
7075
#include <fstream>
7176
#include <iostream>
7277
#include <stdexcept> // logic_error
@@ -1135,6 +1140,18 @@ void RooMinimizer::initMinimizer()
11351140
{
11361141
_minimizer = std::unique_ptr<ROOT::Math::Minimizer>(_config.CreateMinimizer());
11371142
_minimizer->SetFunction(*getMultiGenFcn());
1143+
#ifdef ROOFIT_MULTIPROCESS
1144+
if (_fcn->returnsInMinuit2ParameterSpace()) {
1145+
auto &rooFcn = dynamic_cast<RooFit::TestStatistics::MinuitFcnGrad &>(*_fcn);
1146+
auto &minim = dynamic_cast<ROOT::Minuit2::Minuit2Minimizer &>(*_minimizer);
1147+
auto &adapter = dynamic_cast<ROOT::Minuit2::FCNAdapter &>(*minim.GetFCN());
1148+
adapter.SetGradWithPrevResultFunction([&rooFcn](const double *params, double *grad, double *previous_grad,
1149+
double *previous_g2, double *previous_gstep) {
1150+
rooFcn.GradientWithPrevResult(params, grad, previous_grad, previous_g2, previous_gstep);
1151+
});
1152+
adapter.SetReturnsInMinuit2ParameterSpace(true);
1153+
}
1154+
#endif
11381155
_minimizer->SetVariables(_config.ParamsSettings().begin(), _config.ParamsSettings().end());
11391156

11401157
if (_cfg.setInitialCovariance) {

roofit/roofitcore/src/TestStatistics/MinuitFcnGrad.cxx

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -35,14 +35,6 @@ class MinuitGradFunctor : public ROOT::Math::IMultiGradFunction {
3535

3636
void Gradient(const double *x, double *grad) const override { return _fcn.Gradient(x, grad); }
3737

38-
void GradientWithPrevResult(const double *x, double *grad, double *previous_grad, double *previous_g2,
39-
double *previous_gstep) const override
40-
{
41-
return _fcn.GradientWithPrevResult(x, grad, previous_grad, previous_g2, previous_gstep);
42-
}
43-
44-
bool returnsInMinuit2ParameterSpace() const override { return _fcn.returnsInMinuit2ParameterSpace(); }
45-
4638
private:
4739
double DoEval(const double *x) const override { return _fcn(x); }
4840

roofit/roofitcore/src/TestStatistics/MinuitFcnGrad.h

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,8 +37,7 @@ class MinuitFcnGrad : public RooAbsMinimizerFcn {
3737
/// Overridden from RooAbsMinimizerFcn to include gradient strategy synchronization.
3838
bool Synchronize(std::vector<ROOT::Fit::ParameterSettings> &parameter_settings) override;
3939

40-
// used inside Minuit:
41-
inline bool returnsInMinuit2ParameterSpace() const { return _gradient->usesMinuitInternalValues(); }
40+
bool returnsInMinuit2ParameterSpace() const override { return _gradient->usesMinuitInternalValues(); }
4241

4342
inline void setOptimizeConstOnFunction(RooAbsArg::ConstOpCode opcode, bool doAlsoTrackingOpt) override
4443
{

0 commit comments

Comments (0)