7 changes: 1 addition & 6 deletions math/mathcore/inc/Math/Util.h
@@ -123,7 +123,7 @@ namespace ROOT {
public:
/// Initialise the sum.
/// \param[in] initialValue Initialise with this value. Defaults to 0.
- KahanSum(T initialValue = T{}) {
+ explicit KahanSum(T initialValue = T{}) {
fSum[0] = initialValue;
std::fill(std::begin(fSum)+1, std::end(fSum), 0.);
std::fill(std::begin(fCarry), std::end(fCarry), 0.);
@@ -246,11 +246,6 @@ namespace ROOT {
return Sum();
}

- /// Auto-convert to type T
- operator T() const {
- return Sum();
- }

/// \return The sum used for compensation.
T Carry() const {
return std::accumulate(std::begin(fCarry), std::end(fCarry), 0.);
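With the constructor now `explicit` and the implicit `operator T()` removed, callers construct a `KahanSum` directly and read it back through `Sum()` (or `Carry()` for the compensation term). A minimal usage sketch, not taken from the PR, with illustrative names:

```cpp
#include "Math/Util.h"

#include <vector>

// Illustrative only: compensated summation with the post-PR KahanSum interface.
double sumValues(const std::vector<double> &values)
{
   ROOT::Math::KahanSum<double> sum{0.0}; // `KahanSum<double> sum = 0.0;` no longer compiles
   for (double v : values) {
      sum += v; // accumulation is unchanged
   }
   // No implicit conversion to double any more, so the result is read explicitly:
   return sum.Sum();
}
```

The static `Accumulate(begin, end)` helper seen in `RooDataHist::sumEntries()` below follows the same pattern: its return value now needs an explicit `.Sum()` at the call site.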
2 changes: 1 addition & 1 deletion roofit/roofitcore/inc/RooAbsTestStatistic.h
@@ -154,7 +154,7 @@ class RooAbsTestStatistic : public RooAbsReal {
RooFit::MPSplit _mpinterl = RooFit::BulkPartition; ///< Use interleaving strategy rather than N-wise split for partioning of dataset for multiprocessor-split
bool _doOffset = false; ///< Apply interval value offset to control numeric precision?
const bool _takeGlobalObservablesFromData = false; ///< If the global observable values are taken from data
- mutable ROOT::Math::KahanSum<double> _offset = 0.0; ///<! Offset as KahanSum to avoid loss of precision
+ mutable ROOT::Math::KahanSum<double> _offset {0.0}; ///<! Offset as KahanSum to avoid loss of precision
mutable double _evalCarry = 0.0; ///<! carry of Kahan sum in evaluatePartition

ClassDefOverride(RooAbsTestStatistic,0) // Abstract base class for real-valued test statistics
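The header changes in this PR all follow from standard C++ initialization rules: with the constructor marked `explicit`, copy-initialisation (`= 0.0`) would require an implicit conversion and no longer compiles, while direct-list-initialisation (`{0.0}`) may still call the explicit constructor. A minimal illustration, not project code:

```cpp
#include "Math/Util.h"

struct Example {
   // ROOT::Math::KahanSum<double> a = 0.0; // ill-formed now: copy-init cannot use an explicit ctor
   ROOT::Math::KahanSum<double> b{0.0};     // OK: direct-list-init may call the explicit ctor
   ROOT::Math::KahanSum<double> c;          // OK: default-constructs with initial value T{} (0 for double)
};
```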
@@ -103,7 +103,7 @@ class LikelihoodWrapper {

bool do_offset_ = false;
ROOT::Math::KahanSum<double> offset_;
- ROOT::Math::KahanSum<double> offset_save_ = 0; ///<!
+ ROOT::Math::KahanSum<double> offset_save_ {0.}; ///<!
OffsettingMode offsetting_mode_ = OffsettingMode::legacy;
ROOT::Math::KahanSum<double> applyOffsetting(ROOT::Math::KahanSum<double> current_value);
void swapOffsets();
2 changes: 1 addition & 1 deletion roofit/roofitcore/inc/RooFit/TestStatistics/RooBinnedL.h
@@ -42,7 +42,7 @@ class RooBinnedL : public RooAbsL {
mutable std::vector<double> _binw; ///<!
std::unique_ptr<RooChangeTracker> paramTracker_;
Section lastSection_ = {0, 0}; // used for cache together with the parameter tracker
- mutable ROOT::Math::KahanSum<double> cachedResult_ = 0;
+ mutable ROOT::Math::KahanSum<double> cachedResult_ {0.};
};

} // namespace TestStatistics
@@ -50,7 +50,7 @@ class RooUnbinnedL : public RooAbsL {
bool useBatchedEvaluations_ = false;
std::unique_ptr<RooChangeTracker> paramTracker_;
Section lastSection_ = {0, 0}; // used for cache together with the parameter tracker
- mutable ROOT::Math::KahanSum<double> cachedResult_ = 0;
+ mutable ROOT::Math::KahanSum<double> cachedResult_ {0.};
};

} // namespace TestStatistics
2 changes: 1 addition & 1 deletion roofit/roofitcore/src/RooAbsData.cxx
@@ -1021,7 +1021,7 @@ double RooAbsData::moment(const RooRealVar& var, double order, double offset, co
sum += weight() * TMath::Power(varPtr->getVal() - offset,order);
}

- return sum/sumEntries(cutSpec, cutRange);
+ return sum.Sum()/sumEntries(cutSpec, cutRange);
}

////////////////////////////////////////////////////////////////////////////////
2 changes: 1 addition & 1 deletion roofit/roofitcore/src/RooAbsTestStatistic.cxx
@@ -588,7 +588,7 @@ void RooAbsTestStatistic::enableOffsetting(bool flag)
_doOffset = flag ;
// Clear offset if feature is disabled to that it is recalculated next time it is enabled
if (!_doOffset) {
- _offset = 0 ;
+ _offset = ROOT::Math::KahanSum<double>{0.} ;
}
setValueDirty() ;
break ;
2 changes: 1 addition & 1 deletion roofit/roofitcore/src/RooBinIntegrator.cxx
@@ -283,7 +283,7 @@ double RooBinIntegrator::integral(const double *)
}
}

- return sum;
+ return sum.Sum();
}


2 changes: 1 addition & 1 deletion roofit/roofitcore/src/RooCurve.cxx
@@ -591,7 +591,7 @@ double RooCurve::chiSquare(const RooHist& hist, Int_t nFitParam) const
}

// Return chisq/nDOF
- return chisq / (nbin-nFitParam) ;
+ return chisq.Sum() / (nbin-nFitParam) ;
}


12 changes: 6 additions & 6 deletions roofit/roofitcore/src/RooDataHist.cxx
@@ -1812,9 +1812,9 @@ double RooDataHist::sum(bool correctForBinSize, bool inverseBinCor) const

// Store result in cache
_cache_sum_valid = cache_code;
- _cache_sum = kahanSum;
+ _cache_sum = kahanSum.Sum();

- return kahanSum;
+ return kahanSum.Sum();
}


@@ -1888,7 +1888,7 @@ double RooDataHist::sum(const RooArgSet& sumSet, const RooArgSet& sliceSet, bool

_vars.assign(varSave) ;

- return total;
+ return total.Sum();
}

////////////////////////////////////////////////////////////////////////////////
@@ -1997,7 +1997,7 @@ double RooDataHist::sum(const RooArgSet& sumSet, const RooArgSet& sliceSet,

_vars.assign(varSave);

- return total;
+ return total.Sum();
}


@@ -2055,7 +2055,7 @@ const std::vector<double>& RooDataHist::calculatePartialBinVolume(const RooArgSe
////////////////////////////////////////////////////////////////////////////////
/// Sum the weights of all bins.
double RooDataHist::sumEntries() const {
- return ROOT::Math::KahanSum<double>::Accumulate(_wgt, _wgt + _arrSize);
+ return ROOT::Math::KahanSum<double>::Accumulate(_wgt, _wgt + _arrSize).Sum();
}


@@ -2089,7 +2089,7 @@ double RooDataHist::sumEntries(const char* cutSpec, const char* cutRange) const
kahanSum += weight(i);
}

- return kahanSum;
+ return kahanSum.Sum();
}
}

8 changes: 4 additions & 4 deletions roofit/roofitcore/src/RooNLLVar.cxx
@@ -334,8 +334,8 @@ double RooNLLVar::evaluatePartition(std::size_t firstEvent, std::size_t lastEven
if (_doOffset) {

// If no offset is stored enable this feature now
- if (_offset==0 && result !=0 ) {
- coutI(Minimization) << "RooNLLVar::evaluatePartition(" << GetName() << ") first = "<< firstEvent << " last = " << lastEvent << " Likelihood offset now set to " << result << std::endl ;
+ if (_offset.Sum() == 0 && _offset.Carry() == 0 && (result.Sum() != 0 || result.Carry() != 0)) {
+ coutI(Minimization) << "RooNLLVar::evaluatePartition(" << GetName() << ") first = "<< firstEvent << " last = " << lastEvent << " Likelihood offset now set to " << result.Sum() << std::endl ;
_offset = result ;
}

@@ -454,7 +454,7 @@ RooNLLVar::ComputeResult RooNLLVar::computeBatchedFunc(const RooAbsPdf *pdfClone

// Some events with evaluation errors. Return "badness" of errors.
if (nanPacker.getPayload() > 0.) {
- return {{nanPacker.getNaNWithPayload()}, sumOfWeights};
+ return {ROOT::Math::KahanSum<double>{nanPacker.getNaNWithPayload()}, sumOfWeights};
} else {
return {kahanSanitised, sumOfWeights};
}
@@ -505,7 +505,7 @@ RooNLLVar::ComputeResult RooNLLVar::computeScalarFunc(const RooAbsPdf *pdfClone,

if (packedNaN.getPayload() != 0.) {
// Some events with evaluation errors. Return "badness" of errors.
- return {{packedNaN.getNaNWithPayload()}, kahanWeight.Sum()};
+ return {ROOT::Math::KahanSum<double>{packedNaN.getNaNWithPayload()}, kahanWeight.Sum()};
}

return {kahanProb, kahanWeight.Sum()};
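Without the implicit conversion, "is this offset still unset" can no longer be written as `_offset == 0`; the checks above spell it out against both the running sum and the carry term. A small helper capturing that pattern could look like this (illustrative only, not part of the PR):

```cpp
#include "Math/Util.h"

// A KahanSum is treated as "zero/unset" only if both the running sum and
// the compensation (carry) term are exactly zero.
inline bool isZero(const ROOT::Math::KahanSum<double> &k)
{
   return k.Sum() == 0.0 && k.Carry() == 0.0;
}

// Hypothetical usage mirroring the offsetting logic above:
//   if (isZero(_offset) && !isZero(result)) _offset = result;
```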
8 changes: 4 additions & 4 deletions roofit/roofitcore/src/RooNLLVarNew.cxx
@@ -197,7 +197,7 @@ void RooNLLVarNew::computeBatch(cudaStream_t * /*stream*/, double *output, size_

if (packedNaN.getPayload() != 0.) {
// Some events with evaluation errors. Return "badness" of errors.
- kahanProb = packedNaN.getNaNWithPayload();
+ kahanProb = Math::KahanSum<double>(packedNaN.getNaNWithPayload());
}

if (_isExtended) {
@@ -279,7 +279,7 @@ RooNLLVarNew::fillNormSetForServer(RooArgSet const & /*normSet*/, RooAbsArg cons
void RooNLLVarNew::enableOffsetting(bool flag)
{
_doOffset = flag;
- _offset = {};
+ _offset = ROOT::Math::KahanSum<double>{};
}

double RooNLLVarNew::finalizeResult(ROOT::Math::KahanSum<double> &&result, double weightSum) const
@@ -291,10 +291,10 @@ double RooNLLVarNew::finalizeResult(ROOT::Math::KahanSum<double> &&result, doubl
}

// Check if value offset flag is set.
- if (_offset) {
+ if (_doOffset) {

// If no offset is stored enable this feature now
- if (_offset == 0 && result != 0) {
+ if (_offset.Sum() == 0 && _offset.Carry() == 0 && (result.Sum() != 0 || result.Carry() != 0)) {
_offset = result;
}

2 changes: 1 addition & 1 deletion roofit/roofitcore/src/RooNLLVarNew.h
@@ -77,7 +77,7 @@ class RooNLLVarNew : public RooAbsReal {
RooTemplateProxy<RooAbsReal> _weightSquaredVar;
mutable std::vector<double> _binw; ///<!
mutable std::vector<double> _logProbasBuffer; ///<!
- mutable ROOT::Math::KahanSum<double> _offset = 0.0; ///<! Offset as KahanSum to avoid loss of precision
+ mutable ROOT::Math::KahanSum<double> _offset {0.}; ///<! Offset as KahanSum to avoid loss of precision

}; // end class RooNLLVar

3 changes: 2 additions & 1 deletion roofit/roofitcore/src/TestStatistics/LikelihoodJob.cxx
@@ -193,7 +193,8 @@ void LikelihoodJob::evaluate()
// wait for task results back from workers to master
gather_worker_results();

- result_ = 0;
+ result_ = ROOT::Math::KahanSum<double>{0.};
+ // printf("Master evaluate: ");
for (auto const &item : results_) {
result_ += item;
}
10 changes: 5 additions & 5 deletions roofit/roofitcore/src/TestStatistics/LikelihoodWrapper.cxx
@@ -86,7 +86,7 @@ void LikelihoodWrapper::enableOffsetting(bool flag)
do_offset_ = flag;
// Clear offset if feature is disabled so that it is recalculated next time it is enabled
if (!do_offset_) {
- offset_ = {};
+ offset_ = ROOT::Math::KahanSum<double>();
}
}

@@ -97,7 +97,7 @@ void LikelihoodWrapper::setOffsettingMode(OffsettingMode mode)
oocoutI(nullptr, Minimization)
<< "LikelihoodWrapper::setOffsettingMode(" << GetName()
<< "): changed offsetting mode while offsetting was enabled; resetting offset values" << std::endl;
- offset_ = {};
+ offset_ = ROOT::Math::KahanSum<double>();
}
}

@@ -106,7 +106,7 @@ ROOT::Math::KahanSum<double> LikelihoodWrapper::applyOffsetting(ROOT::Math::Kaha
if (do_offset_) {

// If no offset is stored enable this feature now
- if (offset_ == 0 && current_value != 0) {
+ if (offset_.Sum() == 0 && offset_.Carry() == 0 && (current_value.Sum() != 0 || current_value.Carry() != 0)) {
offset_ = current_value;
if (offsetting_mode_ == OffsettingMode::legacy) {
auto sum_likelihood = dynamic_cast<RooSumL *>(likelihood_.get());
@@ -115,11 +115,11 @@ ROOT::Math::KahanSum<double> LikelihoodWrapper::applyOffsetting(ROOT::Math::Kaha
// "undo" the addition of the subsidiary value to emulate legacy behavior
offset_ -= subsidiary_value;
// manually calculate result with zero carry, again to emulate legacy behavior
- return {current_value.Result() - offset_.Result()};
+ return ROOT::Math::KahanSum<double>{current_value.Result() - offset_.Result()};
}
}
oocoutI(nullptr, Minimization)
<< "LikelihoodWrapper::applyOffsetting(" << GetName() << "): Likelihood offset now set to " << offset_
<< "LikelihoodWrapper::applyOffsetting(" << GetName() << "): Likelihood offset now set to " << offset_.Sum()
<< std::endl;
}

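The legacy branch above intentionally drops the carry: it subtracts the two `Result()` values and wraps the plain difference in a fresh `KahanSum`, rather than performing a compensated subtraction. A toy comparison of the two behaviours, sketched under the assumption that `operator-=` on `KahanSum` keeps compensating (as the `offset_ -= subsidiary_value` line above suggests):

```cpp
#include "Math/Util.h"

// Legacy-style offsetting: plain double subtraction, carry restarts at zero.
ROOT::Math::KahanSum<double> offsetLegacy(const ROOT::Math::KahanSum<double> &value,
                                          const ROOT::Math::KahanSum<double> &offset)
{
   return ROOT::Math::KahanSum<double>{value.Result() - offset.Result()};
}

// Compensated offsetting: subtract through the KahanSum itself, keeping the carry.
ROOT::Math::KahanSum<double> offsetCompensated(ROOT::Math::KahanSum<double> value,
                                               const ROOT::Math::KahanSum<double> &offset)
{
   value -= offset;
   return value;
}
```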
4 changes: 2 additions & 2 deletions roofit/roofitcore/src/TestStatistics/RooBinnedL.cxx
@@ -94,7 +94,7 @@ RooBinnedL::evaluatePartition(Section bins, std::size_t /*components_begin*/, st
ROOT::Math::KahanSum<double> result;

// Do not reevaluate likelihood if parameters nor event range have changed
- if (!paramTracker_->hasChanged(true) && bins == lastSection_ && (cachedResult_ != 0)) return cachedResult_;
+ if (!paramTracker_->hasChanged(true) && bins == lastSection_ && (cachedResult_.Sum() != 0 || cachedResult_.Carry() != 0)) return cachedResult_;

// data->store()->recalculateCache(_projDeps, firstEvent, lastEvent, stepSize, (_binnedPdf?false:true));
// TODO: check when we might need _projDeps (it seems to be mostly empty); ties in with TODO below
@@ -137,7 +137,7 @@ RooBinnedL::evaluatePartition(Section bins, std::size_t /*components_begin*/, st
// If part of simultaneous PDF normalize probability over
// number of simultaneous PDFs: -sum(log(p/n)) = -sum(log(p)) + N*log(n)
if (sim_count_ > 1) {
- result += sumWeight * log(1.0 * sim_count_);
+ result += sumWeight.Sum() * log(1.0 * sim_count_);
}

// At the end of the first full calculation, wire the caches
2 changes: 1 addition & 1 deletion roofit/roofitcore/src/TestStatistics/RooSumL.cxx
@@ -117,7 +117,7 @@ ROOT::Math::KahanSum<double> RooSumL::getSubsidiaryValue()
return (*component)->evaluatePartition({0, 1}, 0, 0);
}
}
- return {};
+ return ROOT::Math::KahanSum<double>{};
}

void RooSumL::constOptimizeTestStatistic(RooAbsArg::ConstOpCode opcode, bool doAlsoTrackingOpt)
2 changes: 1 addition & 1 deletion roofit/roofitcore/src/TestStatistics/RooUnbinnedL.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ RooUnbinnedL::evaluatePartition(Section events, std::size_t /*components_begin*/
double sumWeight;

// Do not reevaluate likelihood if parameters nor event range have changed
- if (!paramTracker_->hasChanged(true) && events == lastSection_ && (cachedResult_ != 0)) return cachedResult_;
+ if (!paramTracker_->hasChanged(true) && events == lastSection_ && (cachedResult_.Sum() != 0 || cachedResult_.Carry() != 0)) return cachedResult_;

data_->store()->recalculateCache(nullptr, events.begin(N_events_), events.end(N_events_), 1, true);

@@ -366,7 +366,7 @@ TEST(SimBinnedConstrainedTestBasic, BasicParameters)
nll_ts->evaluate();
auto nll1 = nll_ts->getResult();

- EXPECT_DOUBLE_EQ(nll0, nll1);
+ EXPECT_DOUBLE_EQ(nll0, nll1.Sum());
}

class SimBinnedConstrainedTest : public ::testing::TestWithParam<std::tuple<bool>> {};