2 changes: 1 addition & 1 deletion tmva/tmva/inc/TMVA/Factory.h
@@ -188,7 +188,7 @@ namespace TMVA {
TString /*compositeOption = ""*/ ) { return 0; }

// optimize all booked methods (well, if desired by the method)
void OptimizeAllMethods (TString fomType="ROCIntegral", TString fitType="FitGA");
std::map<TString,Double_t> OptimizeAllMethods (TString fomType="ROCIntegral", TString fitType="FitGA");
void OptimizeAllMethodsForClassification(TString fomType="ROCIntegral", TString fitType="FitGA") { OptimizeAllMethods(fomType,fitType); }
void OptimizeAllMethodsForRegression (TString fomType="ROCIntegral", TString fitType="FitGA") { OptimizeAllMethods(fomType,fitType); }

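Note: `OptimizeAllMethods` now returns the tuned-parameter map instead of `void`. A minimal sketch of how a caller might pick up the result, assuming `factory` is an already-configured `TMVA::Factory*` with methods booked (the map's key names depend on the booked method):

```cpp
// Sketch (ROOT macro context): capture the tuned parameters returned
// by the factory. Assumes `factory` and its method bookings exist.
#include <iostream>
#include <map>
#include "TMVA/Factory.h"

void printTunedParameters(TMVA::Factory* factory) {
   std::map<TString, Double_t> tuned = factory->OptimizeAllMethods("ROCIntegral", "FitGA");
   for (const auto& par : tuned)
      std::cout << "tuned parameter " << par.first << " = " << par.second << std::endl;
}
```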
47 changes: 41 additions & 6 deletions tmva/tmva/inc/TMVA/MethodSVM.h
@@ -54,7 +54,11 @@
#include "TVectorD.h"
#endif
#endif

#ifndef ROOT_TMVA_SVKernelFunction
#include "TMVA/SVKernelFunction.h"
#endif
#include <vector>
#include <string>
namespace TMVA
{
class SVWorkingSet;
@@ -73,9 +77,18 @@ namespace TMVA
virtual ~MethodSVM( void );

virtual Bool_t HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets );

// optimise tuning parameters
virtual std::map<TString,Double_t> OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="Minuit");
virtual void SetTuneParameters(std::map<TString,Double_t> tuneParameters);
std::vector<TMVA::SVKernelFunction::EKernelType> MakeKernelList(std::string multiKernels, TString kernel);
std::map< TString,std::vector<Double_t> > GetTuningOptions();

// training method
void Train( void );

// revoke training (required for optimising tuning parameters)
void Reset( void );

using MethodBase::ReadWeightsFromStream;

@@ -93,9 +106,20 @@ namespace TMVA
const std::vector<Float_t>& GetRegressionValues();

void Init( void );

void InitHistos();
// ranking of input variables
const Ranking* CreateRanking() { return 0; }
const Ranking* CreateRanking() { return 0; }

// for SVM optimisation
void SetGamma(Double_t g){fGamma = g;}
void SetCost(Double_t c){fCost = c;}
void SetMGamma(std::string & mg);
void SetOrder(Double_t o){fOrder = o;}
void SetTheta(Double_t t){fTheta = t;}
void SetKappa(Double_t k){fKappa = k;}
void SetMult(Double_t m){fMult = m;}

void GetMGamma(const std::vector<float> & gammas);

protected:

@@ -106,11 +130,11 @@ namespace TMVA
void GetHelpMessage() const;

private:

// the option handling methods
void DeclareOptions();
void DeclareCompatibilityOptions();
void ProcessOptions();
Double_t getLoss( TString lossFunction );

Float_t fCost; // cost value
Float_t fTolerance; // tolerance parameter
@@ -126,13 +150,24 @@ namespace TMVA
TVectorD* fMinVars; // for normalization //is it still needed??
TVectorD* fMaxVars; // for normalization //is it still needed??

// for backward compatibility
// for kernel functions
TString fTheKernel; // kernel name
Float_t fDoubleSigmaSquared; // for RBF Kernel
Int_t fOrder; // for Polynomial Kernel ( polynomial order )
Float_t fTheta; // for Sigmoidal Kernel
Float_t fKappa; // for Sigmoidal Kernel

Float_t fMult;
std::vector<Float_t> fmGamma; // vector of gammas for multi-gaussian kernel
Float_t fNumVars; // number of input variables for multi-gaussian
std::vector<TString> fVarNames;
std::string fGammas;
std::string fGammaList;
std::string fTune; // Specify parameters to be tuned
std::string fMultiKernels;

Int_t fDataSize;
TString fLoss;

ClassDef(MethodSVM,0) // Support Vector Machine
};

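The new public interface makes the SVM tunable on its own. A hedged sketch of the intended call sequence, assuming `svm` points to a booked `TMVA::MethodSVM` (in practice `OptimizeTuningParameters` may already apply the best point internally; the explicit `SetTuneParameters` call is shown for clarity):

```cpp
// Sketch: drive the per-method tuning by hand.
// Assumes `svm` is a valid TMVA::MethodSVM* from an existing factory.
#include <map>
#include "TMVA/MethodSVM.h"

void tuneSvm(TMVA::MethodSVM* svm) {
   std::map<TString, Double_t> best = svm->OptimizeTuningParameters("ROCIntegral", "Minuit");
   svm->SetTuneParameters(best); // apply the optimal working point
   svm->Reset();                 // revoke any intermediate training state
   svm->Train();                 // retrain with the tuned parameters
}
```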
14 changes: 12 additions & 2 deletions tmva/tmva/inc/TMVA/SVKernelFunction.h
@@ -39,25 +39,35 @@ namespace TMVA {

public:

enum EKernelType { kLinear , kRBF, kPolynomial, kSigmoidal, kMultiGauss, kProd, kSum};

SVKernelFunction();
SVKernelFunction( Float_t );
SVKernelFunction( EKernelType, Float_t, Float_t=0);
SVKernelFunction( std::vector<float> params );
SVKernelFunction(EKernelType k, std::vector<EKernelType> kernels, std::vector<Float_t> gammas, Float_t gamma, Float_t order, Float_t theta);
~SVKernelFunction();

Float_t Evaluate( SVEvent* ev1, SVEvent* ev2 );

enum EKernelType { kLinear , kRBF, kPolynomial, kSigmoidal };
// enum EKernelType { kLinear , kRBF, kPolynomial, kSigmoidal };

void setCompatibilityParams(EKernelType k, UInt_t order, Float_t theta, Float_t kappa);

private:

Float_t fGamma; // width parameter of the Gaussian (RBF) kernel

// TJS: vector of gammas for the multidimensional Gaussian kernel
std::vector<Float_t> fmGamma;

// kernel, order, theta, and kappa are for backward compatibility
EKernelType fKernel;
UInt_t fOrder;
Float_t fTheta;
Float_t fKappa;

std::vector<EKernelType> fKernelsList;
};
}

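With the enlarged `EKernelType` enum, elementary kernels can now be combined as products (`kProd`) or sums (`kSum`). A sketch of the extended constructor; the numeric values below are illustrative placeholders, not taken from the PR:

```cpp
// Sketch: a product of an RBF and a polynomial kernel via the new
// constructor. All parameter values here are placeholders.
#include <vector>
#include "TMVA/SVKernelFunction.h"

Float_t evaluateProductKernel(TMVA::SVEvent* ev1, TMVA::SVEvent* ev2) {
   std::vector<TMVA::SVKernelFunction::EKernelType> kernels =
      {TMVA::SVKernelFunction::kRBF, TMVA::SVKernelFunction::kPolynomial};
   std::vector<Float_t> mGammas; // per-variable gammas, used by kMultiGauss only
   TMVA::SVKernelFunction kernel(TMVA::SVKernelFunction::kProd, kernels, mGammas,
                                 /*gamma=*/0.25f, /*order=*/2, /*theta=*/1.0f);
   return kernel.Evaluate(ev1, ev2);
}
```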
10 changes: 6 additions & 4 deletions tmva/tmva/src/Factory.cxx
@@ -928,18 +928,18 @@ void TMVA::Factory::WriteDataInformation()
/// keeps in mind the "optimal one"... and that's the one that will later on be used
/// in the main training loop.

void TMVA::Factory::OptimizeAllMethods(TString fomType, TString fitType)
std::map<TString,Double_t> TMVA::Factory::OptimizeAllMethods(TString fomType, TString fitType)
{

MVector::iterator itrMethod;

std::map<TString,Double_t> TunedParameters;
// iterate over methods and optimize
for( itrMethod = fMethods.begin(); itrMethod != fMethods.end(); ++itrMethod ) {
Event::SetIsTraining(kTRUE);
MethodBase* mva = dynamic_cast<MethodBase*>(*itrMethod);
if (!mva) {
Log() << kFATAL << "Dynamic cast to MethodBase failed" <<Endl;
return;
return TunedParameters;
}

if (mva->Data()->GetNTrainingEvents() < MinNoTrainingEvents) {
@@ -954,9 +954,11 @@ void TMVA::Factory::OptimizeAllMethods(TString fomType, TString fitType)
<< (fAnalysisType == Types::kRegression ? "Regression" :
(fAnalysisType == Types::kMulticlass ? "Multiclass classification" : "Classification")) << Endl;

mva->OptimizeTuningParameters(fomType,fitType);
TunedParameters = mva->OptimizeTuningParameters(fomType,fitType);
Log() << kINFO << "Optimization of tuning parameters finished for Method: " << mva->GetName() << Endl;
}
return TunedParameters;

}
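One caveat visible in the loop above: `TunedParameters` is reassigned on every iteration, so with several booked methods the returned map holds only the parameters of the method optimized last. A short sketch of the behaviour a caller should expect (bookings are assumed to have happened elsewhere):

```cpp
// Sketch: with more than one booked method, each iteration overwrites
// TunedParameters, so only the final method's tuning survives; earlier
// results must be read from the log or the methods themselves.
std::map<TString, Double_t> last = factory->OptimizeAllMethods("ROCIntegral", "FitGA");
// `last` now reflects only the final entry of fMethods.
```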

////////////////////////////////////////////////////////////////////////////////