/usr/include/root/TMultiLayerPerceptron.h is in libroot-math-mlp-dev 5.34.14-1build1.
// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch 20/07/03
/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/
#ifndef ROOT_TMultiLayerPerceptron
#define ROOT_TMultiLayerPerceptron
#ifndef ROOT_TObject
#include "TObject.h"
#endif
#ifndef ROOT_TString
#include "TString.h"
#endif
#ifndef ROOT_TObjArray
#include "TObjArray.h"
#endif
#ifndef ROOT_TMatrixD
#include "TMatrixD.h"
#endif
#ifndef ROOT_TNeuron
#include "TNeuron.h"
#endif
class TTree;
class TEventList;
class TTreeFormula;
class TTreeFormulaManager;
//____________________________________________________________________
//
// TMultiLayerPerceptron
//
// This class describes a neural network.
// There are facilities to train the network and use the output.
//
// The input layer is made of inactive neurons (returning the
// normalized input), the hidden layers are made of sigmoid neurons,
// and the output neurons are linear.
//
// The basic input is a TTree and two (training and test) TEventLists.
// For classification jobs, a branch (possibly in a friend tree) must
// contain the expected output.
// Six learning methods are available: kStochastic, kBatch,
// kSteepestDescent, kRibierePolak, kFletcherReeves and kBFGS.
//
// This implementation is *inspired* by the mlpfit package from
// J. Schwindling et al.
//
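// A minimal usage sketch (the file, tree, branch names and cuts below
// are hypothetical; the layout string lists the input branches, the
// hidden layer sizes and the output branch, separated by colons):
//
//   TFile f("data.root");                      // hypothetical input file
//   TTree *t = (TTree*) f.Get("tree");         // hypothetical tree
//   TMultiLayerPerceptron mlp("x,y:5:type", t,
//                             "Entry$%2==0",   // training selection
//                             "Entry$%2==1");  // test selection
//   mlp.SetLearningMethod(TMultiLayerPerceptron::kBFGS);
//   mlp.Train(100, "text");                    // train for 100 epochs
//   Double_t params[2] = { 0.5, -1.2 };
//   Double_t out = mlp.Evaluate(0, params);    // first output neuron
//   mlp.DumpWeights("weights.txt");            // save the trained weights
//   mlp.Export("NNfunction", "C++");           // standalone C++ function
//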
//____________________________________________________________________
class TMultiLayerPerceptron : public TObject {
friend class TMLPAnalyzer;
public:
enum ELearningMethod { kStochastic, kBatch, kSteepestDescent,
kRibierePolak, kFletcherReeves, kBFGS };
enum EDataSet { kTraining, kTest };
TMultiLayerPerceptron();
TMultiLayerPerceptron(const char* layout, TTree* data = 0,
const char* training = "Entry$%2==0",
const char* test = "",
TNeuron::ENeuronType type = TNeuron::kSigmoid,
const char* extF = "", const char* extD = "");
TMultiLayerPerceptron(const char* layout,
const char* weight, TTree* data = 0,
const char* training = "Entry$%2==0",
const char* test = "",
TNeuron::ENeuronType type = TNeuron::kSigmoid,
const char* extF = "", const char* extD = "");
TMultiLayerPerceptron(const char* layout, TTree* data,
TEventList* training,
TEventList* test,
TNeuron::ENeuronType type = TNeuron::kSigmoid,
const char* extF = "", const char* extD = "");
TMultiLayerPerceptron(const char* layout,
const char* weight, TTree* data,
TEventList* training,
TEventList* test,
TNeuron::ENeuronType type = TNeuron::kSigmoid,
const char* extF = "", const char* extD = "");
virtual ~TMultiLayerPerceptron();
void SetData(TTree*);
void SetTrainingDataSet(TEventList* train);
void SetTestDataSet(TEventList* test);
void SetTrainingDataSet(const char* train);
void SetTestDataSet(const char* test);
void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method);
void SetEventWeight(const char*);
void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
Double_t Result(Int_t event, Int_t index = 0) const;
Double_t GetError(Int_t event) const;
Double_t GetError(TMultiLayerPerceptron::EDataSet set) const;
void ComputeDEDw() const;
void Randomize() const;
void SetEta(Double_t eta);
void SetEpsilon(Double_t eps);
void SetDelta(Double_t delta);
void SetEtaDecay(Double_t ed);
void SetTau(Double_t tau);
void SetReset(Int_t reset);
inline Double_t GetEta() const { return fEta; }
inline Double_t GetEpsilon() const { return fEpsilon; }
inline Double_t GetDelta() const { return fDelta; }
inline Double_t GetEtaDecay() const { return fEtaDecay; }
inline Double_t GetTau() const { return fTau; }
inline Int_t GetReset() const { return fReset; }
inline TString GetStructure() const { return fStructure; }
inline TNeuron::ENeuronType GetType() const { return fType; }
void DrawResult(Int_t index = 0, Option_t* option = "test") const;
Bool_t DumpWeights(Option_t* filename = "-") const;
Bool_t LoadWeights(Option_t* filename = "");
Double_t Evaluate(Int_t index, Double_t* params) const;
void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
virtual void Draw(Option_t *option="");
protected:
void AttachData();
void BuildNetwork();
void GetEntry(Int_t) const;
// it is a deliberate choice not to make the learning functions const, even though that would be possible
void MLP_Stochastic(Double_t*);
void MLP_Batch(Double_t*);
Bool_t LineSearch(Double_t*, Double_t*);
void SteepestDir(Double_t*);
void ConjugateGradientsDir(Double_t*, Double_t);
void SetGammaDelta(TMatrixD&, TMatrixD&, Double_t*);
bool GetBFGSH(TMatrixD&, TMatrixD &, TMatrixD&);
void BFGSDir(TMatrixD&, Double_t*);
Double_t DerivDir(Double_t*);
Double_t GetCrossEntropyBinary() const;
Double_t GetCrossEntropy() const;
Double_t GetSumSquareError() const;
private:
TMultiLayerPerceptron(const TMultiLayerPerceptron&); // Not implemented
TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron&); // Not implemented
void ExpandStructure();
void BuildFirstLayer(TString&);
void BuildHiddenLayers(TString&);
void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
Int_t& prevStart, Int_t& prevStop,
Bool_t lastLayer);
void BuildLastLayer(TString&, Int_t);
void Shuffle(Int_t*, Int_t) const;
void MLP_Line(Double_t*, Double_t*, Double_t);
TTree* fData; //! pointer to the tree used as data source
Int_t fCurrentTree; //! index of the current tree in a chain
Double_t fCurrentTreeWeight; //! weight of the current tree in a chain
TObjArray fNetwork; // Collection of all the neurons in the network
TObjArray fFirstLayer; // Collection of the input neurons; subset of fNetwork
TObjArray fLastLayer; // Collection of the output neurons; subset of fNetwork
TObjArray fSynapses; // Collection of all the synapses in the network
TString fStructure; // String containing the network structure
TString fWeight; // String containing the event weight
TNeuron::ENeuronType fType; // Type of hidden neurons
TNeuron::ENeuronType fOutType; // Type of output neurons
TString fextF; // String containing the function name
TString fextD; // String containing the derivative name
TEventList *fTraining; //! EventList defining the events in the training dataset
TEventList *fTest; //! EventList defining the events in the test dataset
ELearningMethod fLearningMethod; //! the learning method in use
TTreeFormula* fEventWeight; //! formula representing the event weight
TTreeFormulaManager* fManager; //! TTreeFormulaManager for the weight and neurons
Double_t fEta; //! Eta - used in stochastic minimisation - Default=0.1
Double_t fEpsilon; //! Epsilon - used in stochastic minimisation - Default=0.
Double_t fDelta; //! Delta - used in stochastic minimisation - Default=0.
Double_t fEtaDecay; //! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
Double_t fTau; //! Tau - used in line search - Default=3.
Double_t fLastAlpha; //! internal parameter used in line search
Int_t fReset; //! number of epochs between two resets of the search direction to the steepest descent - Default=50
Bool_t fTrainingOwner; //! internal flag whether one has to delete fTraining or not
Bool_t fTestOwner; //! internal flag whether one has to delete fTest or not
ClassDef(TMultiLayerPerceptron, 4) // a Neural Network
};
#endif