#ifndef ROOT_TMVA_MethodCFMlpANN_Utils
#define ROOT_TMVA_MethodCFMlpANN_Utils
#ifndef ROOT_TMVA_MethodCFMlpANN_def
#include "TMVA/MethodCFMlpANN_def.h"
#endif
#ifndef ROOT_TObject
#include "TObject.h"
#endif
// <cstdio>/<cstdlib> are needed by the inline printf/std::exit calls in VARn2 below
#include <cstdio>
#include <cstdlib>
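// Description:
// Utility class for the Clermont-Ferrand multi-layer perceptron (CFMlpANN).
// The routines below appear to be a close C++ transcription of the original
// FORTRAN implementation, which is why most of them take pointer arguments
// and keep the original (partly French) routine names.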
namespace TMVA {
class MethodCFMlpANN_Utils {
public:
MethodCFMlpANN_Utils();
virtual ~MethodCFMlpANN_Utils();
protected:
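// Training and evaluation routines; the pointer-based signatures appear to
// follow the original FORTRAN calling convention. DataInterface() is pure
// virtual and must be implemented by the derived method class to pass event
// data to the network.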
void Train_nn( Double_t *tin2, Double_t *tout2, Int_t *ntrain,
Int_t *ntest, Int_t *nvar2, Int_t *nlayer,
Int_t *nodes, Int_t *ncycle );
void Entree_new( Int_t *, char *, Int_t *ntrain, Int_t *ntest,
Int_t *numlayer, Int_t *nodes, Int_t *numcycle,
Int_t );
virtual Int_t DataInterface( Double_t*, Double_t*, Int_t*, Int_t*, Int_t*, Int_t*,
Double_t*, Int_t*, Int_t* ) = 0;
Double_t Fdecroi(Int_t *i__);
Double_t Sen3a(void);
void Wini();
void En_avant(Int_t *ievent);
void En_avant2(Int_t *ievent);
void En_arriere(Int_t *ievent);
void Leclearn(Int_t *ktest, Double_t *tout2, Double_t *tin2);
void Out(Int_t *iii, Int_t *maxcycle);
void Cout(Int_t *, Double_t *xxx);
void Innit(char *det, Double_t *tout2, Double_t *tin2, Int_t );
void TestNN();
void Inl();
void GraphNN(Int_t *ilearn, Double_t *, Double_t *, char *, Int_t);
void Foncf(Int_t *i__, Double_t *u, Double_t *f);
void Cout2(Int_t * , Double_t *yyy);
void Lecev2(Int_t *ktest, Double_t *tout2, Double_t *tin2);
void Arret(const char* mot );
void CollectVar(Int_t *nvar, Int_t *class__, Double_t *xpg);
protected:
static Int_t fg_100;
static Int_t fg_0;
static Int_t fg_max_nVar_;
static Int_t fg_max_nNodes_;
static Int_t fg_999;
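// Accessors emulating 1-based FORTRAN indexing on the flat weight arrays,
// with a_1 (presumably the layer index) varying fastest. The constant offsets
// 187 and 7 presumably place element (1,1,1) resp. (1,1) at position 0, i.e.
// 187 = (1*max_nNodes_ + 1)*max_nLayers_ + 1 and 7 = max_nLayers_ + 1 for the
// bounds defined in MethodCFMlpANN_def.h. Illustrative use (not part of the
// original header): W_ref(fNeur_1.w, ilayer, i, j) addresses a single weight
// in the flat fNeur_1.w buffer.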
Double_t W_ref(const Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) const {
return wNN [(a_3*max_nNodes_ + a_2)*max_nLayers_ + a_1 - 187];
}
Double_t& W_ref(Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) {
return wNN [(a_3*max_nNodes_ + a_2)*max_nLayers_ + a_1 - 187];
}
Double_t Ww_ref(const Double_t wwNN[], Int_t a_1,Int_t a_2) const {
return wwNN[(a_2)*max_nLayers_ + a_1 - 7];
}
Double_t& Ww_ref(Double_t wwNN[], Int_t a_1,Int_t a_2) {
return wwNN[(a_2)*max_nLayers_ + a_1 - 7];
}
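// Steering parameters of the network (learning-rate window epsmin/epsmax,
// numbers of layers, classes, events and variables, ...). This struct and the
// ones below appear to mirror the COMMON blocks of the FORTRAN original.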
struct {
Double_t epsmin, epsmax, eeps, eta;
Int_t layerm, lclass, nevl, nblearn, nunilec, nunisor, nunishort, nunap;
Int_t nvar, itest, ndiv, ichoi, ndivis, nevt;
} fParam_1;
struct {
Double_t xmax[max_nVar_], xmin[max_nVar_];
Int_t nclass[max_Events_], mclass[max_Events_], iclass;
} fVarn_1;
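// Dynamically allocated (nevt+1) x (nvar+1) matrix used to hold the input
// variables of the training and test samples (fVarn2_1 and fVarn3_1 below).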
class VARn2 {
public:
VARn2() : fNevt(0), fNvar(0) {
fxx = 0;
}
~VARn2() {
Delete();
}
void Create( Int_t nevt, Int_t nvar ) {
fNevt = nevt+1; fNvar = nvar+1;
fxx = new Double_t*[fNevt];
for (Int_t i=0; i<fNevt; i++) fxx[i] = new Double_t[fNvar];
}
Double_t operator=( Double_t val ) { return val; }
Double_t &operator()( Int_t ievt, Int_t ivar ) const {
if (0 != fxx && ievt < fNevt && ivar < fNvar) return fxx[ievt][ivar];
else {
// fxx was never allocated via Create(), or the indices are out of range:
// abort instead of dereferencing a null pointer
printf( "*** ERROR in VARn2::operator(): fxx is zero pointer or index out of range ==> abort ***\n" );
std::exit(1);
return fxx[0][0]; // not reached
}
}
void Delete( void ) {
if (0 != fxx) for (Int_t i=0; i<fNevt; i++) if (0 != fxx[i]) delete [] fxx[i];
delete[] fxx;
fxx=0;
}
Double_t** fxx;
Int_t fNevt;
Int_t fNvar;
} fVarn2_1, fVarn3_1;
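// Network state (as suggested by the member names): per-node inputs and
// outputs (x, y), output-layer response (o), layer-to-layer weights (w),
// what appear to be per-node bias weights (ww) with their updates (deltaww),
// decision cuts (cut) and the number of neurons per layer (neuron).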
struct {
Double_t x [max_nLayers_*max_nNodes_];
Double_t y [max_nLayers_*max_nNodes_];
Double_t o [max_nNodes_];
Double_t w [max_nLayers_*max_nNodes_*max_nNodes_];
Double_t ww[max_nLayers_*max_nNodes_];
Double_t cut[max_nNodes_];
Double_t deltaww[max_nLayers_*max_nNodes_];
Int_t neuron[max_nLayers_];
} fNeur_1;
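// Back-propagation work space (as suggested by the member names): per-node
// errors (del), weight updates (delw, delww, delta), per-node coefficients
// (coef) and per-layer temperatures (temp) entering the activation, plus the
// bookkeeping values demin, demax and idde.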
struct {
Double_t coef [max_nNodes_], temp[max_nLayers_], demin, demax;
Double_t del [max_nLayers_*max_nNodes_];
Double_t delw [max_nLayers_*max_nNodes_*max_nNodes_];
Double_t delta[max_nLayers_*max_nNodes_*max_nNodes_];
Double_t delww[max_nLayers_*max_nNodes_];
Int_t idde;
} fDel_1;
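// Cost-function bookkeeping: presumably the previous cost value (ancout), the
// convergence tolerance on the cost (tolcou) and a learning-rate/epsilon mode
// flag (ieps).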
struct {
Double_t ancout, tolcou;
Int_t ieps;
} fCost_1;
ClassDef(MethodCFMlpANN_Utils,0)
};
}
#endif