Som.h

#ifndef Som_H
#define Som_H

#include <math.h>
#include <iostream>
#include <string>
#include <vector>
#include "defs.h"
#include "util.h"
#include "eigensystems.h"
#include "NetParams.h"
#include "LearningFactory.h"


class SOM {
public:
        // Builds the learning algorithm and allocates the codebook from the given parameters.
        SOM( NetParams n );

        SOM();

        // Used when the mapcode is read in instead of being created here.
        SOM( NetParams * n, Mapcode * map );

        ~SOM();

        NetParams netParams;

        LearningAlgorithm * getLearningAlgorithm();

protected:
        Value_Type      _quantization_error;
        Value_Type      _topological_error;
        void            setQuantizationError( Value_Type q );
        void            setTopologicalError( Value_Type t );
        LearningAlgorithm * _learningAlgorithm;

        // Codebook initialization strategies (selected by the init type in NetParams).
        void            RandomInit();
        void            LinearInit();
        void            WindowInit();

public:
        // Computes the quantization and topological errors of the trained map.
        void Quality();

        int AutomaticParametrization();

        // Runs the training, delegating to the selected LearningAlgorithm.
        void Learning();

        int EECNI( int fi );

        // Creates the delta matrix and initializes the codebook according to the init type.
        void InitMapcode();

        Value_Type getQuantizationError();

        Value_Type getTopologicalError();

        // Attaches the data labels to the map units that are their BMUs.
        void Labeling();

        void setParams( NetParams n );

        void setMapcode( Mapcode * mapcode );

        void setData( SOMData * data );

        void setDelta( TMatrix delta );

        TMatrix getDelta() const;

        Mapcode * getMapcode() const;

        SOMData * getData() const;

        int        getNumIterations();

        void       setNumIterations( int i );

        int        getInitType();

        void       setInitType( int init );

        Value_Type getInitLearningRate();

        void       setInitLearningRate( Value_Type init_rate );

        Value_Type getInitNeighbor();

        void       setInitNeighbor( Value_Type init_neighbor );

        int        getLearningType();

        void       setLearningType( int t );

        void       setLearnType( string str );

        string     getLearnType();

        // Returns the index-th eigenvalue (as sorted by eigsrt) of the covariance matrix of entries.
        Value_Type getEigenValue( int index, int d, int n, TMatrix entries );

        vector<int> getNeighbors( int i );

        TMatrix getCities();

private:
        TVector eigenvalue;
};

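/*
 * A minimal usage sketch, assuming a NetParams object has already been
 * configured elsewhere (data, mapcode, init and learning types); only the
 * SOM members declared above are used, the surrounding setup is hypothetical:
 *
 *     NetParams params;                  // assumed to be fully configured
 *     SOM som( params );                 // builds the learning algorithm + codebook
 *     som.InitMapcode();                 // delta matrix + RANDOMIZED/LINEAR/WINDOW init
 *     som.Learning();                    // train the map
 *     som.Quality();                     // fill the error measures
 *     Value_Type qe = som.getQuantizationError();
 *     Value_Type te = som.getTopologicalError();
 */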
vector<int>
SOM::getNeighbors( int i )
{
        return getLearningAlgorithm()->getTopology()->getNeighbors( i, getMapcode()->getLattice(), getMapcode()->getDimensions() );
}

Value_Type
SOM::getEigenValue( int index, int d, int n, TMatrix entries ) {

        if ( ( index >= 1 ) && ( index <= d ) ) {
                int i, j, k;
                Value_Type soma = 0.0;          // running sum ("soma" = sum)
                TVector mean_vector;
                TMatrix Data_tmp;
                TMatrix A;                      // covariance matrix of the (mean-centered) data

                // init A, Data_tmp and mean_vector
                A = create_matrix( 1, d, 1, d );
                for (i=1; i<=d; i++) for (j=1; j<=d; j++) A[i][j] = 0.0;
                mean_vector = create_vector( 1, d );
                for (i=1; i<=d; i++) mean_vector[i] = 0.0;

                Data_tmp = create_matrix( 1, n, 1, d );
                for (i=1; i<=n; i++) for (j=1; j<=d; j++) Data_tmp[i][j] = entries[i][j];

                // mean vector of the data
                for (j=1; j<=d; j++) for (i=1; i<=n; i++) mean_vector[j] += entries[i][j];
                for (i=1; i<=d; i++) mean_vector[i] = mean_vector[i]/n;

                // center the data
                for (i=1; i<=n; i++) for (j=1; j<=d; j++) Data_tmp[i][j] -= mean_vector[j];

                // covariance matrix A
                for (i=1; i<=d; i++)
                        for (j=i; j<=d; j++) {
                                for (k=1; k<=n; k++) soma += Data_tmp[k][i]*Data_tmp[k][j];
                                A[i][j] = A[j][i] = soma/n;
                                soma = 0.0;
                        }

                TVector eigenvalues  = create_vector( 1, d );
                TMatrix eigenvectors = create_matrix( 1, d, 1, d );
                int nrot = 0;

                jacobi( A, d, eigenvalues, eigenvectors, &nrot );
                eigsrt( eigenvalues, eigenvectors, d );

                return eigenvalues[index];
        }
        return 0;
}
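/*
 * Example (illustrative only): for 2-D data, getEigenValue( 1, 2, n, entries )
 * gives the variance of the data along its first principal axis and
 * getEigenValue( 2, 2, n, entries ) the variance along the second, assuming
 * eigsrt() sorts the eigenvalues of the covariance matrix in decreasing order.
 */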

void
SOM::setParams( NetParams n ) {
        netParams = n;

        _learningAlgorithm = LearningFactory::make( n.getLearnType(), n.getMapcode()->getTopolParams() );

        TMatrix codebook = create_matrix( 1, n.getMapcode()->getMapSize(), 1, n.getData()->getDimension() );
        netParams.getMapcode()->setCodebook( codebook );
}

void
SOM::Labeling() {
        int i;
        int m                   = netParams.getMapcode()->getMapSize();
        int n                   = netParams.getData()->getDataSize();
        TBMU * bmu              = getLearningAlgorithm()->getBMU();
        TLabel mapLabel         = netParams.getMapcode()->getLabel();
        TvecLabel dataLabel     = netParams.getData()->getId();

        TvecLabel vec; vec.clear();

        mapLabel.clear();

        for (i=0; i<m; i++) mapLabel.push_back( vec );

        // push each data label onto the label list of its BMU (bmu indices are 1-based)
        for (i=0; i<n; i++) {
                mapLabel[ bmu[0][i][0].i - 1 ].push_back( dataLabel[i] );
        }

        netParams.getMapcode()->setLabel( mapLabel );

        /*
        TvecLabel::iterator j;
        int jj;
        for (i=0; i<m; i++) {
                cout << endl; cout << "No " << " " << i+1 << " ";
                jj = 0;
                for (j=mapLabel[i].begin(); j< mapLabel[i].end(); j++) {
                        cout << *j << " "; ++jj;
                }
        }
        */
}

Value_Type
SOM::getQuantizationError() {
        return _quantization_error;
}

Value_Type
SOM::getTopologicalError() {
        return _topological_error;
}

void
SOM::setQuantizationError( Value_Type q ) {
        _quantization_error = q;
}

void
SOM::setTopologicalError( Value_Type t ) {
        _topological_error = t;
}

void
SOM::Quality() {

        Value_Type topol_error  = 0.0;
        Value_Type quant_error  = 0.0;
        int i;
        int n                   = netParams.getData()->getDataSize();
        TBMU * bmu              = getLearningAlgorithm()->getBMU();
        TMatrix delta           = netParams.getDelta();

        for (i=1; i<=n; i++) {
                // a sample whose two best-matching units are more than ~sqrt(2)
                // lattice units apart counts as a topological defect
                if ( delta[bmu[0][i-1][0].i][bmu[0][i-1][1].i] > 1.4 ) ++topol_error;
                quant_error += bmu[0][i-1][0].dist;
        }

        topol_error /= (Value_Type)n;
        quant_error /= (Value_Type)n;

        setQuantizationError( quant_error );
        setTopologicalError( topol_error );
}
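/*
 * The two quality measures computed above, written out (assuming delta holds
 * distances between units on the map lattice and bmu[0][i][0]/[1] are the
 * first and second best-matching units of sample i):
 *
 *     quantization error  QE = (1/n) * sum_i  dist( x_i, bmu1(x_i) )
 *     topological  error  TE = (1/n) * sum_i  u(x_i),
 *         where u(x_i) = 1 if delta[bmu1(x_i)][bmu2(x_i)] > 1.4, else 0
 */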

SOM::SOM( NetParams n )
{
        netParams = n;

        _learningAlgorithm = LearningFactory::make( n.getLearnType(), n.getMapcode()->getTopolParams() );

        TMatrix codebook = create_matrix( 1, n.getMapcode()->getMapSize(), 1, n.getData()->getDimension() );
        netParams.getMapcode()->setCodebook( codebook );
}

// added for the example in which the mapcode is read in
SOM::SOM( NetParams * n, Mapcode * map )
{
        netParams = *n;

        _learningAlgorithm = LearningFactory::make( n->getLearnType(), n->getMapcode()->getTopolParams() );

        netParams.setMapcode( map );
}


SOM::SOM()
{
        getMapcode()->setDimension( DTWO );
        TopolParams Default_TopolParam( Default_DimensionType );
        setLearnType( Default_LearningType );
        netParams.setLearningType( BATCH );

        _learningAlgorithm = LearningFactory::make( Default_LearningType, Default_TopolParam );
}

SOM::~SOM() {
        delete _learningAlgorithm;   // the learning algorithm comes from LearningFactory::make, so delete (not free) it
}


LearningAlgorithm *
SOM::getLearningAlgorithm() { return _learningAlgorithm; }

void
SOM::Learning() {
        getLearningAlgorithm()->Learning( netParams );
        //Quality();
}

int
SOM::EECNI( int fi ) {
        return getLearningAlgorithm()->EECNI( netParams, fi );
}

void
SOM::RandomInit() {
        TVector vector_min;
        TVector vector_max;
        TVector vector_inc;
        int i, j;
        int d = netParams.getData()->getDimension();
        int n = netParams.getData()->getDataSize();
        int m = netParams.getMapcode()->getMapSize();

        TMatrix entries  = netParams.getData()->getEntries();
        TMatrix codebook = netParams.getMapcode()->getCodebook();

        // create vectors
        vector_min = create_vector( 1, d );
        vector_max = create_vector( 1, d );
        vector_inc = create_vector( 1, d );

        // init vectors with the first data entry
        for (i=1; i<=d; i++) {
                vector_min[i] = entries[1][i];
                vector_max[i] = entries[1][i];
        }

        // find the per-dimension minimum and maximum over all data entries
        for (i=2; i<=n; i++) {
                for (j=1; j<=d; j++) {
                        if ( entries[i][j] < vector_min[j] ) vector_min[j] = entries[i][j];
                        if ( entries[i][j] > vector_max[j] ) vector_max[j] = entries[i][j];
                }
        }

        // init the increment vector
        for (i=1; i<=d; i++) vector_inc[i] = (vector_max[i] - vector_min[i])/(m - 1);

        // build the initial codebook by simple interpolation between min and max
        for (i=1; i<=d; i++) codebook[1][i] = vector_min[i];
        for (i=1; i<=d; i++) codebook[m][i] = vector_max[i];
        for (i=2; i<=(m-1); i++)
                for (j=1; j<=d; j++)
                        codebook[i][j] = vector_min[j] + (i-1)*vector_inc[j];

        // free memory
        free_vector( vector_min, 1, d );
        free_vector( vector_max, 1, d );
        free_vector( vector_inc, 1, d );
}
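/*
 * Worked example of the interpolation above (illustrative values): with d = 1,
 * m = 3 and data values {2, 5, 8}, vector_min = 2, vector_max = 8 and
 * vector_inc = 3, so the initial codebook rows are 2, 5 and 8.
 */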

void
SOM::LinearInit() {
        int i, j, k;
        Value_Type soma = 0.0;          // running sum ("soma" = sum)

        int d                   = netParams.getData()->getDimension();   // data dimension
        int m                   = netParams.getMapcode()->getMapSize();
        int n                   = netParams.getData()->getDataSize();
        TMatrix entries         = netParams.getData()->getEntries();
        TMatrix codebook        = netParams.getMapcode()->getCodebook();
        TVector mean_vector;
        TMatrix Data_tmp;
        TMatrix A;                      // covariance matrix of the (mean-centered) data

        // init A, Data_tmp and mean_vector
        A = create_matrix( 1, d, 1, d );
        for (i=1; i<=d; i++) for (j=1; j<=d; j++) A[i][j] = 0.0;
        mean_vector = create_vector( 1, d );
        for (i=1; i<=d; i++) mean_vector[i] = 0.0;

        Data_tmp = create_matrix( 1, n, 1, d );
        for (i=1; i<=n; i++) for (j=1; j<=d; j++) Data_tmp[i][j] = entries[i][j];

        // mean vector of the data
        for (j=1; j<=d; j++) for (i=1; i<=n; i++) mean_vector[j] += entries[i][j];
        for (i=1; i<=d; i++) mean_vector[i] = mean_vector[i]/n;

        // center the data
        for (i=1; i<=n; i++) for (j=1; j<=d; j++) Data_tmp[i][j] -= mean_vector[j];

        // covariance matrix A
        for (i=1; i<=d; i++)
                for (j=i; j<=d; j++) {
                        for (k=1; k<=n; k++) soma += Data_tmp[k][i]*Data_tmp[k][j];
                        A[i][j] = A[j][i] = soma/n;
                        soma = 0.0;
                }

        TVector eigenvalues  = create_vector( 1, d );
        TMatrix eigenvectors = create_matrix( 1, d, 1, d );
        int nrot = 0;

        jacobi( A, d, eigenvalues, eigenvectors, &nrot );
        eigsrt( eigenvalues, eigenvectors, d );

        // scale each eigenvector by the square root of its eigenvalue
        for (i=1; i<=d; i++)
                for (j=1; j<=d; j++)
                        eigenvectors[j][i] = eigenvectors[j][i]*sqrt( eigenvalues[i] );

        //eigenvectors[1][1] *= -1.0;
        //eigenvectors[2][1] *= -1.0;
        //eigenvectors[3][1] *= -1.0;

        eigenvalue = eigenvalues;   // keep the sorted eigenvalues in the member

        // start every codebook vector at the data mean
        for (i=1; i<=d; i++)
                for (j=1; j<=m; j++)
                        codebook[j][i] = mean_vector[i];

        int dimension = netParams.getMapcode()->getDimension();
        TMatrix const_lininit = getLearningAlgorithm()->getTopology()->LinInitCoords( m, netParams.getMapcode()->getDimensions() );

        // spread the units along the principal axes of the data
        for (i=1; i<=m; i++)
                for (j=1; j<=dimension; j++)
                        for (k=1; k<=d; k++)
                                codebook[i][k] += eigenvectors[k][j]*const_lininit[i][j];
}
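/*
 * In formula form, a sketch of what the loops above do: LinearInit places unit i at
 *
 *     m_i = mean + sum_j  sqrt(lambda_j) * e_j * c_ij
 *
 * where lambda_j / e_j are the eigenvalues / eigenvectors of the data covariance
 * matrix and c_ij = const_lininit[i][j] are the lattice coordinates returned by
 * LinInitCoords(); i.e. the map is stretched along the principal axes of the data.
 */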

void
SOM::InitMapcode() {
        netParams.setDelta( getLearningAlgorithm()->getTopology()->CreateDelta( netParams.getMapcode()->getDimensions(), netParams.getMapcode()->getLattice(), netParams.getMapcode()->getMapSize() ) );
        switch( netParams.getInitType() ) {
        case RANDOMIZED:        RandomInit();
                                break;
        case LINEAR:            LinearInit();
                                break;
        case WINDOW:            WindowInit();
                                break;
        }
}

void
SOM::setMapcode( Mapcode * mapcode )
{
        netParams.setMapcode( mapcode );
}


void
SOM::setData( SOMData * data )
{
        netParams.setData( data );
}


void
SOM::setDelta( TMatrix delta )
{
        netParams.setDelta( delta );
}


TMatrix
SOM::getDelta() const
{
        return netParams.getDelta();
}


Mapcode *
SOM::getMapcode() const
{
        return netParams.getMapcode();
}


SOMData *
SOM::getData() const
{
        return netParams.getData();
}


int
SOM::getNumIterations()
{
        return netParams.getNumIterations();
}


void
SOM::setNumIterations( int i )
{
        netParams.setNumIterations( i );
}


int
SOM::getInitType()
{
        return netParams.getInitType();
}


void
SOM::setInitType( int init )
{
        netParams.setInitType( init );
}


Value_Type
SOM::getInitLearningRate()
{
        return netParams.getInitLearningRate();
}


void
SOM::setInitLearningRate( Value_Type init_rate )
{
        netParams.setInitLearningRate( init_rate );
}


Value_Type
SOM::getInitNeighbor()
{
        return netParams.getInitNeighbor();
}


void
SOM::setInitNeighbor( Value_Type init_neighbor )
{
        netParams.setInitNeighbor( init_neighbor );
}


int
SOM::getLearningType()
{
        return netParams.getLearningType();
}


void
SOM::setLearningType( int t )
{
        netParams.setLearningType( t );
}


void
SOM::setLearnType( string str )
{
        netParams.setLearnType( str );

        //_learningAlgorithm = LearningFactory::make( getLearnType(), getMapcode()->getTopolParams() );
}


string
SOM::getLearnType()
{
        return netParams.getLearnType();
}


TMatrix
SOM::getCities()
{
        return netParams.getData()->getEntries();
}

void
SOM::WindowInit() {
        // Places the m codebook vectors evenly, clockwise, along the frame (bounding
        // rectangle) of the cities: up the west side, along the north side, down the
        // east side and back along the south side.
        int i;
        int d = netParams.getData()->getDimension();
        int n = netParams.getData()->getDataSize();    // total number of cities
        int m = netParams.getMapcode()->getMapSize();  // total number of neurons on the torus
        int total = m;
        Value_Type x_min, x_max, y_min, y_max;

        TMatrix entries  = netParams.getData()->getEntries();
        TMatrix codebook = netParams.getMapcode()->getCodebook();

        // smallest and largest x coordinate over all the cities
        x_min = entries[1][1];
        x_max = entries[1][1];
        for (i=2; i<=n; i++) {
                if (entries[i][1] < x_min) x_min = entries[i][1];
                if (entries[i][1] > x_max) x_max = entries[i][1];
        }
        // smallest and largest y coordinate over all the cities
        y_min = entries[1][2];
        y_max = entries[1][2];
        for (i=2; i<=n; i++) {
                if (entries[i][2] < y_min) y_min = entries[i][2];
                if (entries[i][2] > y_max) y_max = entries[i][2];
        }

        // create the frame mapcode based on the x_min, x_max, y_min, y_max coordinates

        Value_Type perimeter = 2*(x_max - x_min) + 2*(y_max - y_min);  // length of the frame
        Value_Type space = perimeter/total;                            // spacing between nodes on the frame
        char side = 'W';                                               // current side of the frame
        Value_Type x_factor, y_factor, rest_factor,                    // variables that regulate the coordinates of
                   x_cur_node, y_cur_node;                             // the nodes on the frame during initialization
        int half_path = 1;                                             // direction: +1 on the W/N sides, -1 on the E/S sides
        x_factor = 0.0;
        y_factor = space;
        rest_factor = 0.0;
        int current_node = 1;   // codebook rows are 1-based (see create_matrix( 1, m, ... ))

        x_cur_node = x_min;
        y_cur_node = y_min;

        // west side: go up from (x_min, y_min) to (x_min, y_max)
        while (side == 'W')
        {
                codebook[current_node][1] = x_cur_node;
                codebook[current_node][2] = y_cur_node;
                //cout << current_node << ':' << x_cur_node << ',' << y_cur_node << '\n';
                y_cur_node += space*half_path;
                if (y_cur_node > y_max)
                {
                        side = 'N';
                        rest_factor = y_cur_node - y_max;
                }
                current_node++;
        }

        x_cur_node = x_min + rest_factor;
        y_cur_node = y_max;

        // north side: go right from (x_min, y_max) to (x_max, y_max)
        while (side == 'N')
        {
                codebook[current_node][1] = x_cur_node;
                codebook[current_node][2] = y_cur_node;
                //cout << current_node << ':' << x_cur_node << ',' << y_cur_node << '\n';
                x_cur_node += space*half_path;
                if (x_cur_node > x_max)
                {
                        side = 'E';
                        rest_factor = x_cur_node - x_max;
                }
                current_node++;
        }

        x_cur_node = x_max;
        y_cur_node = y_max - rest_factor;

        half_path = -1;

        // east side: go down from (x_max, y_max) to (x_max, y_min)
        while (side == 'E')
        {
                codebook[current_node][1] = x_cur_node;
                codebook[current_node][2] = y_cur_node;
                //cout << current_node << ':' << x_cur_node << ',' << y_cur_node << '\n';
                y_cur_node += space*half_path;
                if (y_cur_node < y_min)
                {
                        side = 'S';
                        rest_factor = y_min - y_cur_node;
                }
                current_node++;
        }

        x_cur_node = x_max - rest_factor;
        y_cur_node = y_min;

        // south side: go left along y_min until all nodes are placed
        while (side == 'S')
        {
                codebook[current_node][1] = x_cur_node;
                codebook[current_node][2] = y_cur_node;
                //cout << current_node << ':' << x_cur_node << ',' << y_cur_node << '\n';
                x_cur_node += space*half_path;
                if (current_node >= total)
                {
                        side = 'F';
                }
                current_node++;
        }
}


#endif
