// -*- C++ -*-

// ManifoldParzen.cc
//
// Copyright (C) 2007 Hugo Larochelle
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Hugo Larochelle

#define PL_LOG_MODULE_NAME "ManifoldParzen"
#include <plearn/io/pl_log.h>

#include "ManifoldParzen.h"
#include <plearn/vmat/VMat_computeNearestNeighbors.h>
#include <plearn/vmat/GetInputVMatrix.h>
#include <plearn_learners/online/GradNNetLayerModule.h>
#include <plearn/math/plapack.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    ManifoldParzen,
    "Manifold Parzen Windows classifier and distribution",
    "");

ManifoldParzen::ManifoldParzen() :
    nneighbors( 1 ),
    ncomponents( 1 ),
    global_lambda0( 0 ),
    learn_mu( false ),
    nclasses( -1 )
{
}

void ManifoldParzen::declareOptions(OptionList& ol)
{
    declareOption(ol, "nneighbors",
                  &ManifoldParzen::nneighbors,
                  OptionBase::buildoption,
                  "Number of nearest neighbors to use to learn "
                  "the manifold structure.\n");

    declareOption(ol, "ncomponents",
                  &ManifoldParzen::ncomponents,
                  OptionBase::buildoption,
                  "Dimensionality of the manifold.\n");

    declareOption(ol, "global_lambda0",
                  &ManifoldParzen::global_lambda0,
                  OptionBase::buildoption,
                  "Additive minimum value for the variance in all directions.\n");

    declareOption(ol, "learn_mu",
                  &ManifoldParzen::learn_mu,
                  OptionBase::buildoption,
                  "Whether to learn mu, the mean offset of each local Gaussian,\n"
                  "estimated as the difference between the average of the\n"
                  "nearest neighbors and the training point itself.\n");

    declareOption(ol, "nclasses",
                  &ManifoldParzen::nclasses,
                  OptionBase::buildoption,
                  "Number of classes. If nclasses = 1, the learner will output\n"
                  "the log-likelihood of a given input. If nclasses > 1,\n"
                  "classification will be performed.\n");

    declareOption(ol, "train_set",
                  &ManifoldParzen::train_set,
                  OptionBase::learntoption,
                  "Training set.\n"
        );

    declareOption(ol, "eigenvalues",
                  &ManifoldParzen::eigenvalues,
                  OptionBase::learntoption,
                  "Eigenvalues of the local covariance matrix estimated at each\n"
                  "training point.\n"
        );

    declareOption(ol, "sigma_noises",
                  &ManifoldParzen::sigma_noises,
                  OptionBase::learntoption,
                  "Isotropic noise variance associated with each training point.\n"
        );

    declareOption(ol, "mus",
                  &ManifoldParzen::mus,
                  OptionBase::learntoption,
                  "Mean offset of the Gaussian centered at each training point.\n"
        );

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

void ManifoldParzen::build_()
{
    // ### This method should do the real building of the object,
    // ### according to set 'options', in *any* situation.
    // ### Typical situations include:
    // ###  - Initial building of an object from a few user-specified options
    // ###  - Building of a "reloaded" object: i.e. from the complete set of
    // ###    all serialised options.
    // ###  - Updating or "re-building" of an object after a few "tuning"
    // ###    options have been modified.
    // ### You should assume that the parent class' build_() has already been
    // ### called.

    MODULE_LOG << "build_() called" << endl;

    if( inputsize_ > 0 )
    {
        // Builds some variables using the training set
        setTrainingSet(train_set, false);

        if( nclasses <= 0 )
            PLERROR("ManifoldParzen::build_() - \n"
                    "nclasses should be > 0.\n");
        test_votes.resize(nclasses);

        if( nneighbors <= 0 )
            PLERROR("ManifoldParzen::build_() - \n"
                    "nneighbors should be > 0.\n");

        if( weightsize_ > 0 )
            PLERROR("ManifoldParzen::build_() - \n"
                    "usage of weighted samples (weight size > 0) is not\n"
                    "implemented yet.\n");

        if( ncomponents < 1 || ncomponents > inputsize_ )
            PLERROR("ManifoldParzen::build_() - \n"
                    "ncomponents should be > 0 and <= inputsize.\n");

        if( global_lambda0 < 0 )
            PLERROR("ManifoldParzen::build_() - \n"
                    "global_lambda0 should be >= 0.\n");
    }
}

// ### Nothing to add here, simply calls build_
void ManifoldParzen::build()
{
    inherited::build();
    build_();
}


void ManifoldParzen::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    // deepCopyField(, copies);

    // Protected options
    deepCopyField(mu, copies);
    deepCopyField(Ut, copies);
    deepCopyField(U, copies);
    deepCopyField(V, copies);
    deepCopyField(diff_neighbor_input, copies);
    deepCopyField(uk, copies);
    deepCopyField(sm_svd, copies);
    deepCopyField(S, copies);
    deepCopyField(diff, copies);
    deepCopyField(eigenvectors, copies);
    deepCopyField(eigenvalues, copies);
    deepCopyField(sigma_noises, copies);
    deepCopyField(mus, copies);
    deepCopyField(class_datasets, copies);
    deepCopyField(nearest_neighbors_indices, copies);
    deepCopyField(test_votes, copies);
}


int ManifoldParzen::outputsize() const
{
    return 1;
}

void ManifoldParzen::forget()
{
    inherited::forget();

    for( int i=0; i < eigenvectors.length(); i++ )
        eigenvectors[i].clear();
    eigenvalues.clear();
    sigma_noises.clear();
    mus.clear();
    stage = 0;
}

void ManifoldParzen::train()
{
    MODULE_LOG << "train() called " << endl;

    Vec input( inputsize() );
    Vec nearest_neighbor( inputsize() );
    Mat nearest_neighbors( nneighbors, inputsize() );
    Vec target( targetsize() );
    Vec target2( targetsize() );
    real weight; // unused
    real weight2; // unused

    TVec<string> train_cost_names = getTrainCostNames();
    Vec train_costs( train_cost_names.length() );
    train_costs.fill(MISSING_VALUE);

    int sample;
    PP<ProgressBar> pb;

    // clear stats of previous epoch
    train_stats->forget();

    if( stage < 1 && nstages > 0 )
    {
        stage = 1;
        MODULE_LOG << "Finding the nearest neighbors" << endl;
        // Find training nearest neighbors
        TVec<int> nearest_neighbors_indices_row;
        nearest_neighbors_indices.resize(train_set->length(), nneighbors);
        if( nclasses > 1 )
            for( int k=0; k<nclasses; k++ )
            {
                for( int i=0; i<class_datasets[k]->length(); i++ )
                {
                    class_datasets[k]->getExample(i,input,target,weight);
                    nearest_neighbors_indices_row = nearest_neighbors_indices(
                        class_datasets[k]->indices[i]);

                    computeNearestNeighbors(
                        new GetInputVMatrix((VMatrix *)class_datasets[k]),input,
                        nearest_neighbors_indices_row,
                        i);
                }
            }
        else
            for( int i=0; i<train_set->length(); i++ )
            {
                train_set->getExample(i,input,target,weight);
                nearest_neighbors_indices_row = nearest_neighbors_indices(i);
                computeNearestNeighbors(
                    train_set,input,
                    nearest_neighbors_indices_row,
                    i);
            }

        train_costs.fill(MISSING_VALUE);

        if( report_progress )
            pb = new ProgressBar( "Training ManifoldParzen",
                                  train_set->length() );

        eigenvectors.resize( train_set->length() );
        eigenvalues.resize( train_set->length(), ncomponents );
        sigma_noises.resize( train_set->length(), 1 );
        mus.resize( train_set->length(), inputsize() );
        mus.clear();
        for( sample = 0; sample < train_set->length(); sample++ )
        {
            train_set->getExample( sample, input, target, weight );

            // Find nearest neighbors
            if( nclasses > 1 )
                for( int k=0; k<nneighbors; k++ )
                {
                    class_datasets[(int)round(target[0])]->getExample(
                        nearest_neighbors_indices(sample,k),
                        nearest_neighbor, target2, weight2);

                    if( round(target[0]) != round(target2[0]) )
                        PLERROR("ManifoldParzen::train(): similar"
                                " example is not from same class!");
                    nearest_neighbors(k) << nearest_neighbor;
                }
            else
                for( int k=0; k<nneighbors; k++ )
                {
                    train_set->getExample(
                        nearest_neighbors_indices(sample,k),
                        nearest_neighbor, target2, weight2);
                    nearest_neighbors(k) << nearest_neighbor;
                }

            if( learn_mu )
            {
                mu.resize(inputsize());
                columnMean( nearest_neighbors, mu );
                mus(sample) << mu;
                mus(sample) -= input;
            }
            substractFromRows(nearest_neighbors, input, false); // Boolean is somehow unused???
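            // The SVD of the centered neighbor matrix provides the principal
            // directions of the local neighborhood: the k-th singular value
            // S[k] yields a local variance of S[k]^2/nneighbors along the
            // eigenvector stored in Ut(k), which together form the low-rank
            // part of this sample's Gaussian component.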
            lapackSVD(nearest_neighbors, Ut, S, V, 'A', 1.5);
            eigenvectors[sample].resize(ncomponents,inputsize());
            for( int k=0; k<ncomponents; k++ )
            {
                eigenvalues(sample,k) = mypow(S[k],2)/nneighbors;
                eigenvectors[sample](k) << Ut(k);
            }
            sigma_noises[sample] = 0; // HUGO: Seems stupid for now, but I keep
                                      // this variable in case I want to use
                                      // the last eigen value or something...

            if( pb )
                pb->update( sample + 1 );
        }
    }

    train_stats->finalize();
    MODULE_LOG << "  train costs = " << train_stats->getMean() << endl;
}

void ManifoldParzen::computeOutput(const Vec& input, Vec& output) const
{
    test_votes.resize(nclasses);
    test_votes.clear();

    // Variables for probability computations
    real log_p_x_g_y = 0;
    real mahal = 0;
    real norm_term = 0;
    real n = inputsize();
    real dotp = 0;
    real coef = 0;
    real sigma_noise = 0;

    Vec input_j(inputsize());
    Vec target(targetsize());
    real weight;

    U.resize( ncomponents, inputsize() );
    sm_svd.resize( ncomponents );
    mu.resize( inputsize() );


    int input_j_index;
    for( int i=0; i<nclasses; i++ )
    {
        for( int j=0;
             j < (nclasses > 1 ?
                  class_datasets[i]->length()
                  : train_set->length());
             j++ )
        {
            if( nclasses > 1 )
            {
                class_datasets[i]->getExample(j,input_j,target,weight);
                input_j_index = class_datasets[i]->indices[j];
            }
            else
            {
                train_set->getExample(j,input_j,target,weight);
                input_j_index = j;
            }

            U << eigenvectors[input_j_index];
            sm_svd << eigenvalues(input_j_index);
            sigma_noise = sigma_noises[input_j_index] + global_lambda0;
            mu << mus(input_j_index);

            substract(input,input_j,diff_neighbor_input);
            substract(diff_neighbor_input,mu,diff);

            // Log-density of a Gaussian whose variance is sigma_noise in every
            // direction except along the ncomponents eigenvectors U(k), where
            // it is sm_svd[k] + global_lambda0: start from the isotropic term,
            // then correct the Mahalanobis distance and the normalization
            // constant along each principal direction.
            mahal = -0.5*pownorm(diff)/sigma_noise;
            norm_term = - n/2.0 * Log2Pi - 0.5*(n-ncomponents)*
                pl_log(sigma_noise);

            for( int k=0; k<ncomponents; k++ )
            {
                uk = U(k);
                dotp = dot(diff,uk);
                coef = (1.0/(sm_svd[k]+global_lambda0) - 1.0/sigma_noise);
                mahal -= dotp*dotp*0.5*coef;
                norm_term -= 0.5*pl_log(sm_svd[k]+global_lambda0);
            }

            if( j==0 )
                log_p_x_g_y = norm_term + mahal;
            else
                log_p_x_g_y = logadd(norm_term + mahal, log_p_x_g_y);
        }

        test_votes[i] = log_p_x_g_y;
    }

    if( nclasses > 1 )
        output[0] = argmax(test_votes);
    else
        output[0] = test_votes[0] - pl_log(train_set->length());
}

void ManifoldParzen::computeCostsFromOutputs(const Vec& input, const Vec& output,
                                             const Vec& target, Vec& costs) const
{
    // Assumes that computeOutput has been called

    costs.resize( getTestCostNames().length() );
    costs.fill( MISSING_VALUE );

    if( nclasses > 1 )
    {
        int target_class = ((int)round(target[0]));
        if( ((int)round(output[0])) == target_class )
            costs[0] = 0;
        else
            costs[0] = 1;
        costs[1] = - test_votes[target_class]
            + pl_log(class_datasets[target_class]->length()); // Must take into account the 1/n normalization
    }
    else
    {
        costs[1] = - output[0]; // 1/n normalization already accounted for
    }
}

TVec<string> ManifoldParzen::getTestCostNames() const
{
    // Return the names of the costs computed by computeCostsFromOutputs
    // (these may or may not be exactly the same as what's returned by
    // getTrainCostNames).

    TVec<string> cost_names(0);

    cost_names.append( "class_error" );
    cost_names.append( "NLL" );

    return cost_names;
}

TVec<string> ManifoldParzen::getTrainCostNames() const
{
    TVec<string> cost_names(0);
    return cost_names;
}

void ManifoldParzen::setTrainingSet(VMat training_set, bool call_forget)
{
    inherited::setTrainingSet(training_set,call_forget);

    // Separate classes
    if( nclasses > 1 )
    {
        class_datasets.resize(nclasses);
        for( int k=0; k<nclasses; k++ )
        {
            class_datasets[k] = new ClassSubsetVMatrix();
            class_datasets[k]->classes.resize(1);
            class_datasets[k]->classes[0] = k;
            class_datasets[k]->source = training_set;
            class_datasets[k]->build();
        }
    }
}

} // end of namespace PLearn


/*
  Local Variables:
    mode:c++
    c-basic-offset:4
    c-file-style:"stroustrup"
    c-file-offsets:((innamespace . 0)(inline-open . 0))
    indent-tabs-mode:nil
    fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
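
Below is a minimal usage sketch, not part of the original file, showing how such a learner is typically driven through the generic PLearner interface. The option values, the wrapping of an in-memory Mat through MemoryVMatrix, and the helper function name are illustrative assumptions rather than code taken from the library's own tests.

// Minimal usage sketch (assumes the last column of 'data' holds the class
// label in {0, ..., nclasses-1}; option values are illustrative only).
#include "ManifoldParzen.h"
#include <plearn/vmat/MemoryVMatrix.h>

using namespace PLearn;

void trainManifoldParzenExample(const Mat& data)  // hypothetical helper
{
    PP<ManifoldParzen> learner = new ManifoldParzen();
    learner->nneighbors     = 10;    // neighbors defining each local Gaussian
    learner->ncomponents    = 5;     // manifold (low-rank) dimensionality
    learner->global_lambda0 = 1e-3;  // additive variance floor
    learner->nclasses       = 2;     // > 1 selects classification mode

    VMat trainset = new MemoryVMatrix(data);
    trainset->defineSizes(data.width() - 1, 1, 0); // inputsize, targetsize, weightsize

    learner->setTrainingSet(trainset);
    learner->nstages = 1;            // train() does all its work at stage 1
    learner->build();
    learner->train();

    // Classify the first training example: output[0] is the predicted class
    // (or the log-likelihood when nclasses == 1).
    Vec output(learner->outputsize());
    learner->computeOutput(data(0).subVec(0, data.width() - 1), output);
}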