PLearn 0.1
|
00001 // -*- C++ -*- 00002 00003 // StackedModulesLearner.cc 00004 // 00005 // Copyright (C) 2006 Pascal Lamblin 00006 // 00007 // Redistribution and use in source and binary forms, with or without 00008 // modification, are permitted provided that the following conditions are met: 00009 // 00010 // 1. Redistributions of source code must retain the above copyright 00011 // notice, this list of conditions and the following disclaimer. 00012 // 00013 // 2. Redistributions in binary form must reproduce the above copyright 00014 // notice, this list of conditions and the following disclaimer in the 00015 // documentation and/or other materials provided with the distribution. 00016 // 00017 // 3. The name of the authors may not be used to endorse or promote 00018 // products derived from this software without specific prior written 00019 // permission. 00020 // 00021 // THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR 00022 // IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 00023 // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN 00024 // NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 00025 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 00026 // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 00027 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 00028 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 00029 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 00030 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 00031 // 00032 // This file is part of the PLearn library. For more information on the PLearn 00033 // library, go to the PLearn Web site at www.plearn.org 00034 00035 // Authors: Pascal Lamblin 00036 00040 #include "StackedModulesLearner.h" 00041 #include <plearn/math/PRandom.h> 00042 #include <plearn_learners/online/OnlineLearningModule.h> 00043 #include <plearn_learners/online/SquaredErrModule.h> 00044 #include <plearn_learners/online/NLLErrModule.h> 00045 00046 namespace PLearn { 00047 using namespace std; 00048 00049 PLEARN_IMPLEMENT_OBJECT( 00050 StackedModulesLearner, 00051 "Trains a stack of OnlineLearningModule, which are layers.", 00052 "The OnlineLearningModule's are disposed like superposed layers:\n" 00053 "outputs of module i are the inputs of module (i+1), the last layer is\n" 00054 "the output layer.\n" 00055 "Another TVec of modules contains the cost modules. The first one is" 00056 " used\n" 00057 "during the training phase as the cost to minimize, the other ones are" 00058 " only\n" 00059 "measured.\n"); 00060 00061 StackedModulesLearner::StackedModulesLearner() 00062 : cost_funcs( 1, "mse" ), 00063 hessian_estimation( "none" ), 00064 nmodules( 0 ), 00065 ncosts( 1 ) 00066 { 00067 random_gen = new PRandom(); 00068 } 00069 00070 void StackedModulesLearner::declareOptions(OptionList& ol) 00071 { 00072 declareOption(ol, "modules", &StackedModulesLearner::modules, 00073 OptionBase::buildoption, 00074 "Layers of the learner"); 00075 00076 declareOption(ol, "cost_funcs", &StackedModulesLearner::cost_funcs, 00077 OptionBase::buildoption, 00078 "Names of the cost functions to apply on output. First one\n" 00079 "will be used ass the cost function to optimize. For the" 00080 " moment,\n" 00081 "supported value are:\n" 00082 " - \"mse\" (default)\n" 00083 " - \"NLL\"\n"); 00084 00085 declareOption(ol, "hessian_estimation", 00086 &StackedModulesLearner::hessian_estimation, 00087 OptionBase::buildoption, 00088 "Estimation of the second-order terms. One of:\n" 00089 " - \"none\": using only first-order derivative for" 00090 " update,\n" 00091 " - \"diag\": estimating the diagonal of the hessian,\n" 00092 " - \"simpler_diag\": positive estimation of the diagonal\n" 00093 ); 00094 00095 declareOption(ol, "random_gen", &StackedModulesLearner::random_gen, 00096 OptionBase::buildoption, 00097 "Random numbers generator."); 00098 00099 declareOption(ol, "nmodules", &StackedModulesLearner::nmodules, 00100 OptionBase::learntoption, 00101 "Number of module layers"); 00102 00103 declareOption(ol, "cost_modules", &StackedModulesLearner::cost_modules, 00104 OptionBase::learntoption, 00105 "Modules that will compute the costs"); 00106 00107 declareOption(ol, "ncosts", &StackedModulesLearner::ncosts, 00108 OptionBase::learntoption, 00109 "Number of cost modules"); 00110 00111 // Now call the parent class' declareOptions 00112 inherited::declareOptions(ol); 00113 } 00114 00115 void StackedModulesLearner::build_() 00116 { 00117 // ### This method should do the real building of the object, 00118 // ### according to set 'options', in *any* situation. 00119 // ### Typical situations include: 00120 // ### - Initial building of an object from a few user-specified options 00121 // ### - Building of a "reloaded" object: i.e. from the complete set of 00122 // ### all serialised options. 00123 // ### - Updating or "re-building" of an object after a few "tuning" 00124 // ### options have been modified. 00125 // ### You should assume that the parent class' build_() has already been 00126 // ### called. 00127 00128 // build the layers 00129 buildOptions(); 00130 00131 // initialize random generator from seed 00132 random_gen->manual_seed( seed_ ); 00133 00134 // if train_set is not set, we don't know inputsize nor targetsize 00135 if( inputsize_ >= 0 ) // we don't use inputsize() because it crashes if <0 00136 buildLayers(); 00137 00138 // build cost functions 00139 buildCostFunctions(); 00140 } 00141 00142 void StackedModulesLearner::buildOptions() 00143 { 00144 nmodules = modules.length(); 00145 ncosts = cost_funcs.length(); 00146 00147 // check length of cost_modules 00148 int ncm = cost_modules.length(); 00149 if( ncm != 0 && ncm != ncosts ) 00150 PLWARNING( "StackedModulesLearner::buildOptions(): 'cost_modules' is" 00151 " set,\n" 00152 "but its length differ from 'cost_funcs.length()'" 00153 " (%d != %d).\n", 00154 ncm, ncosts); 00155 cost_modules.resize( ncosts ); 00156 00157 // check string's values 00158 for( int i=0 ; i<ncosts ; i++ ) 00159 { 00160 string cf = lowerstring( cost_funcs[i] ); 00161 if( cf == "nll" ) 00162 cost_funcs[i] = "NLL"; 00163 else if( cf == "mse" || cf == "" ) 00164 cost_funcs[i] = "mse"; 00165 else 00166 PLERROR( "StackedModulesLearner::buildOptions(): cost function\n" 00167 "'%s' is unknown.\n", cost_funcs[i].c_str() ); 00168 } 00169 00170 // hessian estimation 00171 string h_est = lowerstring( hessian_estimation ); 00172 if( h_est == "none" || h_est == "" ) 00173 hessian_estimation = "none"; 00174 else if( h_est == "diag" ) 00175 hessian_estimation = h_est; 00176 else if( h_est == "simpler_diag" ) 00177 hessian_estimation = h_est; 00178 else 00179 PLERROR( "StackedModulesLearner::buildOptions(): hessian_estimation\n" 00180 "value '%s' is unknown.\n", hessian_estimation.c_str() ); 00181 } 00182 00183 void StackedModulesLearner::buildCostFunctions() 00184 { 00185 // build cost functions 00186 for( int i=0 ; i<ncosts ; i++ ) 00187 { 00188 string cf = cost_funcs[i]; 00189 if( cf == "mse" ) 00190 { 00191 PP<SquaredErrModule> p_mse; 00192 // if the first module is not already a SquaredErrModule, 00193 // allocate a new one 00194 if( !(p_mse = dynamic_cast<SquaredErrModule*>( 00195 (OnlineLearningModule*) cost_modules[i] )) ) 00196 { 00197 p_mse = new SquaredErrModule(); 00198 cost_modules[i] = p_mse; 00199 } 00200 } 00201 else if( cf == "NLL" ) 00202 { 00203 PP<NLLErrModule> p_nll; 00204 // if the first module is not already a NLLErrModule, 00205 // allocate a new one 00206 if( !(p_nll = dynamic_cast<NLLErrModule*>( 00207 (OnlineLearningModule*) cost_modules[i] )) ) 00208 { 00209 p_nll = new NLLErrModule(); 00210 cost_modules[i] = p_nll; 00211 } 00212 } 00213 00214 cost_modules[i]->input_size = outputsize(); 00215 if( hessian_estimation == "diag" ) 00216 cost_modules[i]->estimate_simpler_diag_hessian = false; 00217 else 00218 cost_modules[i]->estimate_simpler_diag_hessian = true; 00219 cost_modules[i]->build(); 00220 } 00221 } 00222 00223 void StackedModulesLearner::buildLayers() 00224 { 00225 // first values will be "input" values 00226 int size = inputsize(); 00227 values.resize( nmodules+1 ); 00228 values[0].resize( size ); 00229 gradients.resize( nmodules+1 ); 00230 gradients[0].resize( size ); 00231 if( hessian_estimation != "none" ) 00232 { 00233 diag_hessians.resize( nmodules+1 ); 00234 diag_hessians[0].resize( size ); 00235 } 00236 00237 for( int i=0 ; i<nmodules ; i++ ) 00238 { 00239 PP<OnlineLearningModule> p_module = modules[i]; 00240 if( p_module->input_size != size ) 00241 { 00242 PLWARNING( "StackedModulesLearner::buildLayers(): module '%d'\n" 00243 "has an input size of '%d', but previous layer's output" 00244 " size\n" 00245 "is '%d'. Resizing module '%d'.\n", 00246 i, p_module->input_size, size, i); 00247 p_module->input_size = size; 00248 } 00249 00250 if( hessian_estimation == "diag" ) 00251 p_module->estimate_simpler_diag_hessian = false; 00252 else 00253 p_module->estimate_simpler_diag_hessian = true; 00254 00255 p_module->build(); 00256 00257 size = p_module->output_size; 00258 values[i+1].resize( size ); 00259 gradients[i+1].resize( size ); 00260 if( hessian_estimation != "none" ) 00261 diag_hessians[i+1].resize( size ); 00262 } 00263 } 00264 00265 // ### Nothing to add here, simply calls build_ 00266 void StackedModulesLearner::build() 00267 { 00268 inherited::build(); 00269 build_(); 00270 } 00271 00272 00273 void StackedModulesLearner::makeDeepCopyFromShallowCopy(CopiesMap& copies) 00274 { 00275 inherited::makeDeepCopyFromShallowCopy(copies); 00276 00277 // ### Call deepCopyField on all "pointer-like" fields 00278 // ### that you wish to be deepCopied rather than 00279 // ### shallow-copied. 00280 // ### ex: 00281 // deepCopyField(trainvec, copies); 00282 00283 deepCopyField(modules, copies); 00284 deepCopyField(cost_funcs, copies); 00285 deepCopyField(random_gen, copies); 00286 deepCopyField(cost_modules, copies); 00287 deepCopyField(values, copies); 00288 deepCopyField(gradients, copies); 00289 deepCopyField(diag_hessians, copies); 00290 } 00291 00292 00293 int StackedModulesLearner::outputsize() const 00294 { 00295 // Compute and return the size of this learner's output (which typically 00296 // may depend on its inputsize(), targetsize() and set options). 00297 if( nmodules < 0 || values.length() <= nmodules ) 00298 return -1; 00299 else 00300 return values[ nmodules ].length(); 00301 } 00302 00303 void StackedModulesLearner::forget() 00304 { 00308 00315 random_gen->manual_seed( seed_ ); 00316 00317 // reset inputs 00318 values[0].clear(); 00319 gradients[0].clear(); 00320 if( hessian_estimation != "none" ) 00321 diag_hessians[0].clear(); 00322 00323 // reset modules and outputs 00324 for( int i=0 ; i<nmodules ; i++ ) 00325 { 00326 modules[i]->forget(); 00327 values[i+1].clear(); 00328 gradients[i+1].clear(); 00329 if( hessian_estimation != "none" ) 00330 diag_hessians[i+1].clear(); 00331 } 00332 00333 stage = 0; 00334 } 00335 00336 void StackedModulesLearner::train() 00337 { 00338 // The role of the train method is to bring the learner up to 00339 // stage==nstages, updating train_stats with training costs measured 00340 // on-line in the process. 00341 00342 /* TYPICAL CODE: 00343 00344 static Vec input; // static so we don't reallocate memory each time... 00345 static Vec target; // (but be careful that static means shared!) 00346 input.resize(inputsize()); // the train_set's inputsize() 00347 target.resize(targetsize()); // the train_set's targetsize() 00348 real weight; 00349 00350 // This generic PLearner method does a number of standard stuff useful for 00351 // (almost) any learner, and return 'false' if no training should take 00352 // place. See PLearner.h for more details. 00353 if (!initTrain()) 00354 return; 00355 00356 while(stage<nstages) 00357 { 00358 // clear statistics of previous epoch 00359 train_stats->forget(); 00360 00361 //... train for 1 stage, and update train_stats, 00362 // using train_set->getExample(input, target, weight) 00363 // and train_stats->update(train_costs) 00364 00365 ++stage; 00366 train_stats->finalize(); // finalize statistics for this epoch 00367 } 00368 */ 00369 00370 Vec input( inputsize() ); 00371 Vec target( targetsize() ); 00372 real weight; 00373 Vec train_costs( ncosts ); 00374 Vec output( outputsize() ); 00375 int nsamples = train_set->length(); 00376 00377 if( !initTrain() ) 00378 return; 00379 00380 for( ; stage < nstages ; stage++ ) 00381 { 00382 // clear stats of previous epoch 00383 train_stats->forget(); 00384 for( int sample=0 ; sample < nsamples ; sample++ ) 00385 { 00386 train_set->getExample( sample, input, target, weight ); 00387 00388 // fprop 00389 computeOutputAndCosts(input, target, output, train_costs); 00390 output.append( target ); 00391 00392 // bprop 00393 Vec out_gradient(1,1); // the gradient wrt the cost is '1' 00394 Vec out_dh(1); // the hessian wrt the cost is '0' 00395 00396 if( hessian_estimation != "none" ) // bbpropUpdate 00397 { 00398 cost_modules[0]->bbpropUpdate( output, 00399 train_costs.subVec(0,1), 00400 gradients[ nmodules ], 00401 out_gradient, 00402 diag_hessians[ nmodules ], 00403 out_dh ); 00404 00405 for( int i=nmodules-1 ; i>=0 ; i-- ) 00406 modules[i]->bbpropUpdate( values[i], values[i+1], 00407 gradients[i], gradients[i+1], 00408 diag_hessians[i], 00409 diag_hessians[i+1] ); 00410 } 00411 else // bpropUpdate 00412 { 00413 cost_modules[0]->bpropUpdate( output, 00414 train_costs.subVec(0,1), 00415 gradients[ nmodules ], 00416 out_gradient ); 00417 00418 for( int i=nmodules-1 ; i>=0 ; i-- ) 00419 modules[i]->bpropUpdate( values[i], values[i+1], 00420 gradients[i], gradients[i+1] ); 00421 } 00422 00423 train_stats->update( train_costs ); 00424 } 00425 train_stats->finalize(); // finalize statistics for this epoch 00426 } 00427 } 00428 00429 00430 void StackedModulesLearner::computeOutput(const Vec& input, Vec& output) const 00431 { 00432 values[0] << input; 00433 00434 // fprop 00435 for( int i=0 ; i<nmodules ; i++ ) 00436 modules[i]->fprop( values[i], values[i+1] ); 00437 00438 output.resize( outputsize() ); 00439 output << values[ nmodules ]; 00440 } 00441 00442 void StackedModulesLearner::computeCostsFromOutputs(const Vec& input, 00443 const Vec& output, 00444 const Vec& target, 00445 Vec& costs) const 00446 { 00447 Vec out_tgt = output.copy(); 00448 out_tgt.append( target ); 00449 for( int i=0 ; i<ncosts ; i++ ) 00450 { 00451 Vec cost(1); 00452 cost_modules[i]->fprop( out_tgt, cost ); 00453 costs[i] = cost[0]; 00454 } 00455 } 00456 00457 TVec<string> StackedModulesLearner::getTestCostNames() const 00458 { 00459 // Return the names of the costs computed by computeCostsFromOutputs 00460 return cost_funcs; 00461 } 00462 00463 TVec<string> StackedModulesLearner::getTrainCostNames() const 00464 { 00465 // Return the names of the objective costs that the train method computes 00466 // and for which it updates the VecStatsCollector train_stats 00467 return cost_funcs; 00468 } 00469 00470 00471 } // end of namespace PLearn 00472 00473 00474 /* 00475 Local Variables: 00476 mode:c++ 00477 c-basic-offset:4 00478 c-file-style:"stroustrup" 00479 c-file-offsets:((innamespace . 0)(inline-open . 0)) 00480 indent-tabs-mode:nil 00481 fill-column:79 00482 End: 00483 */ 00484 // vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :