PLearn 0.1
ModulesLearner.cc
// -*- C++ -*-

// ModulesLearner.cc
//
// Copyright (C) 2006 Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. The name of the authors may not be used to endorse or promote
// products derived from this software without specific prior written
// permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin

#include "ModulesLearner.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    ModulesLearner,
    "Trains an OnlineLearningModule wrt the cost of a CostModule.",
    "The CostModule provides the output gradient to train the\n"
    "OnlineLearningModule.\n"
    "In order to stack layers, you can use ModuleStackModule,\n"
    "and in order to compute several costs, you can use CombinedCostsModule.\n"
);

ModulesLearner::ModulesLearner()
    : hessian_estimation( "none" )
{
    random_gen = new PRandom();
}

void ModulesLearner::declareOptions(OptionList& ol)
{
    declareOption(ol, "module", &ModulesLearner::module,
                  OptionBase::buildoption,
                  "The module to train");

    declareOption(ol, "cost", &ModulesLearner::cost,
                  OptionBase::buildoption,
                  "The cost module");

    declareOption(ol, "hessian_estimation",
                  &ModulesLearner::hessian_estimation,
                  OptionBase::buildoption,
                  "Estimation of the second-order terms. One of:\n"
                  " - \"none\": using only first-order derivative for"
                  " update,\n"
                  " - \"diag\": estimating the diagonal of the hessian,\n"
                  " - \"simpler_diag\": positive estimation of the diagonal\n"
                  );

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}
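
// Illustrative usage sketch (comment added here, not part of the original
// source): a ModulesLearner is configured through the three build options
// declared above. The snippet below is only a rough sketch; 'my_module'
// (a PP<OnlineLearningModule>), 'my_cost' (a PP<CostModule>) and 'trainset'
// (a VMat) are hypothetical names, and it assumes 'module' and 'cost' are
// public build options, as is the usual PLearn layout.
//
//     PP<ModulesLearner> learner = new ModulesLearner();
//     learner->module = my_module;        // OnlineLearningModule to train
//     learner->cost   = my_cost;          // CostModule providing the gradient
//     learner->setOption("hessian_estimation", "diag"); // or "none" / "simpler_diag"
//     learner->setTrainingSet(trainset);  // provides inputsize() / targetsize()
//     learner->build();
//     learner->train();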

void ModulesLearner::build_()
{
    // hessian estimation
    string h_est = lowerstring( hessian_estimation );
    if( h_est == "none" || h_est == "" )
        hessian_estimation = "none";
    else if( h_est == "diag" )
        hessian_estimation = h_est;
    else if( h_est == "simpler_diag" )
        hessian_estimation = h_est;
    else
        PLERROR( "ModulesLearner::build_(): hessian_estimation\n"
                 "value '%s' is unknown.\n", hessian_estimation.c_str() );

    if( hessian_estimation == "diag" )
        cost->estimate_simpler_diag_hessian = false;
    else
        cost->estimate_simpler_diag_hessian = true;

    // Assign random_gen to module and cost, unless they already have one
    if( !(module->random_gen) )
    {
        module->random_gen = random_gen;
        module->forget();
    }
    if( !(cost->random_gen) )
    {
        cost->random_gen = random_gen;
        cost->forget();
    }

    // if train_set is not set, we don't know inputsize nor targetsize
    if( inputsize_ >= 0 ) // we don't use inputsize() because it crashes if <0
    {
        module->input_size = inputsize();
        module->build();

        output.resize( outputsize() );
        d_output.resize( outputsize() );
        if( hessian_estimation != "none" )
            d2_output.resize( outputsize() );

        cost->input_size = outputsize();
        cost->target_size = targetsize();
        cost->build();

        costs.resize( cost->output_size );
    }
}
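
// Note (comment added for clarity): the estimate_simpler_diag_hessian flag set
// in build_() above only has an effect when hessian_estimation != "none",
// since train() then calls bbpropUpdate() on the cost and the module; with
// "none", only the first-order bpropUpdate() path is taken (see train() below).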

// ### Nothing to add here, simply calls build_
void ModulesLearner::build()
{
    inherited::build();
    build_();
}


void ModulesLearner::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // deepCopyField(trainvec, copies);

    deepCopyField(module, copies);
    deepCopyField(cost, copies);
    deepCopyField(output, copies);
    deepCopyField(d_output, copies);
    deepCopyField(d2_output, copies);
    deepCopyField(costs, copies);
}


int ModulesLearner::outputsize() const
{
    // Compute and return the size of this learner's output (which typically
    // may depend on its inputsize(), targetsize() and set options).
    if( module )
        return module->output_size;
    else
        return -1;
}

void ModulesLearner::forget()
{
    random_gen->manual_seed( seed_ );

    // reset temporary vectors
    output.clear();
    d_output.clear();
    if( d2_output )
        d2_output.clear();
    costs.clear();

    // reset module and cost
    module->forget();
    cost->forget();

    stage = 0;
}

void ModulesLearner::train()
{
    // The role of the train method is to bring the learner up to
    // stage==nstages, updating train_stats with training costs measured
    // on-line in the process.

    /* TYPICAL CODE:

    static Vec input;  // static so we don't reallocate memory each time...
    static Vec target; // (but be careful that static means shared!)
    input.resize(inputsize());    // the train_set's inputsize()
    target.resize(targetsize());  // the train_set's targetsize()
    real weight;

    // This generic PLearner method does a number of standard things useful
    // for (almost) any learner, and returns 'false' if no training should
    // take place. See PLearner.h for more details.
    if (!initTrain())
        return;

    while(stage<nstages)
    {
        // clear statistics of previous epoch
        train_stats->forget();

        //... train for 1 stage, and update train_stats,
        // using train_set->getExample(input, target, weight)
        // and train_stats->update(train_costs)

        ++stage;
        train_stats->finalize(); // finalize statistics for this epoch
    }
    */

    Vec input( inputsize() );
    Vec target( targetsize() );
    real weight;
    int nsamples = train_set->length();

    if( !initTrain() )
        return;

    PP<ProgressBar> pb;
    if( report_progress )
        pb = new ProgressBar( "Training " + classname() + " from stage "
                              + tostring(stage) + " to " + tostring(nstages),
                              nstages - stage );

    int initial_stage = stage;
    for( ; stage < nstages ; stage++ )
    {
        // clear stats of previous epoch
        train_stats->forget();
        for( int sample=0 ; sample < nsamples ; sample++ )
        {
            train_set->getExample( sample, input, target, weight );

            // fprop
            module->fprop( input, output );
            cost->fprop( output, target, costs );

            // bprop
            if( hessian_estimation != "none" ) // bbpropUpdate
            {
                cost->bbpropUpdate( output, target, costs[0],
                                    d_output, d2_output );

                module->bbpropUpdate( input, output, d_output, d2_output );
            }
            else // bpropUpdate
            {
                cost->bpropUpdate( output, target, costs[0], d_output );

                module->bpropUpdate( input, output, d_output );
            }

            train_stats->update( costs );
        }
        train_stats->finalize(); // finalize statistics for this epoch

        if(pb)
            pb->update( stage+1 - initial_stage );
    }
}


void ModulesLearner::computeOutput(const Vec& input, Vec& output) const
{
    module->fprop( input, output );
}

void ModulesLearner::computeCostsFromOutputs(const Vec& input,
                                             const Vec& output,
                                             const Vec& target,
                                             Vec& costs) const
{
    cost->fprop( output, target, costs );
}

TVec<string> ModulesLearner::getTestCostNames() const
{
    // Return the names of the costs computed by computeCostsFromOutputs
    return cost->costNames();
}

TVec<string> ModulesLearner::getTrainCostNames() const
{
    // Return the names of the objective costs that the train method computes
    // and for which it updates the VecStatsCollector train_stats
    return cost->costNames();
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :