PLearn 0.1
// -*- C++ -*-

// OnlineLearningModule.cc
//
// Copyright (C) 2005 Yoshua Bengio
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: .pyskeleton_header 544 2003-09-01 00:05:31Z plearner $
 ******************************************************* */

// Authors: Yoshua Bengio

#include "OnlineLearningModule.h"
#include <plearn/base/RemoteDeclareMethod.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_ABSTRACT_OBJECT(
    OnlineLearningModule,
    "Learn to map inputs to outputs, online, using caller-provided gradients.",
    "This pure virtual class (i.e. an interface) can basically do two things:\n"
    "  * map its inputs to its outputs\n"
    "  * modify itself when told in what direction the output should have\n"
    "    changed (i.e. output gradient), while optionally giving back the\n"
    "    information about how the input should also have changed\n"
    "    (i.e. input gradient)\n"
    "The main methods are the fprop methods (which map inputs to outputs) and\n"
    "the bprop methods (which map output gradients into input gradients and\n"
    "update internal parameters). Changes to options should not occur between\n"
    "an fprop and the corresponding bprop.\n"
);
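// Illustrative sketch of the contract described above (assuming 'module'
// points to a hypothetical concrete subclass that implements the mini-batch
// fprop/bpropUpdate variants):
//
//     Mat inputs, outputs, input_gradients, output_gradients;
//     // ... fill 'inputs' with a mini-batch ...
//     module->fprop(inputs, outputs);             // map inputs to outputs
//     // ... caller fills 'output_gradients' with dC/d(outputs) ...
//     module->bpropUpdate(inputs, outputs,        // update parameters and
//                         input_gradients,        // report how the inputs
//                         output_gradients);      // should have changed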
bool OnlineLearningModule::during_training = false;

//////////////////////////
// OnlineLearningModule //
//////////////////////////
OnlineLearningModule::OnlineLearningModule(const string& the_name,
                                           bool call_build_):
    inherited(call_build_),
    input_size(-1),
    output_size(-1),
    name(the_name),
    estimate_simpler_diag_hessian(false),
    use_fast_approximations(true),
    verbosity(1)
{
    if (call_build_) {
        if (the_name.empty())
            PLERROR("In OnlineLearningModule::OnlineLearningModule - You "
                    "cannot create a new OnlineLearningModule with an empty "
                    "name and call build within the constructor itself");
        build_();
    }
}

///////////
// fprop //
///////////
void OnlineLearningModule::fprop(const Mat& inputs, Mat& outputs)
{
    PLERROR("In OnlineLearningModule::fprop - The mini-batch version of "
            "'fprop' for class '%s' is not implemented. Implementation is "
            "required for safety, to ensure a subsequent call to "
            "'bpropUpdate' can use the correctly updated data",
            classname().c_str());
}

void OnlineLearningModule::fprop(const Vec& inputs, Vec& outputs) const
{
    PLERROR("In OnlineLearningModule::fprop - This variant is deprecated, "
            "use fprop(ports_value)\n");
}

void OnlineLearningModule::fprop(const TVec<Mat*>& ports_value)
{
    PLASSERT( ports_value.length() == nPorts() );
    if (ports_value.length() == 2)
    {
        Mat* m1 = ports_value[0];
        Mat* m2 = ports_value[1];
        if (m1 && m2 && !m1->isEmpty() && m2->isEmpty()) {
            // We can re-use previous code for standard mini-batch fprop.
            fprop(*m1, *m2);
            checkProp(ports_value);
            return;
        }
    }
    PLERROR("In OnlineLearningModule::fprop - Port configuration not "
            "implemented for class '%s'", classname().c_str());
}
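// Sketch of the port-value convention checked by the fprop above (a hedged
// summary inferred from the checks in this file; 'module' and 'batch_size'
// are hypothetical caller-side names): ports_value holds one Mat* per port,
// ordered as in getPorts(). A full matrix provides data, an empty matrix
// asks the module to fill it, and a NULL pointer means the port is unused.
//
//     TVec<Mat*> ports_value(module->nPorts());
//     Mat in_data(batch_size, module->getPortWidth("input"));
//     // ... fill 'in_data' with a mini-batch ...
//     Mat out_data;                                           // empty: requested
//     ports_value[module->getPortIndex("input")]  = &in_data; // full: provided
//     ports_value[module->getPortIndex("output")] = &out_data;
//     module->fprop(ports_value);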
////////////////////
// bpropAccUpdate //
////////////////////
void OnlineLearningModule::bpropAccUpdate(const TVec<Mat*>& ports_value,
                                          const TVec<Mat*>& ports_gradient)
{
    if (ports_gradient.length() == 2) {
        Mat* input_grad = ports_gradient[0];
        Mat* output_grad = ports_gradient[1];
        if (!input_grad && !output_grad) {
            // Nothing to do.
            return;
        }
        if (output_grad && !output_grad->isEmpty() &&
            (!input_grad || input_grad->isEmpty()))
        {
            // We can try to re-use the standard mini-batch bpropUpdate method.
            if (!input_grad) {
                // We are not interested in the input gradient: use a dummy
                // matrix to store it.
                input_grad = &tmpm_input_gradient;
            }
            Mat* input_val = ports_value[0];
            Mat* output_val = ports_value[1];
            PLASSERT( input_val && output_val );
            input_grad->resize(input_val->length(), input_val->width());
            bpropUpdate(*input_val, *output_val, *input_grad, *output_grad,
                        true);
            checkProp(ports_gradient);
            return;
        }
    }
    PLERROR("In OnlineLearningModule::bpropAccUpdate - Port configuration "
            "not implemented for class '%s'", classname().c_str());
}


/////////////////
// bpropUpdate //
/////////////////
void OnlineLearningModule::bpropUpdate(const Vec& input, const Vec& output,
                                       Vec& input_gradient,
                                       const Vec& output_gradient,
                                       bool accumulate)
{
    PLERROR("In OnlineLearningModule.cc: method 'bpropUpdate' not"
            " implemented.\n"
            "Please implement it in your derived class (%s) or do not call"
            " bpropUpdate.", classname().c_str());
}

void OnlineLearningModule::bpropUpdate(const Vec& input, const Vec& output,
                                       const Vec& output_gradient)
{
    bpropUpdate(input, output, tmp_input_gradient, output_gradient);
}

void OnlineLearningModule::bpropUpdate(const Mat& inputs, const Mat& outputs,
                                       Mat& input_gradients,
                                       const Mat& output_gradients,
                                       bool accumulate)
{
    PLERROR("In OnlineLearningModule::bpropUpdate - The mini-batch version of "
            "'bpropUpdate' for class '%s' is not implemented. Implementation "
            "is required since this method must be called immediately after "
            "a 'fprop'", classname().c_str());
}

void OnlineLearningModule::bpropUpdate(const Mat& inputs, const Mat& outputs,
                                       const Mat& output_gradients)
{
    bpropUpdate(inputs, outputs, tmpm_input_gradient, output_gradients);
}

void OnlineLearningModule::bpropUpdate(const TVec<Mat*>& ports_value,
                                       const TVec<Mat*>& ports_gradient)
{
    for (int i = 0; i < ports_gradient.length(); i++) {
        Mat* grad = ports_gradient[i];
        if (grad && grad->isEmpty()) {
            // This gradient must be computed (= cleared + accumulated).
            Mat* val = ports_value[i];
            if (!val)
                PLERROR("In OnlineLearningModule::bpropUpdate - Cannot compute"
                        " the gradient of a port whose value is not available,"
                        " since we cannot easily know its size");
            grad->resize(val->length(), val->width());
            grad->fill(0); // Clear the gradient.
            grad->resize(0, grad->width()); // So it is accumulated later.
        }
    }
    bpropAccUpdate(ports_value, ports_gradient);
}
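// Continuing the port sketch above: ports_gradient follows the same indexing
// as ports_value. A full matrix supplies a known gradient, an empty matrix
// with the correct width asks the module to compute that gradient and
// accumulate into it, and a NULL entry means the gradient is not needed
// (a hedged summary inferred from bpropAccUpdate/bpropUpdate above).
//
//     TVec<Mat*> ports_gradient(module->nPorts());
//     Mat out_grad(out_data.length(), out_data.width()); // fill with dC/d(output)
//     Mat in_grad(in_data.length(), in_data.width());
//     in_grad.resize(0, in_data.width());                // empty, correct width
//     ports_gradient[module->getPortIndex("output")] = &out_grad;
//     ports_gradient[module->getPortIndex("input")]  = &in_grad;
//     module->bpropAccUpdate(ports_value, ports_gradient);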
//////////////////
// bbpropUpdate //
//////////////////
// Default implementations compile but crash at run-time if not implemented
// in sub-classes.
void OnlineLearningModule::bbpropUpdate(const Vec& input, const Vec& output,
                                        const Vec& output_gradient,
                                        const Vec& output_diag_hessian)
{
    bbpropUpdate(input, output, tmp_input_gradient, output_gradient,
                 tmp_input_diag_hessian, output_diag_hessian);
}

void OnlineLearningModule::bbpropUpdate(const Vec& input, const Vec& output,
                                        Vec& input_gradient,
                                        const Vec& output_gradient,
                                        Vec& input_diag_hessian,
                                        const Vec& output_diag_hessian,
                                        bool accumulate)
{
    PLERROR("In OnlineLearningModule.cc: method 'bbpropUpdate' not"
            " implemented.\n"
            "Please implement it in your derived class, or use"
            " 'bpropUpdate'.\n");
}

/////////////////////
// setLearningRate //
/////////////////////
void OnlineLearningModule::setLearningRate( real dynamic_learning_rate )
{
    PLWARNING("In OnlineLearningModule::setLearningRate - The derived class "
              "(%s) does not have a learning rate that can be changed from "
              "outside. If it should have one, please implement "
              "setLearningRate in it", classname().c_str());
}


///////////
// build //
///////////
void OnlineLearningModule::build()
{
    inherited::build();
    build_();
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void OnlineLearningModule::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(random_gen, copies);
    deepCopyField(port_sizes, copies);
    deepCopyField(tmp_input_gradient, copies);
    deepCopyField(tmpm_input_gradient, copies);
    deepCopyField(tmp_input_diag_hessian, copies);
}

////////////////////
// declareOptions //
////////////////////
void OnlineLearningModule::declareOptions(OptionList& ol)
{
    declareOption(ol, "input_size", &OnlineLearningModule::input_size,
                  OptionBase::buildoption,
                  "Size of the input");

    declareOption(ol, "output_size", &OnlineLearningModule::output_size,
                  OptionBase::buildoption,
                  "Size of the output");

    declareOption(ol, "name", &OnlineLearningModule::name,
                  OptionBase::buildoption,
                  "Name of the module (if not provided, the class name is used).");

    declareOption(ol, "use_fast_approximations",
                  &OnlineLearningModule::use_fast_approximations,
                  OptionBase::buildoption,
                  "Use tables to approximate nonlinearities such as sigmoid,"
                  " tanh, and softplus\n");

    declareOption(ol, "estimate_simpler_diag_hessian",
                  &OnlineLearningModule::estimate_simpler_diag_hessian,
                  OptionBase::buildoption,
                  "Should we compute a simpler diagonal estimation of the"
                  " input Hessian\n"
                  "matrix, using only the first (positive) term in:\n"
                  "  d²C/dx² ~= d²C/dy² (dy/dx)² [+ dC/dy d²y/dx²]\n");


    declareOption(ol, "expdir", &OnlineLearningModule::expdir,
                  OptionBase::buildoption,
                  "Path of the directory associated with this module,\n"
                  "in which it should save any file it wishes to create.\n"
                  "The directory will be created if it does not already"
                  " exist.\n"
                  "If expdir is the empty string (the default),\n"
                  "then the module should not create *any* file.\n");

    declareOption(ol, "random_gen",
                  &OnlineLearningModule::random_gen,
                  OptionBase::buildoption,
                  "Pointer to an optional random number generator,\n"
                  "e.g. for initializing parameters or any non-deterministic"
                  " operation\n"
                  "required by the module.\n");

    declareOption(ol, "verbosity", &OnlineLearningModule::verbosity,
                  OptionBase::buildoption,
                  "Controls the level of verbosity of the module.",
                  OptionBase::advanced_level);

    inherited::declareOptions(ol);
}

////////////////////
// declareMethods //
////////////////////
void OnlineLearningModule::declareMethods(RemoteMethodMap& rmm)
{
    // Insert a backpointer to remote methods; note that this is different
    // from declareOptions().
    rmm.inherited(inherited::_getRemoteMethodMap_());

    declareMethod(
        rmm, "getPorts", &OnlineLearningModule::getPorts,
        (BodyDoc("Return the list of port names of the module\n"),
         RetDoc ("The list of port names")));

    declareMethod(
        rmm, "forget", &OnlineLearningModule::forget,
        (BodyDoc("Reset the parameters to the state they would be in before starting training.\n"
                 "This may involve randomization using the random generator.\n")));

    declareMethod(
        rmm, "namedFprop", &OnlineLearningModule::namedFprop,
        (BodyDoc("Perform the fprop computation on an OnlineLearningModule, which takes matrices\n"
                 "in user-selected input ports and computes outputs in user-selected output ports.\n"
                 "The function actually computed by the module depends on the selected ports and\n"
                 "on its internal state (options and parameters).\n"),
         ArgDoc ("inputs", "A dictionary of input matrices (one for each input port), indexed by the port names.\n"),
         ArgDoc ("wanted_outputs", "A list of wanted output port names.\n"),
         RetDoc ("A dictionary of the input and output matrices (indexed by their name).\n")));

    declareMethod(
        rmm, "namedBpropAccUpdate", &OnlineLearningModule::namedBpropAccUpdate,
        (BodyDoc("Perform the bpropAccUpdate computation on an OnlineLearningModule, which\n"
                 "takes matrices in user-selected input ports, output ports, and output\n"
                 "gradient ports and computes gradients for user-selected input ports.\n"
                 "The function actually computed by the module depends on the selected ports\n"
                 "and on its internal state (options and parameters).\n"),
         ArgDoc ("values", "A dictionary of named input and output matrices that was\n"
                 "returned by namedFprop (one entry for each input and output port used).\n"),
         ArgDoc ("gradients", "A dictionary of named output (and possibly input) gradient\n"
                 "matrices (the name indexing each matrix is the name of the corresponding port).\n"
                 "Output gradient matrices should be full, whereas input gradient matrices\n"
                 "into which to accumulate should have length 0 and the correct width.\n"),
         ArgDoc ("additional_input_gradients", "A list of wanted input port names,\n"
                 "for which the gradient is desired (no accumulation).\n"),
         RetDoc ("A dictionary of all the input and output gradient matrices (indexed\n"
                 "by their port name), including those in the gradients argument\n"
                 "and those named in the additional_input_gradients argument.\n")));

    declareMethod(
        rmm, "setLearningRate", &OnlineLearningModule::setLearningRate,
        (BodyDoc("Allows changing the learning rate or an equivalent parameter"),
         ArgDoc ("dynamic_learning_rate",
                 "The value we want for the learning rate")
        ));

}

map<string,Mat> OnlineLearningModule::namedFprop(map<string,Mat>& inputs,
                                                 TVec<string> wanted_outputs)
{
    map<string,Mat> outputs;
    TVec<string> port_names = getPorts();
    TVec<Mat*> ports_value(nPorts());
    map<string,Mat>::iterator it = inputs.begin();
    for (; it != inputs.end(); ++it)
    {
        int port_index = getPortIndex(it->first);
        PLASSERT_MSG(port_index >= 0, "Unknown port name: " + it->first);
        ports_value[port_index] = &it->second;
    }
    for (int i = 0; i < wanted_outputs.length(); i++)
    {
        int port_index = getPortIndex(wanted_outputs[i]);
        PLASSERT_MSG(port_index >= 0, "Unknown port name: " + wanted_outputs[i]);
        ports_value[port_index] = new Mat(0,0);
    }
    fprop(ports_value);
    for (it = inputs.begin(); it != inputs.end(); ++it)
        outputs[it->first] = it->second;
    for (int i = 0; i < wanted_outputs.length(); i++)
        outputs[wanted_outputs[i]] = *ports_value[getPortIndex(wanted_outputs[i])];
    return outputs;
}

map<string,Mat> OnlineLearningModule::namedBpropAccUpdate(map<string,Mat>& values,
                                                          map<string,Mat>& gradients,
                                                          TVec<string> additional_input_gradients)
{
    map<string,Mat> all_gradients;
    TVec<string> port_names = getPorts();
    TVec<Mat*> ports_value(nPorts());
    TVec<Mat*> ports_gradient(nPorts());
    map<string,Mat>::iterator it = values.begin();
    for (; it != values.end(); ++it)
        ports_value[getPortIndex(it->first)] = &it->second;
    it = gradients.begin();
    for (; it != gradients.end(); ++it)
        ports_gradient[getPortIndex(it->first)] = &it->second;
    for (int i = 0; i < additional_input_gradients.length(); i++)
    {
        Mat port_value = values[additional_input_gradients[i]];
        // the additional input gradients are to be initialized as zero matrices
        Mat* port_gradient = new Mat(port_value.length(), port_value.width());
        port_gradient->resize(0, port_value.width());
        ports_gradient[getPortIndex(additional_input_gradients[i])] = port_gradient;
    }
    bpropAccUpdate(ports_value, ports_gradient);
    for (it = gradients.begin(); it != gradients.end(); ++it)
        all_gradients[it->first] = it->second;
    for (int i = 0; i < additional_input_gradients.length(); i++)
        all_gradients[additional_input_gradients[i]] =
            *ports_gradient[getPortIndex(additional_input_gradients[i])];
    return all_gradients;
}
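// Example use of the two named-port helpers above (an illustrative sketch;
// 'module', 'x' and 'y_grad' are hypothetical caller-side variables):
//
//     map<string, Mat> inputs;
//     inputs["input"] = x;                       // one entry per input port
//     TVec<string> wanted;
//     wanted.append("output");
//     map<string, Mat> values = module->namedFprop(inputs, wanted);
//
//     map<string, Mat> gradients;
//     gradients["output"] = y_grad;              // full output gradient
//     TVec<string> more_grads;
//     more_grads.append("input");                // also want dC/d(input)
//     map<string, Mat> all_grads =
//         module->namedBpropAccUpdate(values, gradients, more_grads);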
////////////
// build_ //
////////////
void OnlineLearningModule::build_()
{
    if (name.empty())
        name = classname();
}

///////////////
// checkProp //
///////////////
void OnlineLearningModule::checkProp(const TVec<Mat*>& ports_data)
{
#ifdef BOUNDCHECK
    for (int i = 0; i < ports_data.length(); i++) {
        if (ports_data[i] && ports_data[i]->isEmpty())
            PLERROR("In OnlineLearningModule::checkProp - Data for port '%s' "
                    "of module '%s' (of class '%s') was not properly computed "
                    "(this may have happened at the end of a fprop or a "
                    "bpropAccUpdate)", getPortName(i).c_str(), name.c_str(),
                    classname().c_str());
    }
#endif
}

//////////////////
// getPortIndex //
//////////////////
int OnlineLearningModule::getPortIndex(const string& port)
{
    return getPorts().find(port);
}

/////////////////
// getPortName //
/////////////////
string OnlineLearningModule::getPortName(int i)
{
    return getPorts()[i];
}

//////////////
// getPorts //
//////////////
const TVec<string>& OnlineLearningModule::getPorts() {
    static TVec<string> default_ports;
    if (default_ports.isEmpty()) {
        default_ports.append("input");
        default_ports.append("output");
    }
    return default_ports;
}

//////////////////
// getPortSizes //
//////////////////
const TMat<int>& OnlineLearningModule::getPortSizes() {
    int n_ports = nPorts();
    if (port_sizes.length() != n_ports) {
        port_sizes.resize(n_ports, 2);
        port_sizes.fill(-1);
        if (n_ports >= 2) {
            port_sizes(0, 1) = input_size;
            port_sizes(1, 1) = output_size;
        }
    }
    return port_sizes;
}

///////////////////
// getPortLength //
///////////////////
int OnlineLearningModule::getPortLength(const string& port)
{
    int port_index = getPortIndex(port);
    if (port_index < 0)
        PLERROR("In OnlineLearningModule::getPortLength - Port '%s' not known "
                "by module '%s' of class '%s'",
                port.c_str(), name.c_str(), classname().c_str());
    return getPortSizes()(port_index, 0);
}

//////////////////
// getPortWidth //
//////////////////
int OnlineLearningModule::getPortWidth(const string& port)
{
    PLASSERT( getPortIndex(port) >= 0 );
    return getPortSizes()(getPortIndex(port), 1);
}

////////////
// nPorts //
////////////
int OnlineLearningModule::nPorts()
{
    return getPorts().length();
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :