// -*- C++ -*-

// InferenceRBM.cc
//
// Copyright (C) 2008 Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin

#define PL_LOG_MODULE_NAME "InferenceRBM"

#include "InferenceRBM.h"
#include <plearn/io/pl_log.h>
#include <plearn/base/RemoteDeclareMethod.h>


namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    InferenceRBM,
    "RBM to be used when doing joint supervised learning by CD.",
    "We have input, target and hidden layer. We can compute hidden given\n"
    "(input, target), target given input, or hidden given input."
);

InferenceRBM::InferenceRBM():
    n_gibbs_steps(0),
    input_size(0),
    target_size(0),
    visible_size(0),
    hidden_size(0)
{
}

// ### Nothing to add here, simply calls build_
void InferenceRBM::build()
{
    inherited::build();
    build_();
}

void InferenceRBM::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // deepCopyField(trainvec, copies);

    deepCopyField(input_layer, copies);
    deepCopyField(target_layer, copies);
    deepCopyField(hidden_layer, copies);
    deepCopyField(input_to_hidden, copies);
    deepCopyField(target_to_hidden, copies);
    deepCopyField(random_gen, copies);
    deepCopyField(visible_layer, copies);
    deepCopyField(visible_to_hidden, copies);
    deepCopyField(v0, copies);
    deepCopyField(h0, copies);
}

void InferenceRBM::declareOptions(OptionList& ol)
{
    // declareOption(ol, "myoption", &InferenceRBM::myoption,
    //               OptionBase::buildoption,
    //               "Help text describing this option");

    declareOption(ol, "input_layer", &InferenceRBM::input_layer,
                  OptionBase::buildoption,
                  "Input layer (part of visible)");

    declareOption(ol, "target_layer", &InferenceRBM::target_layer,
                  OptionBase::buildoption,
                  "Target layer (part of visible)");

    declareOption(ol, "hidden_layer", &InferenceRBM::hidden_layer,
                  OptionBase::buildoption,
                  "Hidden layer");

    declareOption(ol, "input_to_hidden", &InferenceRBM::input_to_hidden,
                  OptionBase::buildoption,
                  "Connection between input and hidden layers");

    declareOption(ol, "target_to_hidden", &InferenceRBM::target_to_hidden,
                  OptionBase::buildoption,
                  "Connection between target and hidden layers");

    declareOption(ol, "exp_method", &InferenceRBM::exp_method,
                  OptionBase::buildoption,
                  "How to compute hidden and target expectation given input.\n"
                  "Possible values are:\n"
                  "  - \"exact\": exact inference, O(target_size), default\n"
                  "  - \"gibbs\": estimation by Gibbs sampling\n");

    declareOption(ol, "n_gibbs_steps", &InferenceRBM::n_gibbs_steps,
                  OptionBase::buildoption,
                  "Number of Gibbs steps to use if exp_method==\"gibbs\"");

    declareOption(ol, "random_gen", &InferenceRBM::random_gen,
                  OptionBase::buildoption,
                  "Random numbers generator");

    declareOption(ol, "use_fast_approximations",
                  &InferenceRBM::use_fast_approximations,
                  OptionBase::buildoption,
                  "Whether to use fast approximations in softplus computation");

    declareOption(ol, "visible_layer", &InferenceRBM::visible_layer,
                  OptionBase::learntoption,
                  "Visible layer (input+target)");

    declareOption(ol, "visible_to_hidden", &InferenceRBM::visible_to_hidden,
                  OptionBase::learntoption,
                  "Connection between visible and hidden layers");

    declareOption(ol, "input_size", &InferenceRBM::input_size,
                  OptionBase::learntoption,
                  "Size of input_layer");

    declareOption(ol, "target_size", &InferenceRBM::target_size,
                  OptionBase::learntoption,
                  "Size of target_layer");

    declareOption(ol, "visible_size", &InferenceRBM::visible_size,
                  OptionBase::learntoption,
                  "Size of visible_layer");

    declareOption(ol, "hidden_size", &InferenceRBM::hidden_size,
                  OptionBase::learntoption,
                  "Size of hidden_layer");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}
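// Illustrative usage sketch (not part of the original source): the build
// options above are typically set before calling build(). The variable names
// below (my_input_layer, my_target_layer, my_hidden_layer, my_input_to_hidden,
// my_target_to_hidden, inputs) are hypothetical and assumed to be constructed
// and built elsewhere:
//
//     PP<InferenceRBM> rbm = new InferenceRBM();
//     rbm->input_layer      = my_input_layer;       // size = input_size
//     rbm->target_layer     = my_target_layer;      // one unit per class
//     rbm->hidden_layer     = my_hidden_layer;
//     rbm->input_to_hidden  = my_input_to_hidden;
//     rbm->target_to_hidden = my_target_to_hidden;
//     rbm->exp_method       = "exact";              // or "gibbs" + n_gibbs_steps
//     rbm->random_gen       = new PRandom();
//     rbm->build();
//
//     // One row of probabilities P(target | input) per row of 'inputs'
//     Mat probs = rbm->getTargetExpGivenInput(inputs);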
////////////////////
// declareMethods //
////////////////////
void InferenceRBM::declareMethods(RemoteMethodMap& rmm)
{
    // Insert a backpointer to remote methods; note that this is
    // different than for declareOptions().
    rmm.inherited(inherited::_getRemoteMethodMap_());

    declareMethod(
        rmm, "hiddenExpGivenVisible", &InferenceRBM::hiddenExpGivenVisible,
        (BodyDoc("Computes the hidden layer's expectation given the visible"),
         ArgDoc ("visible", "Visible layer's values")));

    declareMethod(
        rmm, "hiddenExpGivenInput", &InferenceRBM::hiddenExpGivenInput,
        (BodyDoc("Computes the hidden layer's expectation given the input"),
         ArgDoc ("input", "Input layer's values")));

    declareMethod(
        rmm, "hiddenExpGivenInputTarget",
        &InferenceRBM::hiddenExpGivenInputTarget,
        (BodyDoc("Computes the hidden layer's expectation given the input\n"
                 "and the target"),
         ArgDoc ("input", "Input layer's values"),
         ArgDoc ("target", "Target (as an index)")));

    declareMethod(
        rmm, "targetExpGivenInput", &InferenceRBM::targetExpGivenInput,
        (BodyDoc("Computes the target layer's expectation given the input"),
         ArgDoc ("input", "Input layer's values")));

    declareMethod(
        rmm, "getHiddenExpGivenVisible",
        &InferenceRBM::getHiddenExpGivenVisible,
        (BodyDoc("Computes the hidden layer's expectation given the visible"),
         ArgDoc ("visible", "Visible layer's values"),
         RetDoc ("Hidden layer's expectation")));

    declareMethod(
        rmm, "getHiddenExpGivenInput", &InferenceRBM::getHiddenExpGivenInput,
        (BodyDoc("Computes the hidden layer's expectation given the input"),
         ArgDoc ("input", "Input layer's values"),
         RetDoc ("Hidden layer's expectation")));

    declareMethod(
        rmm, "getHiddenExpGivenInputTarget",
        &InferenceRBM::getHiddenExpGivenInputTarget,
        (BodyDoc("Computes the hidden layer's expectation given the input\n"
                 "and the target"),
         ArgDoc ("input", "Input layer's values"),
         ArgDoc ("target", "Target (as an index)"),
         RetDoc ("Hidden layer's expectation")));

    declareMethod(
        rmm, "getTargetExpGivenInput", &InferenceRBM::getTargetExpGivenInput,
        (BodyDoc("Computes the target layer's expectation given the input"),
         ArgDoc ("input", "Input layer's values"),
         RetDoc ("Target layer's expectation")));

    declareMethod(
        rmm, "supCDStep", &InferenceRBM::supCDStep,
        (BodyDoc("Performs one step of CD and updates the parameters"),
         ArgDoc ("visible", "Visible layer's values")));

    declareMethod(
        rmm, "setLearningRate", &InferenceRBM::setLearningRate,
        (BodyDoc("Sets the learning rate of underlying modules"),
         ArgDoc ("the_learning_rate", "The learning rate")));
}
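// Note (added for clarity, not in the original source): build_() below glues
// the pieces together. It records the layer sizes, packs input_layer and
// target_layer into a single RBMMixedLayer (visible_layer), and wraps
// input_to_hidden and target_to_hidden into a single RBMMixedConnection
// (visible_to_hidden), so that visible = (input, target) can be propagated to
// the hidden layer in one operation. Finally, it forwards random_gen to every
// sub-module that does not already have one, calling forget() so the module
// is re-initialized with it.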
void InferenceRBM::build_()
{
    MODULE_LOG << "build_() called" << endl;

    if( !input_layer || !target_layer || !hidden_layer
        || !input_to_hidden || !target_to_hidden )
    {
        MODULE_LOG << "build_() aborted because layers and connections were"
            " not set" << endl;
        return;
    }

    input_size = input_layer->size;
    target_size = target_layer->size;
    visible_size = input_size + target_size;
    hidden_size = hidden_layer->size;

    PLASSERT(input_to_hidden->down_size == input_size);
    PLASSERT(input_to_hidden->up_size == hidden_size);
    PLASSERT(target_to_hidden->down_size == target_size);
    PLASSERT(target_to_hidden->up_size == hidden_size);

    visible_layer = new RBMMixedLayer();
    visible_layer->sub_layers.resize(2);
    visible_layer->sub_layers[0] = input_layer;
    visible_layer->sub_layers[1] = target_layer;
    visible_layer->build();
    PLASSERT(visible_layer->size == visible_size);

    visible_to_hidden = new RBMMixedConnection();
    visible_to_hidden->sub_connections.resize(1,2);
    visible_to_hidden->sub_connections(0,0) = input_to_hidden;
    visible_to_hidden->sub_connections(0,1) = target_to_hidden;
    visible_to_hidden->build();
    PLASSERT(visible_to_hidden->down_size == visible_size);
    PLASSERT(visible_to_hidden->up_size == hidden_size);

    if (random_gen)
    {
        if (input_layer->random_gen.isNull())
        {
            input_layer->random_gen = random_gen;
            input_layer->forget();
        }
        if (target_layer->random_gen.isNull())
        {
            target_layer->random_gen = random_gen;
            target_layer->forget();
        }
        if (visible_layer->random_gen.isNull())
        {
            visible_layer->random_gen = random_gen;
            visible_layer->forget();
        }
        if (hidden_layer->random_gen.isNull())
        {
            hidden_layer->random_gen = random_gen;
            hidden_layer->forget();
        }
        if (input_to_hidden->random_gen.isNull())
        {
            input_to_hidden->random_gen = random_gen;
            input_to_hidden->forget();
        }
        if (target_to_hidden->random_gen.isNull())
        {
            target_to_hidden->random_gen = random_gen;
            target_to_hidden->forget();
        }
        if (visible_to_hidden->random_gen.isNull())
        {
            visible_to_hidden->random_gen = random_gen;
            visible_to_hidden->forget();
        }
    }
}

void InferenceRBM::hiddenExpGivenVisible(const Mat& visible)
{
    PLASSERT(visible.width() == visible_size);

    visible_to_hidden->setAsDownInputs(visible);
    hidden_layer->getAllActivations(get_pointer(visible_to_hidden), 0, true);
    hidden_layer->computeExpectations();
}

void InferenceRBM::hiddenExpGivenInputTarget(const Mat& input,
                                             const TVec<int>& target)
{
    int batch_size = input.length();
    PLASSERT(input.width() == input_size);
    PLASSERT(target.length() == batch_size);

    input_to_hidden->setAsDownInputs(input);
    hidden_layer->getAllActivations(get_pointer(input_to_hidden), 0, true);

    for (int k=0; k<batch_size; k++)
        hidden_layer->activations(k) += target_to_hidden->weights(target[k]);

    hidden_layer->expectations_are_up_to_date = false;
    hidden_layer->computeExpectations();
}
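// Note (added for clarity, not in the original source): assuming the target
// layer is a one-hot (multinomial) layer, targetExpGivenInput() below
// implements exact inference over the target, obtained by summing the hidden
// units out of the free energy:
//
//     P(target=i | input)  is proportional to
//         exp( b_i + sum_j softplus( a_j(input) + U_ji ) )
//
// where b_i is the target layer's bias, U = target_to_hidden->weights, and
// a_j(input) = hidden_bias_j + (input_to_hidden.weights * input)_j is the
// hidden activation computed from the input alone. The normalization over i
// is left to target_layer->computeExpectations(). hiddenExpGivenInput() then
// marginalizes the hidden expectation over this distribution:
//
//     E[hidden | input] = sum_i P(target=i | input) * E[hidden | input, target=i]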
void InferenceRBM::targetExpGivenInput(const Mat& input)
{
    PLASSERT(input.width() == input_size);
    int batch_size = input.length();

    // input contains samples (or expectations) from input_layer
    input_to_hidden->setAsDownInputs(input);

    // hidden_layer->activations = bias + input_to_hidden.weights * input
    hidden_layer->getAllActivations(get_pointer(input_to_hidden), 0, true);

    target_layer->setBatchSize(batch_size);

    // target_layer->activations[k][i] =
    //     bias[i] + sum_j softplus(W_ji + hidden_layer->activations[k][j])
    Mat hidden_act = hidden_layer->activations;
    Mat target_act = target_layer->activations;
    Vec target_b = target_layer->bias;
    Mat t_to_h_w = target_to_hidden->weights;

    for (int k=0; k<batch_size; k++)
    {
        target_act(k) << target_b;

        real* target_act_k = target_act[k];
        real* hidden_act_kj = hidden_act[k];
        for (int j=0; j<hidden_size; j++, hidden_act_kj++)
        {
            real* target_act_ki = target_act_k; // copy
            real* t_to_h_w_ji = t_to_h_w[j];
            for (int i=0; i<target_size; i++, target_act_ki++, t_to_h_w_ji++)
            {
                PLASSERT(*target_act_ki == target_act(k,i));
                PLASSERT(*t_to_h_w_ji == t_to_h_w(j,i));
                PLASSERT(*hidden_act_kj == hidden_act(k,j));

                if (use_fast_approximations)
                    *target_act_ki +=
                        tabulated_softplus(*t_to_h_w_ji + *hidden_act_kj);
                else
                    *target_act_ki += softplus(*t_to_h_w_ji + *hidden_act_kj);
            }
        }
    }

    target_layer->expectations_are_up_to_date = false;
    target_layer->computeExpectations();
}

void InferenceRBM::hiddenExpGivenInput(const Mat& input)
{
    PLASSERT(input.width() == input_size);
    int batch_size = input.length();

    targetExpGivenInput(input);
    Mat target_exp = target_layer->getExpectations();

    Mat visible(batch_size, visible_size);
    visible.subMatColumns(0, input_size) << input;

    Mat hidden_exp(batch_size, hidden_size);

    for (int i=0; i<target_size; i++)
    {
        visible.subMatColumns(input_size, target_size).clear();
        visible.column(input_size+i).fill(1.);

        hiddenExpGivenVisible(visible);

        for (int k=0; k<batch_size; k++)
            hidden_exp(k) += target_exp(k,i) * hidden_layer->getExpectations()(k);
    }

    hidden_layer->setExpectations(hidden_exp);
}

Mat InferenceRBM::getHiddenExpGivenVisible(const Mat& visible)
{
    hiddenExpGivenVisible(visible);
    return hidden_layer->getExpectations();
}

Mat InferenceRBM::getHiddenExpGivenInputTarget(const Mat& input,
                                               const TVec<int>& target)
{
    hiddenExpGivenInputTarget(input, target);
    return hidden_layer->getExpectations();
}

Mat InferenceRBM::getTargetExpGivenInput(const Mat& input)
{
    targetExpGivenInput(input);
    return target_layer->getExpectations();
}

Mat InferenceRBM::getHiddenExpGivenInput(const Mat& input)
{
    hiddenExpGivenInput(input);
    return hidden_layer->getExpectations();
}
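// Note (added for clarity, not in the original source): supCDStep() below
// performs one step of contrastive divergence (CD-1) on a batch of
// (input, target) visible vectors:
//   1. positive phase: h0 = E[hidden | v0] for the data v0;
//   2. down propagation: sample a visible reconstruction v1 from P(visible | h0);
//   3. negative phase: compute E[hidden | v1];
//   4. update: each layer and the connection adjust their parameters from the
//      difference between the positive statistics (v0, h0) and the negative
//      statistics (v1, E[hidden | v1]); e.g. the weight update is proportional
//      to h0 * v0' - E[hidden | v1] * v1'.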
void InferenceRBM::supCDStep(const Mat& visible)
{
    PLASSERT(visible.width() == visible_size);
    int batch_size = visible.length();

    v0.resize(batch_size, visible_size);
    v0 << visible;

    // Positive phase
    hiddenExpGivenVisible(visible);
    h0.resize(batch_size, hidden_size);
    h0 << hidden_layer->getExpectations();

    // Down propagation
    visible_to_hidden->setAsUpInputs(h0);
    visible_layer->getAllActivations(get_pointer(visible_to_hidden), 0, true);
    visible_layer->computeExpectations();
    visible_layer->generateSamples();

    // Negative phase
    hiddenExpGivenVisible(visible_layer->samples);

    // Update
    visible_layer->update(v0, visible_layer->samples);
    visible_to_hidden->update(v0, h0, visible_layer->samples,
                              hidden_layer->getExpectations());
    hidden_layer->update(h0, hidden_layer->getExpectations());
}

void InferenceRBM::unsupCDStep(const Mat& input)
{
    PLCHECK_MSG(false, "Not implemented yet");
}

void InferenceRBM::setLearningRate(real the_learning_rate)
{
    visible_layer->setLearningRate(the_learning_rate);
    visible_to_hidden->setLearningRate(the_learning_rate);
    hidden_layer->setLearningRate(the_learning_rate);
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :