// -*- C++ -*-

// RBMJointLLParameters.cc
//
// Copyright (C) 2006 Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin

#include "RBMJointLLParameters.h"
#include <plearn/math/TMat_maths.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    RBMJointLLParameters,
    "Parameters tying the last, previous and target layers of a DeepBeliefNet",
    "[to be completed]");

RBMJointLLParameters::RBMJointLLParameters( real the_learning_rate )
    : inherited(the_learning_rate)
{
}

RBMJointLLParameters::RBMJointLLParameters(
    PP<RBMLLParameters>& the_target_params,
    PP<RBMLLParameters>& the_cond_params,
    real the_learning_rate )
    : inherited( the_learning_rate ),
      target_params( the_target_params ),
      cond_params( the_cond_params )
{
    // We're not sure inherited::build() has been called
    build();
}

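// Usage sketch for the convenience constructor above (added illustration,
// not from the original file; the two RBMLLParameters objects, the variable
// names and the learning rate are hypothetical, and both sub-parameters are
// assumed to share the same up layer):
//
//     PP<RBMLLParameters> the_target_params = ...; // target <-> up weights
//     PP<RBMLLParameters> the_cond_params = ...;   // cond <-> up weights
//     PP<RBMJointLLParameters> joint = new RBMJointLLParameters(
//         the_target_params, the_cond_params, 0.01 /* learning rate */ );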

void RBMJointLLParameters::declareOptions(OptionList& ol)
{
    // ### Declare all of this object's options here.
    // ### For the "flags" of each option, you should typically specify
    // ### one of OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. If you don't provide one of these three,
    // ### this option will be ignored when loading values from a script.
    // ### You can also combine flags, for example with OptionBase::nosave:
    // ### (OptionBase::buildoption | OptionBase::nosave)

    declareOption(ol, "target_params",
                  &RBMJointLLParameters::target_params,
                  OptionBase::buildoption,
                  "RBMParameters between the target and the upper layer");

    declareOption(ol, "cond_params", &RBMJointLLParameters::cond_params,
                  OptionBase::buildoption,
                  "RBMParameters between the conditioning input and the upper"
                  " layer");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

void RBMJointLLParameters::build_units_types()
{
    if( !target_params || !cond_params )
        return;

    if( target_params->up_units_types != cond_params->up_units_types )
        PLERROR( "RBMJointLLParameters::build_units_types - \n"
                 "target_params->up_units_types should be equal to"
                 " cond_params->up_units_types\n"
                 "(\"%s\" != \"%s\").\n",
                 target_params->up_units_types.c_str(),
                 cond_params->up_units_types.c_str() );

    up_units_types = cond_params->up_units_types;
    down_units_types = target_params->down_units_types;
    down_units_types += cond_params->down_units_types;

    target_size = target_params->down_layer_size;
    cond_size = cond_params->down_layer_size;

    // to avoid "forget()" being called in RBMParameters::build_()
    weights.resize( int(up_units_types.length()),
                    int(down_units_types.length()) );
    out_act.resize( int(up_units_types.length()) );
}

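// Layout sketch (added illustration, inferred from build_units_types()
// above and build_() below): the joint down layer is the concatenation
// [ target ; cond ], so the joint weight matrix is the horizontal
// concatenation of the two underlying blocks:
//
//     weights = [ target_params->weights | cond_params->weights ]
//                 (up x target_size)       (up x cond_size)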
void RBMJointLLParameters::build_()
{
    // The first part of weights corresponds to the weights between the
    // target part and the up layer
    weights.subMatColumns( 0, target_size ) << target_params->weights;
    target_params->weights = weights.subMatColumns( 0, target_size );

    // The second part corresponds to the weights between the cond part and
    // the up layer
    weights.subMatColumns( target_size, cond_size ) << cond_params->weights;
    cond_params->weights = weights.subMatColumns( target_size, cond_size );

    // Same thing for the statistics
    target_params->weights_pos_stats =
        weights_pos_stats.subMatColumns( 0, target_size );

    cond_params->weights_pos_stats =
        weights_pos_stats.subMatColumns( target_size, cond_size );

    target_params->weights_neg_stats =
        weights_neg_stats.subMatColumns( 0, target_size );

    cond_params->weights_neg_stats =
        weights_neg_stats.subMatColumns( target_size, cond_size );

    // Same thing for the down units' biases
    down_units_bias = merge( target_params->down_units_bias,
                             cond_params->down_units_bias );

    target_params->down_units_bias_pos_stats =
        down_units_bias_pos_stats.subVec( 0, target_size );

    cond_params->down_units_bias_pos_stats =
        down_units_bias_pos_stats.subVec( target_size, cond_size );

    target_params->down_units_bias_neg_stats =
        down_units_bias_neg_stats.subVec( 0, target_size );

    cond_params->down_units_bias_neg_stats =
        down_units_bias_neg_stats.subVec( target_size, cond_size );

    // The up layer units' parameters are shared between the three
    // RBMParameters
    up_units_bias = cond_params->up_units_bias;
    target_params->up_units_bias = up_units_bias;

    target_params->up_units_bias_pos_stats = up_units_bias_pos_stats;
    cond_params->up_units_bias_pos_stats = up_units_bias_pos_stats;

    target_params->up_units_bias_neg_stats = up_units_bias_neg_stats;
    cond_params->up_units_bias_neg_stats = up_units_bias_neg_stats;

    // sizes for fprop() and all other OnlineLearningModule methods
    input_size = cond_size;
    output_size = target_size;
}
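// Note on build_() above (added explanation): since TMat/TVec assignment
// shares the underlying storage, the assignments above turn target_params'
// and cond_params' weights, biases and statistics into views on the joint
// ones, so any update made through this object is immediately visible to
// the two sub-RBMParameters, and vice versa.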

void RBMJointLLParameters::build()
{
    // Begin with this, otherwise inherited::build() will not work properly
    build_units_types();

    inherited::build();
    build_();
}


void RBMJointLLParameters::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(target_params, copies);
    deepCopyField(cond_params, copies);
}

void RBMJointLLParameters::setAsUpInput( const Vec& input ) const
{
    target_given_cond = false;
    inherited::setAsUpInput( input );
}

void RBMJointLLParameters::setAsDownInput( const Vec& input ) const
{
    target_given_cond = false;
    inherited::setAsDownInput( input );
}

void RBMJointLLParameters::setAsCondInput( const Vec& input ) const
{
    PLASSERT( input.size() == cond_size );
    input_vec = input;
    target_given_cond = true;
    going_up = false;
}
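// Note on the three setAs*Input() methods above (added explanation):
// setAsUpInput() and setAsDownInput() delegate to the usual RBM behaviour,
// while setAsCondInput() stores the conditioning part of the down layer in
// input_vec and raises target_given_cond, which makes the next call to
// computeUnitActivations() compute the activations of the target units
// given that conditioning input.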

void RBMJointLLParameters::fprop(const Vec& input, Vec& output) const
{
    // sets "input" as the conditioning input, and computes "output"
    setAsCondInput( input );
    output.resize( output_size );
    computeUnitActivations( 0, output_size, output );
}

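// Usage sketch for fprop() above (added illustration; "joint" is the
// hypothetical object from the constructor example):
//
//     Vec cond_input( joint->input_size );  // the conditioning observation
//     Vec target_act;                       // resized by fprop()
//     joint->fprop( cond_input, target_act );
//     // target_act[i] is now the activation of target unit i given
//     // cond_input, as computed by computeUnitActivations() below.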

void RBMJointLLParameters::computeUnitActivations
    ( int start, int length, const Vec& activations ) const
{
    if( target_given_cond )
    {
        PLASSERT( activations.length() == length );
        PLASSERT( start+length <= target_size );

        out_act << up_units_bias;
        Mat V = weights.subMatColumns( target_size, cond_size );
        // out_act = up_units_bias + V * input_vec
        productAcc( out_act, V, input_vec );

        // actY_i = B_i - sum_j softplus(-(W_ji + C_j + sum_k V_jk p(P_k)))
        //        = B_i - sum_j softplus(-(W_ji + out_act[j]) )
        for( int i=start ; i<start+length ; i++ )
        {
            real somme = down_units_bias[i];
            real *w = &weights[0][i];
            // step from one row to the next in the weights matrix
            int m = weights.mod();

            for( int j=0; j< weights.length() ; j++, w+=m )
            {
                // *w = weights(j,i)
                somme -= softplus( -(*w + out_act[j]));
            }
            activations[i-start] = somme;
        }
    }
    else
        inherited::computeUnitActivations( start, length, activations );
}

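// Note on computeUnitActivations() above (added explanation): in the
// target-given-cond case, writing B = down_units_bias, C = up_units_bias,
// W = the target block of weights and V = the cond block, the activation
// of target unit i is
//
//     act_i = B_i - sum_j softplus( -( W_ji + C_j + (V * input_vec)_j ) )
//
// that is, each up unit j contributes through the sum of its two sources
// of input: the candidate target unit i and the fixed conditioning input.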
void RBMJointLLParameters::bpropUpdate(const Vec& input,
                                       const Vec& output,
                                       Vec& input_gradient,
                                       const Vec& output_gradient)
{
    PLASSERT( input.size() == cond_size );
    PLASSERT( output.size() == target_size );
    PLASSERT( output_gradient.size() == target_size );
    input_gradient.resize( cond_size );
    input_gradient.clear();

    //for( int k=0 ; k<target_size ; k++ )
    //    down_units_bias[k] -= learning_rate * output_gradient[k];
    multiplyAcc( down_units_bias.subVec(0, target_size),
                 output_gradient, -learning_rate );

    for( int i=0 ; i<up_layer_size ; i++ )
    {
        real* w = weights[i];
        real d_out_act = 0;
        for( int k=0 ; k<target_size ; k++ )
        {
            // dC/d(weights(i,k)+out_act[i])
            real d_z = output_gradient[k] * (-sigmoid(-w[k]-out_act[i]));
            w[k] -= learning_rate * d_z;

            d_out_act += d_z;
        }
        up_units_bias[i] -= learning_rate * d_out_act;

        for( int j=0 ; j<cond_size ; j++ )
        {
            real& w_ij = w[j+target_size];
            input_gradient[j] += d_out_act * w_ij;
            w_ij -= learning_rate * d_out_act * input[j];
        }
    }
}

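// Note on bpropUpdate() above (added explanation): every parameter gets a
// stochastic gradient step of the form  param -= learning_rate * d_param.
// Because the activation of target unit k depends on weights(i,k) and
// out_act[i] only through their sum, a single quantity d_z per (i,k) pair
// is reused three times: it updates the target-side weight w[k], is
// accumulated into d_out_act to update up_units_bias[i], and d_out_act is
// then pushed through the cond block of the weights to update it and to
// fill input_gradient.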
void RBMJointLLParameters::forget()
{
    if( target_params )
        target_params->forget();

    if( cond_params )
        cond_params->forget();

    clearStats();
}

/* THIS METHOD IS OPTIONAL
void RBMJointLLParameters::finalize()
{
}
*/


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :