// -*- C++ -*-

// RBMJointGenericParameters.cc
//
// Copyright (C) 2006 Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin

#include "RBMJointGenericParameters.h"
#include <plearn/math/TMat_maths.h>
//#include "RBMLayer.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    RBMJointGenericParameters,
    "Parameters tying the last, previous and target layers of a DeepBeliefNet",
    "The joint weight matrix is the concatenation of target_params' and\n"
    "cond_params' weight matrices (whose storage it shares), and the up\n"
    "layer units parameters are shared between the two. In addition to the\n"
    "usual 'up' and 'down' modes, a \"target given conditioning input\"\n"
    "mode (see setAsCondInput) computes the activations of the target\n"
    "units given the conditioning input alone.\n");

RBMJointGenericParameters::RBMJointGenericParameters( real the_learning_rate )
    : inherited(the_learning_rate)
{
}

RBMJointGenericParameters::RBMJointGenericParameters(
    PP<RBMGenericParameters>& the_target_params,
    PP<RBMGenericParameters>& the_cond_params,
    real the_learning_rate )
    : inherited( the_learning_rate ),
      target_params( the_target_params ),
      cond_params( the_cond_params )
{
    // We're not sure inherited::build() has been called
    build();
}


void RBMJointGenericParameters::declareOptions(OptionList& ol)
{
    // ### Declare all of this object's options here.
    // ### For the "flags" of each option, you should typically specify
    // ### one of OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. If you don't provide one of these three,
    // ### this option will be ignored when loading values from a script.
    // ### You can also combine flags, for example with OptionBase::nosave:
    // ### (OptionBase::buildoption | OptionBase::nosave)

    declareOption(ol, "target_params",
                  &RBMJointGenericParameters::target_params,
                  OptionBase::buildoption,
                  "RBMParameters between the target and the upper layer");

    declareOption(ol, "cond_params", &RBMJointGenericParameters::cond_params,
                  OptionBase::buildoption,
                  "RBMParameters between the conditioning input and the upper"
                  " layer");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

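//! Builds the joint unit types from those of target_params and cond_params:
//! checks that both sub-RBMParameters share the same up layer, concatenates
//! their down_units_types (target part first, which must be all 'l'), and
//! pre-sizes 'weights' and 'out_act' so that RBMParameters::build_() does
//! not call forget().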
void RBMJointGenericParameters::build_units_types()
{
    if( !target_params || !cond_params )
        return;

    if( target_params->up_units_types != cond_params->up_units_types )
        PLERROR( "RBMJointGenericParameters::build_units_types - \n"
                 "target_params->up_units_types should be equal to"
                 " cond_params->up_units_types\n"
                 "(\"%s\" != \"%s\").\n",
                 target_params->up_units_types.c_str(),
                 cond_params->up_units_types.c_str() );

    up_units_types = cond_params->up_units_types;
    down_units_types = target_params->down_units_types;

    target_size = target_params->down_layer_size;
    cond_size = cond_params->down_layer_size;

    for( int i=0 ; i<target_size ; i++ )
        if( down_units_types[i] != 'l' )
            PLERROR( "RBMJointGenericParameters::build_units_types - \n"
                     "target_params->down_units_types[%d] should be 'l', is"
                     " '%c'.\n", i, down_units_types[i] );

    down_units_types += cond_params->down_units_types;

    // to avoid "forget()" being called in RBMParameters::build_()
    weights.resize( int(up_units_types.length()),
                    int(down_units_types.length()) );
    out_act.resize( int(up_units_types.length()) );
}

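//! Makes target_params' and cond_params' weights, statistics and unit
//! parameters aliases (sub-views) of this object's joint storage, so that
//! updating one updates the other.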
void RBMJointGenericParameters::build_()
{
    // The first columns of 'weights' correspond to the weights between the
    // target part and the up layer
    weights.subMatColumns( 0, target_size ) << target_params->weights;
    target_params->weights = weights.subMatColumns( 0, target_size );

    // The remaining columns correspond to the weights between the
    // conditioning part and the up layer
    weights.subMatColumns( target_size, cond_size ) << cond_params->weights;
    cond_params->weights = weights.subMatColumns( target_size, cond_size );

    // Same thing for the statistics
    target_params->weights_pos_stats =
        weights_pos_stats.subMatColumns( 0, target_size );

    cond_params->weights_pos_stats =
        weights_pos_stats.subMatColumns( target_size, cond_size );

    target_params->weights_neg_stats =
        weights_neg_stats.subMatColumns( 0, target_size );

    cond_params->weights_neg_stats =
        weights_neg_stats.subMatColumns( target_size, cond_size );

    // Same thing for the down units parameters
    down_units_params = merge( target_params->down_units_params,
                               cond_params->down_units_params );

    target_params->down_units_params_pos_stats =
        down_units_params_pos_stats.subVec( 0, target_size );

    cond_params->down_units_params_pos_stats =
        down_units_params_pos_stats.subVec( target_size, cond_size );

    target_params->down_units_params_neg_stats =
        down_units_params_neg_stats.subVec( 0, target_size );

    cond_params->down_units_params_neg_stats =
        down_units_params_neg_stats.subVec( target_size, cond_size );

    // The up layer units parameters are shared between the three
    // RBMParameters
    up_units_params = cond_params->up_units_params;
    target_params->up_units_params = up_units_params;

    target_params->up_units_params_pos_stats = up_units_params_pos_stats;
    cond_params->up_units_params_pos_stats = up_units_params_pos_stats;

    target_params->up_units_params_neg_stats = up_units_params_neg_stats;
    cond_params->up_units_params_neg_stats = up_units_params_neg_stats;

    // sizes for fprop() and all OnlineLearningModule methods
    input_size = cond_size;
    output_size = target_size;
}

void RBMJointGenericParameters::build()
{
    // This must be called first, otherwise inherited::build() will not work
    // properly
    build_units_types();

    inherited::build();
    build_();
}


void RBMJointGenericParameters::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(target_params, copies);
    deepCopyField(cond_params, copies);
}

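// The three setAs*Input() methods select which conditional computation is
// performed next: setAsUpInput() and setAsDownInput() keep the usual
// joint-RBM behaviour, while setAsCondInput() switches to the
// "target given conditioning input" mode used by fprop().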
void RBMJointGenericParameters::setAsUpInput( const Vec& input ) const
{
    target_given_cond = false;
    inherited::setAsUpInput( input );
}

void RBMJointGenericParameters::setAsDownInput( const Vec& input ) const
{
    target_given_cond = false;
    inherited::setAsDownInput( input );
}

void RBMJointGenericParameters::setAsCondInput( const Vec& input ) const
{
    PLASSERT( input.size() == cond_size );
    input_vec = input;
    target_given_cond = true;
    going_up = false;
}

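//! In "target given conditioning input" mode, the activation of target unit
//! i marginalizes over the up layer (the softplus sum below corresponds to
//! binary up units); in the other modes, the computation is deferred to
//! RBMGenericParameters.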
void RBMJointGenericParameters::computeLinearUnitActivations
    ( int i, const Vec& activations ) const
{
    if( target_given_cond )
    {
        PLASSERT( activations.length() == 1 );
        Mat V = weights.subMatColumns(target_size, cond_size);
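        // The up-layer activations out_act[j] = C_j + sum_k V_jk x_k (where
        // x = input_vec) are the same for every target unit i, so compute
        // and cache them only when processing the first unit (i == 0)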
        if (i==0)
            for (int j=0; j<weights.length(); j++)
                out_act[j] =
                    up_units_params[j][0] + matRowDotVec(V, j, input_vec);

        // act_i = B_i - sum_j softplus( -( weights(j,i) + C_j + sum_k V_jk x_k ) )
        //       = B_i - sum_j softplus( -( weights(j,i) + out_act[j] ) )
        real somme = down_units_params[i][0];
        real* w = &weights[0][i];
        // step from one row to the next in the weights matrix
        int m = weights.mod();

        for( int j=0; j<weights.length() ; j++, w+=m )
        {
            // *w = weights(j,i)
            somme -= softplus( -(*w + out_act[j]) );
        }
        activations[0] = somme;
    }
    else
        inherited::computeLinearUnitActivations(i, activations);
}

void RBMJointGenericParameters::fprop(const Vec& input, Vec& output) const
{
    // sets "input" as the conditioning input, and computes "output"
    setAsCondInput( input );
    output.resize( output_size );
    computeUnitActivations( 0, output_size, output );
}
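
// A minimal usage sketch (hypothetical caller, not part of this file): the
// conditioning observation is fed to fprop(), which fills 'output' with the
// activations of the target units.
//
//     PP<RBMJointGenericParameters> params = ...; // built elsewhere
//     Vec cond_obs = ...;   // conditioning observation (size cond_size)
//     Vec target_act;       // resized by fprop()
//     params->fprop( cond_obs, target_act );
//     // target_act[i] is now the activation of target unit i given cond_obs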
void RBMJointGenericParameters::computeQuadraticUnitActivations
    ( int i, const Vec& activations ) const
{
    if( target_given_cond )
        PLERROR( "RBMJointGenericParameters::computeQuadraticUnitActivations"
                 " -\n"
                 "Cannot compute the activation of a quadratic unit in mode "
                 "\"target_given_cond\".\n" );
    else
        inherited::computeQuadraticUnitActivations(i, activations);
}


void RBMJointGenericParameters::computeUnitActivations
    ( int start, int length, const Vec& activations ) const
{
    string units_types;
    if( target_given_cond )
        units_types = down_units_types.substr(0, target_size);
    else if( going_up )
        units_types = up_units_types;
    else
        units_types = down_units_types;

    PLASSERT( start+length <= (int) units_types.length() );
    int cur_pos = 0; // position index inside activations

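    // each linear unit ('l') takes one slot of 'activations', each
    // quadratic unit ('q') takes two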
    for( int i=start ; i<start+length ; i++ )
    {
        char ut_i = units_types[i];
        if( ut_i == 'l' )
        {
            computeLinearUnitActivations( i, activations.subVec(cur_pos, 1) );
            cur_pos++;
        }
        else if( ut_i == 'q' )
        {
            computeQuadraticUnitActivations
                ( i, activations.subVec(cur_pos, 2) );
            cur_pos += 2;
        }
        else
            PLERROR( "RBMJointGenericParameters::computeUnitActivations():\n"
                     "value '%c' for units_types[%d] is unknown.\n"
                     "Supported values are 'l' and 'q'.\n", ut_i, i );
    }
}

void RBMJointGenericParameters::bpropUpdate(const Vec& input,
                                            const Vec& output,
                                            Vec& input_gradient,
                                            const Vec& output_gradient)
{
    PLERROR( "RBMJointGenericParameters::bpropUpdate() not implemented yet.\n"
             "If you only have linear units on up and down layer, you should\n"
             "consider using RBMJointLLParameters instead.\n" );
}

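//! Resets the sub-RBMParameters (target_params and cond_params) and clears
//! the accumulated statistics.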
void RBMJointGenericParameters::forget()
{
    if( target_params )
        target_params->forget();

    if( cond_params )
        cond_params->forget();

    clearStats();
}

/* THIS METHOD IS OPTIONAL
void RBMJointGenericParameters::finalize()
{
}
*/


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :