// -*- C++ -*-

// ProcessInputCostModule.cc
//
// Copyright (C) 2007 Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin

#include "ProcessInputCostModule.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    ProcessInputCostModule,
    "Processes the input through an embedded OnlineLearningModule",
    "This Module embeds an OnlineLearningModule, processing_module, and a\n"
    "CostModule, cost_module. The input goes through processing_module,\n"
    "the output of which is used as input by the CostModule.\n"
    "If you want the input to go through several processing steps, you can\n"
    "use a ModuleStackModule as processing_module.\n"
    );
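
// A minimal usage sketch (illustrative only; it assumes the build options
// processing_module and cost_module can be set directly as public fields,
// and the submodule classes mentioned are just examples):
//
//     PP<OnlineLearningModule> processing = ...; // e.g. a ModuleStackModule
//     PP<CostModule> cost = ...;                 // e.g. an NLLCostModule
//     PP<ProcessInputCostModule> proc_cost = new ProcessInputCostModule();
//     proc_cost->processing_module = processing;
//     proc_cost->cost_module = cost;
//     proc_cost->build();  // propagates sizes and checks compatibility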

ProcessInputCostModule::ProcessInputCostModule() :
    processed_size( -1 )
{
}

void ProcessInputCostModule::declareOptions(OptionList& ol)
{
    // declareOption(ol, "myoption", &ProcessInputCostModule::myoption,
    //               OptionBase::buildoption,
    //               "Help text describing this option");

    declareOption(ol, "processing_module",
                  &ProcessInputCostModule::processing_module,
                  OptionBase::buildoption,
                  "Module that processes the input");

    declareOption(ol, "cost_module",
                  &ProcessInputCostModule::cost_module,
                  OptionBase::buildoption,
                  "Module that outputs the cost");

    declareOption(ol, "processed_size",
                  &ProcessInputCostModule::processed_size,
                  OptionBase::learntoption,
                  "Size of processing_module's output");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

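// Sizes are propagated from the embedded modules: input_size comes from
// processing_module, output_size and target_size from cost_module, and
// processed_size (processing_module's output size) must equal
// cost_module->input_size. For example, a processing module mapping 100
// inputs to 20 outputs requires a cost module whose input_size is 20.
// If this module has a random_gen and a submodule has none, it is shared.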
void ProcessInputCostModule::build_()
{
    if( processing_module )
    {
        input_size = processing_module->input_size;
        processed_size = processing_module->output_size;
        // If we have a random_gen and processing_module does not, share it
        if( random_gen && !(processing_module->random_gen) )
        {
            processing_module->random_gen = random_gen;
            processing_module->forget();
        }
    }

    if( cost_module )
    {
        output_size = cost_module->output_size;
        target_size = cost_module->target_size;
        // If we have a random_gen and cost_module does not, share it
        if( random_gen && !(cost_module->random_gen) )
        {
            cost_module->random_gen = random_gen;
            cost_module->forget();
        }
    }

    if( processing_module && cost_module )
        PLASSERT( processed_size == cost_module->input_size );
}

void ProcessInputCostModule::build()
{
    inherited::build();
    build_();
}


void ProcessInputCostModule::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(processing_module, copies);
    deepCopyField(cost_module, copies);
    deepCopyField(processed_value, copies);
    deepCopyField(processed_values, copies);
    deepCopyField(processed_gradient, copies);
    deepCopyField(processed_gradients, copies);
    deepCopyField(processed_diag_hessian, copies);
    deepCopyField(processed_diag_hessians, copies);
}

///////////
// fprop //
///////////
void ProcessInputCostModule::fprop(const Vec& input, const Vec& target,
                                   real& cost) const
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( input.size() == input_size );
    PLASSERT( target.size() == target_size );

    processing_module->fprop( input, processed_value );
    cost_module->fprop( processed_value, target, cost );
}

void ProcessInputCostModule::fprop(const Mat& inputs, const Mat& targets,
                                   Vec& costs )
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( inputs.width() == input_size );
    PLASSERT( targets.width() == target_size );
    PLASSERT( inputs.length() == targets.length() );

    processing_module->fprop( inputs, processed_values );
    cost_module->fprop( processed_values, targets, costs );
}

void ProcessInputCostModule::fprop(const Vec& input, const Vec& target,
                                   Vec& cost) const
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( input.size() == input_size );
    PLASSERT( target.size() == target_size );

    processing_module->fprop( input, processed_value );
    cost_module->fprop( processed_value, target, cost );
}

void ProcessInputCostModule::fprop(const Mat& inputs, const Mat& targets,
                                   Mat& costs ) const
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( inputs.width() == input_size );
    PLASSERT( targets.width() == target_size );
    PLASSERT( inputs.length() == targets.length() );

    processing_module->fprop( inputs, processed_values );
    cost_module->fprop( processed_values, targets, costs );
}
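
// Example of the scalar fprop above (a sketch; 'module' and the sample data
// are hypothetical):
//
//     Vec input( module->input_size );
//     Vec target( module->target_size );
//     real cost;
//     module->fprop( input, target, cost );  // single-sample cost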

/////////////////
// bpropUpdate //
/////////////////
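// Both overloads backpropagate in two stages: cost_module first computes the
// gradient of the cost with respect to the processed representation, then
// processing_module backpropagates that gradient down to the input, updating
// its own parameters along the way.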
void ProcessInputCostModule::bpropUpdate(const Vec& input, const Vec& target,
                                         real cost, Vec& input_gradient,
                                         bool accumulate)
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( input.size() == input_size );
    PLASSERT( target.size() == target_size );

    if( accumulate )
    {
        PLASSERT_MSG( input_gradient.size() == input_size,
                      "Cannot resize input_gradient AND accumulate into it" );
    }

    cost_module->bpropUpdate( processed_value, target, cost,
                              processed_gradient );
    processing_module->bpropUpdate( input, processed_value,
                                    input_gradient, processed_gradient,
                                    accumulate );
}

void ProcessInputCostModule::bpropUpdate(const Mat& inputs, const Mat& targets,
                                         const Vec& costs, Mat& input_gradients,
                                         bool accumulate)
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( inputs.width() == input_size );
    PLASSERT( targets.width() == target_size );
    PLASSERT( inputs.length() == targets.length() );
    PLASSERT( inputs.length() == costs.size() );

    if( accumulate )
    {
        PLASSERT_MSG( input_gradients.width() == input_size
                      && input_gradients.length() == inputs.length(),
                      "Cannot resize input_gradients AND accumulate into them" );
    }

    cost_module->bpropUpdate( processed_values, targets, costs,
                              processed_gradients );
    processing_module->bpropUpdate( inputs, processed_values,
                                    input_gradients, processed_gradients,
                                    accumulate );
}


//////////////////
// bbpropUpdate //
//////////////////
void ProcessInputCostModule::bbpropUpdate(const Vec& input, const Vec& target,
                                          real cost, Vec& input_gradient,
                                          Vec& input_diag_hessian,
                                          bool accumulate)
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );
    PLASSERT( input.size() == input_size );
    PLASSERT( target.size() == target_size );

    if( accumulate )
    {
        PLASSERT_MSG( input_gradient.size() == input_size,
                      "Cannot resize input_gradient AND accumulate into it" );
        PLASSERT_MSG( input_diag_hessian.size() == input_size,
                      "Cannot resize input_diag_hessian AND accumulate into it"
                    );
    }

    cost_module->bbpropUpdate( processed_value, target, cost,
                               processed_gradient, processed_diag_hessian );
    processing_module->bbpropUpdate( input, processed_value,
                                     input_gradient, processed_gradient,
                                     input_diag_hessian,
                                     processed_diag_hessian,
                                     accumulate );
}


////////////
// forget //
////////////
void ProcessInputCostModule::forget()
{
    PLASSERT( processing_module );
    PLASSERT( cost_module );

    processed_value.clear();
    processed_gradient.clear();
    processed_diag_hessian.clear();

    if( !random_gen )
    {
        PLWARNING("ProcessInputCostModule: cannot forget() without random_gen");
        return;
    }

    // Ensures processing_module and cost_module can forget
    if( !(processing_module->random_gen) )
        processing_module->random_gen = random_gen;
    processing_module->forget();
    if( !(cost_module->random_gen) )
        cost_module->random_gen = random_gen;
    cost_module->forget();
}

///////////////
// costNames //
///////////////
TVec<string> ProcessInputCostModule::costNames()
{
    if (name == "" || name == classname())
        return cost_module->costNames();
    else
    {
        int n_costs = cost_module->costNames().length();
        TVec<string> cost_names(n_costs);
        for (int i=0; i<n_costs; i++)
            cost_names[i] = name + "." + cost_module->costNames()[i];

        return cost_names;
    }
}
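
// For instance, with name set to "reconstruction" (hypothetical) and an
// embedded cost module reporting ( "NLL" ), the returned name would be
// "reconstruction.NLL"; with the default name, cost_module's names are
// returned unchanged.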

//////////////
// finalize //
//////////////
void ProcessInputCostModule::finalize()
{
    processing_module->finalize();
    cost_module->finalize();
}

//////////////////////
// bpropDoesNothing //
//////////////////////
bool ProcessInputCostModule::bpropDoesNothing()
{
    return processing_module->bpropDoesNothing()
        && cost_module->bpropDoesNothing();
}

/////////////////////
// setLearningRate //
/////////////////////
void ProcessInputCostModule::setLearningRate(real dynamic_learning_rate)
{
    processing_module->setLearningRate( dynamic_learning_rate );
    cost_module->setLearningRate( dynamic_learning_rate );
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :