// -*- C++ -*-

// PLearn (A C++ Machine Learning Library)
// Copyright (C) 1998 Pascal Vincent
// Copyright (C) 1999-2002 Pascal Vincent, Yoshua Bengio, Rejean Ducharme and University of Montreal
// Copyright (C) 2001-2002 Nicolas Chapados, Ichiro Takeuchi, Jean-Sebastien Senecal
// Copyright (C) 2002 Xiangdong Wang, Christian Dorion

// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* *******************************************************
 * $Id: SumOverBagsVariable.cc 8854 2008-04-21 20:56:25Z tihocan $
 * This file is part of the PLearn library.
 ******************************************************* */

#include "SumOverBagsVariable.h"
#include <plearn/math/TMat_maths.h>
//#include "PLMPI.h"
//#include "DisplayUtils.h"

namespace PLearn {
using namespace std;


PLEARN_IMPLEMENT_OBJECT(SumOverBagsVariable, "Variable that sums the value of a Func evaluated on each bag (subsequence of rows) of a VMat\n",
                        "Returns\n"
                        "   Sum_{bags in vmat} f(inputs and targets in bag)\n"
                        "(it can average this sum over the number of bags if the 'average' option is set).\n"
                        "By convention a bag is a sequence of consecutive rows of the vmat in which the last column of the target\n"
                        "indicates whether the row is the first and/or the last one of its bag, via its two least significant bits:\n"
                        "   last_column_of_target == 1 ==> first row\n"
                        "   last_column_of_target == 2 ==> last row\n"
                        "   last_column_of_target == 0 ==> intermediate row\n"
                        "   last_column_of_target == 1+2 == 3 ==> single-row bag (both first and last).\n"
                        "The option n_samples controls how many terms of the sum are considered at a time:\n"
                        "   n_samples <= 0: sum over the whole vmat (e.g. for batch gradient computation)\n"
                        "   n_samples == 1: sum over a single bag at a time (e.g. for stochastic gradient),\n"
                        "                   where each fprop or fbprop advances to the next bag\n"
                        "   otherwise: sum over n_samples bags at a time (e.g. for mini-batch training).\n"
                        "The last column of the target is not given in the call to f; a bag_size input is provided instead.\n"
                        "The inputs to f are: (matrix of bag inputs, the bag size, the bag target, [the bag weight])\n"
                        "(the bag weight is included only if there are weights in the original VMat)."
    );

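// The bag-signal convention documented above can be made concrete with the
// small sketch below. These two helpers are purely illustrative (they are not
// part of the PLearn API and are not used by this class); they assume the
// value they receive comes from the last column of the target vector.
namespace {
inline bool bag_starts_here(real bag_signal_value)
{ return (int(bag_signal_value) & 1) != 0; }  // bit 0 set ==> first row of a bag
inline bool bag_ends_here(real bag_signal_value)
{ return (int(bag_signal_value) & 2) != 0; }  // bit 1 set ==> last row of a bag
}
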
// SumOverBagsVariable //
SumOverBagsVariable::SumOverBagsVariable():
    average(false),
    max_bag_size(-1),
    n_samples(1),
    transpose(false),
    curpos()
{}

SumOverBagsVariable::SumOverBagsVariable(
        VMat the_vmat, Func the_f, int max_bagsize, int nsamples,
        bool the_average, bool the_transpose, bool call_build_):
    inherited(nonInputParentsOfPath(the_f->inputs,the_f->outputs),
            the_f->outputs[0]->length(),
            the_f->outputs[0]->width(),
            call_build_),
    vmat(the_vmat), f(the_f),
    average(the_average),
    max_bag_size(max_bagsize), n_samples(nsamples),
    transpose(the_transpose),
    curpos(0), bag_size(0)
{
    if (call_build_)
        build_();
}

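// Usage sketch (illustrative only): how such a variable might be built from
// existing objects. The names 'bag_data' and 'bag_cost' are hypothetical and
// the argument values are arbitrary examples.
//
//     VMat bag_data = ...;   // its targetsize() includes the bag-signal column
//     Func bag_cost = ...;   // maps (inputs matrix, bag size, target[, weight]) to a single output Var
//     Var summed_cost = new SumOverBagsVariable(bag_data, bag_cost,
//                                               20,     // max_bag_size
//                                               1,      // n_samples: one bag per fprop
//                                               false,  // average
//                                               false,  // transpose
//                                               true);  // call_build_
//
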
// build //
void SumOverBagsVariable::build()
{
    inherited::build();
    build_();
}

// build_ //
void SumOverBagsVariable::build_()
{
    if (vmat)
    {
        PLASSERT( f );

        varray = nonInputParentsOfPath(f->inputs, f->outputs);
        // We need to rebuild the parent class since a build option changed.
        inherited::build();

        if (f->outputs.size()!=1)
            PLERROR("SumOverBagsVariable: expected a func with a single output variable (you may use concat to form a single output Var)");
        if (vmat->weightsize()!=0 && vmat->weightsize()!=1)
            PLERROR("SumOverBagsVariable expected vmat->weightsize to be 0 or 1");

        if (transpose) {
            input_values.resize(vmat->inputsize(), max_bag_size);
        } else {
            input_values.resize(max_bag_size,vmat->inputsize());
        }
        output_value.resize(f->outputs[0]->nelems());
        output_av = Array<Vec>(output_value);
        gradient_av = Array<Vec>(gradient);
        f->inputs.setDontBpropHere(true);

        bag_size_vec.resize(1);
        bag_target_and_bag_signal.resize(vmat->targetsize());
        bag_target.resize(vmat->targetsize() - 1);
        bag_signal = bag_target_and_bag_signal.subVec(vmat->targetsize()-1,1);
        int ws = vmat->weightsize();
        bag_weight.resize(ws);
        if (ws > 0) {
            f_inputs.resize(4);
            f_inputs[3] = bag_weight;
        } else {
            f_inputs.resize(3);
        }
        f_inputs[0] = input_values.toVec();
        f_inputs[1] = bag_size_vec;
        f_inputs[2] = bag_target;
        unused_gradients.resize(f_inputs.size());
        for (int i=0;i<f_inputs.size();i++) unused_gradients[i] = f_inputs[i].copy();
    }
}

// declareOptions //
void SumOverBagsVariable::declareOptions(OptionList& ol)
{
    declareOption(ol, "f", &SumOverBagsVariable::f, OptionBase::buildoption,
                  "    Func that is applied on each bag, whose input is the following array of Vars:\n"
                  "    (matrix of bag inputs, the bag size, the bag target, [the bag weight]).\n");

    declareOption(ol, "vmat", &SumOverBagsVariable::vmat, OptionBase::buildoption,
                  "    VMatrix that contains the data, with multiple consecutive rows forming one bag.\n"
                  "    The last column of the target indicates the beginning and end of each bag, as follows:\n"
                  "       last_column_of_target == 1 ==> first row\n"
                  "       last_column_of_target == 2 ==> last row\n"
                  "       last_column_of_target == 0 ==> intermediate row\n"
                  "       last_column_of_target == 1+2 == 3 ==> single-row bag (both first and last).\n");

    declareOption(ol, "average", &SumOverBagsVariable::average, OptionBase::buildoption,
                  "    If set to 1, the mean over bags is computed instead of the sum.\n");

    declareOption(ol, "max_bag_size", &SumOverBagsVariable::max_bag_size, OptionBase::buildoption,
                  "    Maximum number of examples in a bag (a larger bag in vmat will trigger a run-time error).\n");

    declareOption(ol, "n_samples", &SumOverBagsVariable::n_samples, OptionBase::buildoption,
                  "    Number of bags to iterate over at each fprop (1 for online gradient, <= 0 for batch).\n");

    declareOption(ol, "transpose", &SumOverBagsVariable::transpose, OptionBase::buildoption,
                  "    If set to 1, the bag inputs will be put in columns instead of rows.\n"
                  "    This can be useful if the Func f takes column vars as inputs.\n");

    inherited::declareOptions(ol);
}

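// A hypothetical PLearn-script fragment setting the options declared above
// (option names match the declareOption calls; the actual VMat and Func
// specifications are elided, so this only illustrates the intended syntax):
//
//     SumOverBagsVariable(
//         vmat = <some VMatrix with a bag-signal column>;
//         f = <some Func>;
//         max_bag_size = 20;
//         n_samples = 1;
//         average = 0;
//         transpose = 0;
//     )
//
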
// recomputeSize //
void SumOverBagsVariable::recomputeSize(int& l, int& w) const
{
    if (f && f->outputs.size()) {
        l = f->outputs[0]->length();
        w = f->outputs[0]->width();
    } else
        l = w = 0;
}


// makeDeepCopyFromShallowCopy //
void SumOverBagsVariable::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(vmat, copies);
    deepCopyField(f, copies);
    deepCopyField(output_value, copies);
    deepCopyField(input_values, copies);
    deepCopyField(bag_size_vec, copies);
    deepCopyField(bag_target_and_bag_signal, copies);
    deepCopyField(bag_target, copies);
    deepCopyField(bag_signal, copies);
    deepCopyField(bag_weight, copies);
    deepCopyField(f_inputs, copies);
    deepCopyField(unused_gradients, copies);
    deepCopyField(output_av, copies);
    deepCopyField(gradient_av, copies);
}


// fpropOneBag //
void SumOverBagsVariable::fpropOneBag(bool do_bprop)
{
    static real dummy_weight=0;
    bool reached_end_of_bag=false;
    if (transpose) {
        input_values.resize(input_values.length(), max_bag_size);
    } else {
        input_values.resize(max_bag_size,input_values.width());
    }
    for (bag_size=0;!reached_end_of_bag;bag_size++)
    {
        if (bag_size>=max_bag_size)
            PLERROR("SumOverBagsVariable: bag size=%d > expected max. bag size(%d)",
                    bag_size,max_bag_size);
        Vec input_value;
        if (transpose) {
            input_value.resize(input_values.length());
        } else {
            input_value = input_values(bag_size);
        }
        if (vmat->weightsize()>0)
        {
            real& weight = bag_weight[0];
            vmat->getExample(curpos,input_value,bag_target_and_bag_signal,weight);
        }
        else
            vmat->getExample(curpos, input_value,
                             bag_target_and_bag_signal, dummy_weight);
        if (bag_size == 0) {
            // It's the first element of the bag: copy its target (without the
            // bag-signal column).
            bag_target << bag_target_and_bag_signal.subVec(
                                    0, bag_target_and_bag_signal.length() - 1);
        } else {
#ifdef BOUNDCHECK
            // Safety check: make sure the target is the same for all elements
            // in the bag.
            Vec targ = bag_target_and_bag_signal.subVec(
                    0, bag_target_and_bag_signal.length() - 1);
            PLASSERT( targ.length() == bag_target.length() );
            for (int i = 0; i < targ.length(); i++)
                if (!is_equal(bag_target[i], targ[i]))
                    PLERROR("In SumOverBagsVariable::fpropOneBag - A bag must "
                            "have the same target across all elements in it");
#endif
        }
        if (transpose) {
            // Need to put input_value into input_values, because it uses a separate
            // storage.
            input_values.column(bag_size) << input_value;
        }
        if (bag_size==0 && !(int(bag_signal[0]) & 1))
            PLERROR("SumOverBagsVariable: data synchronization error, first row of bag has wrong bag signal");
        reached_end_of_bag = (int(bag_signal[0]) & 2) != 0;
        if(++curpos == vmat->length())
        {
            curpos = 0;
            if (!reached_end_of_bag)
            {
                PLERROR("SumOverBagsVariable: last bag of VMatrix is not complete");
                return;
            }
        }
    }
    bag_size_vec[0]=bag_size;
    if (do_bprop)
        f->fbprop(f_inputs,output_av,unused_gradients,gradient_av);
    else
        f->fprop(f_inputs,output_av);
    value += output_value;
}
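
// Worked illustration (hypothetical data, comments only): for a 3-row bag whose
// last target column reads 1, 0, 2, fpropOneBag() fills the first three rows of
// input_values (columns if 'transpose' is set) with the three input vectors,
// sets bag_size_vec[0] = 3, copies the target of the first row (minus the
// bag-signal column) into bag_target, then evaluates f on
// (input_values, bag_size_vec, bag_target[, bag_weight]) and adds the result
// to 'value'.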

// fprop //
void SumOverBagsVariable::fprop()
{
    value.clear();
    f->recomputeParents();
    if (n_samples==1)
        fpropOneBag();
    else if (n_samples<=0) // one pass through the whole data set
    {
        curpos=0;
        int count_bags = 0;
        do {
            fpropOneBag();
            count_bags++;
        }
        while (curpos>0);
        if (average) {
            value /= count_bags;
        }
    }
    else {
        for (int i=0;i<n_samples;i++)
            fpropOneBag();
        if (average) {
            value /= n_samples;
        }
    }
}


// fbprop //
void SumOverBagsVariable::fbprop()
{
    value.clear();
    f->recomputeParents();
    if (n_samples==1)
        fpropOneBag(true);
    else if (n_samples<=0) // one pass through the whole data set
    {
        if (average) {
            // We don't know in advance how many bags there are, so the gradient
            // can't be propagated correctly.
            PLERROR("In SumOverBagsVariable::fbprop - If you want to get the average, you must tell me the number of bags in n_samples > 0, because I'm too dumb to guess it.");
        }
        curpos = 0;
        do {
            fpropOneBag(true);
        }
        while (curpos>0);
    }
    else {
        if (average) {
            // Scale the incoming gradient so that each bag receives
            // gradient / n_samples, consistent with the mean computed in fprop.
            gradient /= n_samples;
        }
        for (int i=0;i<n_samples;i++)
            fpropOneBag(true);
        if (average) {
            value /= n_samples;
        }
    }
}

// bprop //
void SumOverBagsVariable::bprop()
{
    fbprop();
}

// printInfo //
void SumOverBagsVariable::printInfo(bool print_gradient)
{
    f->fproppath.printInfo(print_gradient);
    cout << info() << " : " << getName() << "(max_bag_size=" << max_bag_size;
    cout << ", n_samples=" << n_samples << ") = " << value;
    if (print_gradient) cout << " gradient=" << gradient;
    cout << endl;
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :