// -*- C++ -*-

// MultiMaxVariable.cc
//
// Copyright (C) 2007 Simon Lemieux, Pascal Vincent
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Simon Lemieux, Pascal Vincent

#include "MultiMaxVariable.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    MultiMaxVariable,
    "Different max variables done on separate groups of the input",
    "This variable computes a max function (softmax, log-softmax, hardmax, etc., determined by the field computation_type)"
    "\non subvectors of the input, whose lengths are defined by the field groupsizes (or groupsize if all the groups have the same size)."
    "\n"
    "\nExample:"
    "\nif groupsizes = [1,2,3], computation_type = 'S' (for softmax), and the input vector is [1,2,3,4,5,6],"
    "\nthe result will be [softmax([1]), softmax([2,3]), softmax([4,5,6])]."
    "\n"
    "\nNote: in that example, matValue.width() of the variable must be 1+2+3 = 6." );

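// Illustrative usage sketch for the example above (kept as a comment; the
// Var(length, width) and TVec<int>(n) setup calls follow the usual PLearn
// constructors, but treat the exact recipe as an assumption, not a tested one):
//
//     TVec<int> sizes(3);
//     sizes[0] = 1; sizes[1] = 2; sizes[2] = 3;           // groups of width 1, 2 and 3
//     Var input(1, 6);                                    // one row of width 1+2+3 = 6
//     Var out = new MultiMaxVariable(input, sizes, 'S');  // grouped softmax
//     out->fprop();
//     // row 0 of out->matValue now holds
//     // [softmax([x1]), softmax([x2,x3]), softmax([x4,x5,x6])]
//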

MultiMaxVariable::MultiMaxVariable(Variable* input, TVec<int> groupsizes, char computation_type)
    : inherited(input, input->length(), input->width()),
      groupsizes(groupsizes),
      computation_type(computation_type)
{
    build_();
}


MultiMaxVariable::MultiMaxVariable(Variable* input, int groupsize, char computation_type)
    : inherited(input, input->length(), input->width()),
      computation_type(computation_type),
      groupsize(groupsize)
{
    build_();
}

void MultiMaxVariable::recomputeSize(int& l, int& w) const
{
    if (input) {
        l = input->length();
        w = input->width();
    } else
        l = w = 0;
}

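// Applies the transformation selected by computation_type to each group of
// groupsizes[i] consecutive entries of every row of input->matValue, writing
// the result into the corresponding positions of matValue.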
// ### computes value from input's value
void MultiMaxVariable::fprop()
{
    int k;
    Mat inputValue = input->matValue;

    Vec inputValue_n;
    Vec value_n;

    for(int n=0; n<inputValue.length(); n++)
    {
        k=0;
        inputValue_n = inputValue(n);
        value_n = matValue(n);

        for(int i=0; i<groupsizes.length(); i++)
        {
            switch(computation_type)
            {
//softmax
            case 'S':
//          softmax(v.subVec(k,k+groupsizes[i]), value.subVec(k,k+groupsizes[i]));
                softmax_range(inputValue_n, value_n, k, groupsizes[i]);
                break;
//log_softmax
            case 'L':
//          log_softmax(v.subVec(k,k+groupsizes[i]), value.subVec(k,k+groupsizes[i]));
                logSoftmax_range(inputValue_n, value_n, k, groupsizes[i]);
                break;
//hardmax_value
            case 'H':
                hardMax_range(inputValue_n, value_n, k, groupsizes[i], true);
                break;
//hardmax
            case 'h':
                hardMax_range(inputValue_n, value_n, k, groupsizes[i], false);
                break;
//random_softmax_value
            case 'R':
                softmax_range(inputValue_n, value_n, k, groupsizes[i]);
                //TODO : RANDOM
                PLERROR("computation_type 'R' not fully implemented yet");
                break;
//random_softmax
            case 'r':
                softmax_range(inputValue_n, value_n, k, groupsizes[i]);
                //TODO : RANDOM
                PLERROR("computation_type 'r' not fully implemented yet");
                break;
            default:
                PLERROR("invalid computation_type in MultiMaxVariable");
            }
            k+=groupsizes[i];
        }
    }
}

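// Accumulates into input->matGradient the gradient of each group, using the
// bprop rule matching computation_type ('h', 'R' and 'r' are not handled yet).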
// ### computes input's gradient from gradient
void MultiMaxVariable::bprop()
{
    int k;
    Mat inputGradient = input->matGradient;
    int l = inputGradient.length();
    Vec inputGradient_n;
    Vec value_n;
    Vec gradient_n;
    for(int n=0; n<l; n++)
    {
        inputGradient_n = inputGradient(n);
        value_n = matValue(n);
        gradient_n = matGradient(n);
        k=0;
        for(int i=0; i<groupsizes.length(); i++)
        {
            switch(computation_type)
            {
//softmax
            case 'S':
                bpropSoftMax(inputGradient_n, gradient_n, value_n, k, groupsizes[i]);
                break;
//log_softmax
            case 'L':
                // here too the code was copied, only the "bounds" of the summation were changed
                bpropLogSoftMax(inputGradient_n, gradient_n, value_n, k, groupsizes[i]);
                break;
//hardmax_value
            case 'H':
                bpropHardMaxValue(inputGradient_n, gradient_n, value_n, k, groupsizes[i]);
                break;
//hardmax
            case 'h':
                PLERROR("computation_type 'h' not implemented yet");
                break;
//random_softmax_value
            case 'R':
                PLERROR("computation_type 'R' not implemented yet");
                break;
//random_softmax
            case 'r':
                PLERROR("computation_type 'r' not implemented yet");
                break;
            default:
                PLERROR("unable to bprop because of invalid computation_type");
            }
            k+=groupsizes[i];
        }
    }
}
// ### You can implement these methods:
// void MultiMaxVariable::bbprop() {}
// void MultiMaxVariable::symbolicBprop() {}
// void MultiMaxVariable::rfprop() {}


// ### Nothing to add here, simply calls build_
void MultiMaxVariable::build()
{
    inherited::build();
    build_();
}

void MultiMaxVariable::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // ### Call deepCopyField on all "pointer-like" fields
    // ### that you wish to be deepCopied rather than
    // ### shallow-copied.
    // ### ex:
    deepCopyField(groupsizes, copies);
    // ### If you want to deepCopy a Var field:
    // varDeepCopyField(somevariable, copies);
}

void MultiMaxVariable::declareOptions(OptionList& ol)
{
    // ### Declare all of this object's options here.
    // ### For the "flags" of each option, you should typically specify
    // ### one of OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. If you don't provide one of these three,
    // ### this option will be ignored when loading values from a script.
    // ### You can also combine flags, for example with OptionBase::nosave:
    // ### (OptionBase::buildoption | OptionBase::nosave)

    declareOption(ol, "groupsizes", &MultiMaxVariable::groupsizes,
                  OptionBase::buildoption,
                  "Tells how to \"divide\" the input into groups.\nex: groupsizes = [1,2,3] divides the output like this:\n[x1],[x2,x3],[x4,x5,x6], and a maximum algorithm is applied to each group separately.");

    declareOption(ol, "groupsize", &MultiMaxVariable::groupsize,
                  OptionBase::buildoption,
                  "Shortcut if you want all groupsizes to be equal; for example, setting this option to 3 makes groupsizes = [3,3,...,3].");

    declareOption(ol, "computation_type", &MultiMaxVariable::computation_type,
                  OptionBase::buildoption,
                  "Specifies which maximum algorithm should be used on the groups:\n\'S\' = softmax\n\'L\' = log(softmax)\n\'H\' = hardmax*value\n\'h\' = hardmax\n\'R\' = random_softmax*value\n\'r\' = random_softmax");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

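// If groupsizes is empty, it is derived from groupsize (input->width()/groupsize
// copies of groupsize); otherwise the groupsizes must sum to input->width().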
void MultiMaxVariable::build_()
{
    // ### This method should do the real building of the object,
    // ### according to set 'options', in *any* situation.
    // ### Typical situations include:
    // ###  - Initial building of an object from a few user-specified options
    // ###  - Building of a "reloaded" object: i.e. from the complete set of
    // ###    all serialised options.
    // ###  - Updating or "re-building" of an object after a few "tuning"
    // ###    options have been modified.
    // ### You should assume that the parent class' build_() has already been
    // ### called.

    if (input.isNotNull()) // otherwise postpone building until we have an input!
    {
        if (groupsizes.length() <= 0)
        {
            if (groupsize <= 0)
                PLERROR("Groupsize(s) not specified or invalid in MultiMaxVariable");
            if (input->width() % groupsize != 0)
                PLERROR("Invalid groupsize in MultiMaxVariable");

            TVec<int> vec(input->width()/groupsize, groupsize);
            groupsizes = vec;
        }
        else
        {
            int sum = 0;
            for(int i=0; i<groupsizes.length(); i++)
                sum += groupsizes[i];
            if(sum != input->width())
                PLERROR("Invalid groupsizes in MultiMaxVariable");
        }
    }
}


////////////////
// some utils //
////////////////

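// Writes the softmax of x[start..start+length-1] into the same range of y:
// y[i] = exp(x[i]) / sum_{j=start}^{start+length-1} exp(x[j]).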
void MultiMaxVariable::softmax_range(Vec &x, Vec &y, int start, int length)
{
    real somme=0;
    for(int i=start; i<start+length; i++)
        somme += safeexp(x[i]);
    if (somme == 0) PLERROR("trying to divide by 0 in softmax");

    for(int i=start; i<start+length; i++)
        y[i] = safeexp(x[i])/somme;
}

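// Writes the log-softmax of x[start..start+length-1] into the same range of y:
// y[i] = x[i] - log( sum_{j=start}^{start+length-1} exp(x[j]) ).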
void MultiMaxVariable::logSoftmax_range(Vec &x, Vec &y, int start, int length)
{
    real somme=0;
    for(int i=start; i<start+length; i++)
        somme += safeexp(x[i]);

    for(int i=start; i<start+length; i++)
        y[i] = x[i] - safelog(somme);
}

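// Zeroes y[start..start+length-1], then sets the entry at the argmax of x over
// that range to x[argmax] if 'value' is true, or to 1 otherwise.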
void MultiMaxVariable::hardMax_range(Vec &x, Vec &y, int start, int length, bool value)
{
    int indMax=start;
    for(int i=start+1; i<start+length; i++)
        if(x[i] > x[indMax])
            indMax = i;

    for(int i=start; i<start+length; i++)
        y[i] = 0;

    if(value)
        y[indMax] = x[indMax];
    else
        y[indMax] = 1;
}


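// Accumulates the softmax gradient over [start, start+length):
// gradientInput[i] += sum_j gradient[j] * y[i] * (delta_ij - y[j]),
// where y is the softmax output stored in variableValue.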
void MultiMaxVariable::bpropSoftMax(Vec &gradientInput, Vec &gradient, Vec &variableValue, int start, int length)
{
    // loop over the gradient of our vector
    for(int i=start; i<start+length; i++)
    {
        // and add a small contribution to each entry of the input gradient
        for(int j=start; j<start+length; j++)
        {
            // note: this is simply copied from what was previously in SoftmaxVariable's bprop...
            if(i==j)
                gradientInput[i] += gradient[j]*variableValue[i]*(1.-variableValue[i]);
            else
                gradientInput[i] -= gradient[j]*variableValue[i]*variableValue[j];
        }
    }
}


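// Accumulates the log-softmax gradient over [start, start+length):
// gradientInput[i] += gradient[i] - softmax(x)[i] * sum_j gradient[j],
// where softmax(x)[i] is recovered as exp(variableValue[i]).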
// here the already existing LogSoftmax bprop was simply adapted
void MultiMaxVariable::bpropLogSoftMax(Vec &gradientInput, Vec &gradient, Vec &variableValue, int start, int length)
{
    real sum=0.;
    for (int i = start; i < start+length; i++)
        sum += gradient[i];

    for (int i = start; i < start+length; ++i)
        gradientInput[i] += gradient[i] - sum * safeexp(variableValue[i]);
}

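// Accumulates the gradient for the 'H' (hardmax*value) case: the summed
// gradient of the group is added to the input gradient wherever the stored
// output value is nonzero (i.e. at the argmax position).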
void MultiMaxVariable::bpropHardMaxValue(Vec& gradientInput, Vec& gradient, Vec& variableValue, int start, int length)
{
    real sum=0.;
    for(int i=start; i<start+length; i++)
        sum += gradient[i];

    for(int i=start; i<start+length; i++)
        if(variableValue[i] != 0)
            gradientInput[i] += sum;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :