// -*- C++ -*-

// RegressionTreeMulticlassLeave.cc
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
// 
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
// 
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
// 
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// 
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* ********************************************************************************
 * $Id: RegressionTreeMulticlassLeave.cc, v 1.0 2004/07/19 10:00:00 Bengio/Kegl/Godbout *
 * This file is part of the PLearn library.                                      *
 ******************************************************************************** */

#include "RegressionTreeMulticlassLeave.h"
#include "RegressionTreeRegisters.h"
#include <plearn/math/TMat_maths_impl.h>

namespace PLearn {
using namespace std;
PLEARN_IMPLEMENT_OBJECT(RegressionTreeMulticlassLeave,
                        "Object to represent the leaves of a regression tree.",
                        "It maintains the statistics needed to compute the output and the train error\n"
                        "of the samples in the leaf.\n"
    );

RegressionTreeMulticlassLeave::RegressionTreeMulticlassLeave()
    : objective_function("l1")
{
    build();
}

RegressionTreeMulticlassLeave::~RegressionTreeMulticlassLeave()
{
}

void RegressionTreeMulticlassLeave::declareOptions(OptionList& ol)
{
    declareOption(ol, "multiclass_outputs",
                  &RegressionTreeMulticlassLeave::multiclass_outputs,
                  OptionBase::buildoption,
                  "A vector of the possible output values when solving a multiclass problem.\n"
                  "The leaf will output the value with the largest weight sum.");
    declareOption(ol, "objective_function",
                  &RegressionTreeMulticlassLeave::objective_function,
                  OptionBase::buildoption,
                  "The function used to compute the leaf error.\n"
                  "Currently supported values are l1 and l2 (default is l1).");

    declareOption(ol, "multiclass_weights_sum",
                  &RegressionTreeMulticlassLeave::multiclass_weights_sum,
                  OptionBase::learntoption,
                  "A vector counting the weight sum of each possible output "
                  "for the samples in this leaf.\n");
    declareOption(ol, "l1_loss_function_factor",
                  &RegressionTreeMulticlassLeave::l1_loss_function_factor,
                  OptionBase::learntoption,
                  "2 / loss_function_weight.\n");
    declareOption(ol, "l2_loss_function_factor",
                  &RegressionTreeMulticlassLeave::l2_loss_function_factor,
                  OptionBase::learntoption,
                  "2 / pow(loss_function_weight, 2.0).\n");
    inherited::declareOptions(ol);
}

void RegressionTreeMulticlassLeave::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(multiclass_outputs, copies);
    deepCopyField(objective_function, copies);
    deepCopyField(l1_loss_function_factor, copies);
    deepCopyField(l2_loss_function_factor, copies);
    deepCopyField(multiclass_weights_sum, copies);
}

void RegressionTreeMulticlassLeave::build()
{
    inherited::build();
    build_();
}

void RegressionTreeMulticlassLeave::build_()
{
}

void RegressionTreeMulticlassLeave::initStats()
{
    length_ = 0;
    weights_sum = 0.0;
    if (loss_function_weight != 0.0)
    {
        l1_loss_function_factor = 2.0 / loss_function_weight;
        l2_loss_function_factor = 2.0 / pow(loss_function_weight, 2);
    }
    else
    {
        l1_loss_function_factor = 1.0;
        l2_loss_function_factor = 1.0;
    }
    multiclass_weights_sum.resize(multiclass_outputs.length());
    multiclass_weights_sum.fill(0);
}

void RegressionTreeMulticlassLeave::addRow(int row)
{
    real weight = train_set->getWeight(row);
    real target = train_set->getTarget(row);
    addRow(row, target, weight);
}

void RegressionTreeMulticlassLeave::addRow(int row, real target, real weight,
                                           Vec outputv, Vec errorv)
{
    addRow(row, target, weight);
    getOutputAndError(outputv, errorv);
}

void RegressionTreeMulticlassLeave::addRow(int row, real target, real weight)
{
    length_ += 1;
    weights_sum += weight;
    bool multiclass_found = false;
    // If the targets are 0,1,2,... this lookup can be optimized to
    // multiclass_weights_sum[target]; in the general case, a lookup table
    // mapping each target value to its index does the same job, as sketched below.
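    // A minimal sketch of that lookup table, kept in comments (hypothetical
    // helper, not part of the original code; would require <map>):
    //
    //     map<real, int> target_to_index;                // built once, e.g. in initStats()
    //     for (int i = 0; i < multiclass_outputs.length(); i++)
    //         target_to_index[multiclass_outputs[i]] = i;
    //     ...
    //     map<real, int>::const_iterator it = target_to_index.find(target);
    //     if (it != target_to_index.end())
    //         multiclass_weights_sum[it->second] += weight;  // O(log k) per row instead of O(k)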
    for (int mc_ind = 0; mc_ind < multiclass_outputs.length(); mc_ind++)
    {
        if (target == multiclass_outputs[mc_ind])
        {
            multiclass_weights_sum[mc_ind] += weight;
            multiclass_found = true;
            break;
        }
    }
    if (!multiclass_found)
        PLERROR("RegressionTreeMulticlassLeave: Unknown target: %g row: %d\n",
                target, row);
}

void RegressionTreeMulticlassLeave::addRow(int row, Vec outputv, Vec errorv)
{
    addRow(row);
    getOutputAndError(outputv, errorv);
}

void RegressionTreeMulticlassLeave::removeRow(int row, Vec outputv, Vec errorv)
{
    real weight = train_set->getWeight(row);
    real target = train_set->getTarget(row);
    removeRow(row, target, weight, outputv, errorv);
}

void RegressionTreeMulticlassLeave::removeRow(int row, real target, real weight,
                                              Vec outputv, Vec errorv)
{
    removeRow(row, target, weight);
    getOutputAndError(outputv, errorv);
}

void RegressionTreeMulticlassLeave::removeRow(int row, real target, real weight)
{
    length_ -= 1;
    weights_sum -= weight;
    PLASSERT(length_ >= 0);
    PLASSERT(weights_sum >= 0);
    PLASSERT(length_ > 0 || weights_sum == 0);
    bool found = false;
    // Same linear scan as in addRow; the same lookup-table optimization applies.
    for (int mc_ind = 0; mc_ind < multiclass_outputs.length(); mc_ind++)
    {
        if (target == multiclass_outputs[mc_ind])
        {
            multiclass_weights_sum[mc_ind] -= weight;
            found = true;
            break;
        }
    }
    PLASSERT(found);
}

void RegressionTreeMulticlassLeave::getOutputAndError(Vec& output, Vec& error) const
{
#ifdef BOUNDCHECK
    if (multiclass_outputs.length() <= 0)
        PLERROR("In RegressionTreeMulticlassLeave::getOutputAndError() -"
                " multiclass_outputs must not be empty");
#endif
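    // An empty leaf cannot make a prediction: report a missing value as the
    // output and a zero error vector.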
    if (length_ == 0)
    {
        output.clear();
        output[0] = MISSING_VALUE;
        error.clear();
        return;
    }
    real conf = 0;
    int mc_winner = 0;
    // Find the index of the class with the largest weight sum (argmax).
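    // A more compact way to get this argmax, assuming contiguous TVec storage
    // (a commented sketch, not the original code; would require <algorithm>):
    //
    //     int mc_winner = int(std::max_element(
    //         multiclass_weights_sum.data(),
    //         multiclass_weights_sum.data() + multiclass_weights_sum.length())
    //         - multiclass_weights_sum.data());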
    for (int mc_ind = 1; mc_ind < multiclass_outputs.length(); mc_ind++)
    {
        if (multiclass_weights_sum[mc_ind] > multiclass_weights_sum[mc_winner])
            mc_winner = mc_ind;
    }
    output[0] = multiclass_outputs[mc_winner];
    if (RTR_HAVE_MISSING && missing_leave)
    {
        error[0] = 0.0;
        error[1] = weights_sum;
        error[2] = 0.0;
    }
    else
    {
        conf = multiclass_weights_sum[mc_winner] / weights_sum;
        error[0] = 0.0;
        if (objective_function == "l1")
        {
            for (int mc_ind = 0; mc_ind < multiclass_outputs.length(); mc_ind++)
            {
                error[0] += abs(output[0] - multiclass_outputs[mc_ind])
                    * multiclass_weights_sum[mc_ind];
            }
            error[0] *= l1_loss_function_factor * length_ / weights_sum;
            if (error[0] < 1E-10) error[0] = 0.0;
            if (error[0] > weights_sum * l1_loss_function_factor)
                error[2] = weights_sum * l1_loss_function_factor;
            else error[2] = error[0];
        }
        else
        {
            for (int mc_ind = 0; mc_ind < multiclass_outputs.length(); mc_ind++)
            {
                error[0] += pow(output[0] - multiclass_outputs[mc_ind], 2)
                    * multiclass_weights_sum[mc_ind];
            }
            error[0] *= l2_loss_function_factor * length_ / weights_sum;
            if (error[0] < 1E-10) error[0] = 0.0;
            if (error[0] > weights_sum * l2_loss_function_factor)
                error[2] = weights_sum * l2_loss_function_factor;
            else error[2] = error[0];
        }
        error[1] = (1.0 - conf) * length_;
    }
    if (output_confidence_target) output[1] = conf;
}
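
// A minimal usage sketch, kept in comments (illustrative only; the option
// string format and default option values are assumptions, not taken from
// the original source). With possible outputs {0, 1, 2}, the leaf predicts
// the class with the largest weight sum:
//
//     PP<RegressionTreeMulticlassLeave> leave = new RegressionTreeMulticlassLeave();
//     leave->setOption("multiclass_outputs", "[0 1 2]");
//     leave->build();
//     leave->initStats();
//     leave->addRow(0, /*target=*/1, /*weight=*/1.0);
//     leave->addRow(1, /*target=*/1, /*weight=*/0.5);
//     leave->addRow(2, /*target=*/2, /*weight=*/1.0);
//     Vec output(2), error(3);
//     leave->getOutputAndError(output, error);  // output[0] == 1: class 1 has
//                                               // weight sum 1.5 > 1.0 for class 2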

TVec<string> RegressionTreeMulticlassLeave::getOutputNames() const
{
    TVec<string> ret;
    ret.append("class_pred");
    if (output_confidence_target)
        ret.append("confidence");
    return ret;
}

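// Merge the statistics of another leave of the same class into this one;
// removeLeave below subtracts them back out.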
void RegressionTreeMulticlassLeave::addLeave(PP<RegressionTreeLeave> leave_)
{
    PP<RegressionTreeMulticlassLeave> leave = (PP<RegressionTreeMulticlassLeave>) leave_;
    if (leave->classname() == classname())
    {
        length_ += leave->length_;
        weights_sum += leave->weights_sum;
        multiclass_weights_sum += leave->multiclass_weights_sum;
    }
    else
        PLERROR("In %s::addLeave the leave to add should have the same class. It has %s.",
                classname().c_str(), leave->classname().c_str());
}

void RegressionTreeMulticlassLeave::removeLeave(PP<RegressionTreeLeave> leave_)
{
    PP<RegressionTreeMulticlassLeave> leave = (PP<RegressionTreeMulticlassLeave>) leave_;
    if (leave->classname() == classname())
    {
        length_ -= leave->length_;
        weights_sum -= leave->weights_sum;
        multiclass_weights_sum -= leave->multiclass_weights_sum;
    }
    else
        PLERROR("In %s::removeLeave the leave to remove should have the same class. It has %s.",
                classname().c_str(), leave->classname().c_str());
}

void RegressionTreeMulticlassLeave::printStats()
{
    cout << " l " << length_;
    Vec output(2);
    Vec error(3);
    getOutputAndError(output, error);
    cout << " o0 " << output[0];
    cout << " o1 " << output[1];
    cout << " e0 " << error[0];
    cout << " e1 " << error[1];
    cout << " ws " << weights_sum;
    cout << endl;
    cout << " mws " << multiclass_weights_sum << endl;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :