// -*- C++ -*-

// RegressionTreeLeave.cc
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
// 
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
// 
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
// 
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// 
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* ********************************************************************************
 * $Id: RegressionTreeLeave.cc, v 1.0 2004/07/19 10:00:00 Bengio/Kegl/Godbout    *
 * This file is part of the PLearn library.                                      *
 ******************************************************************************** */

#include "RegressionTreeLeave.h"
#include "RegressionTreeRegisters.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(RegressionTreeLeave,
                        "Object to represent the leaves of a regression tree.",
                        "It maintains the necessary statistics to compute the output and the train error\n"
                        "of the samples in the leaf.\n"
    );
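
// A minimal usage sketch (illustrative only; it assumes the interfaces
// declared in this file and in RegressionTreeRegisters, with a train_set,
// leave_id and row range supplied by the caller):
//
//     PP<RegressionTreeLeave> leave = new RegressionTreeLeave();
//     leave->initLeave(train_set, leave_id, false); // attach registers and id
//     leave->initStats();                           // reset the running sums
//     for (int row = first; row < last; row++)
//         leave->addRow(row);                       // accumulate statistics
//     Vec output(2), error(3);
//     leave->getOutputAndError(output, error);      // prediction and loss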

int RegressionTreeLeave::verbosity = 0;
Vec RegressionTreeLeave::dummy_vec;
bool RegressionTreeLeave::output_confidence_target = false;

RegressionTreeLeave::RegressionTreeLeave():
    missing_leave(false),
    loss_function_weight(0),
    id(0),
    length_(0),
    weights_sum(0),
    targets_sum(0),
    weighted_targets_sum(0),
    weighted_squared_targets_sum(0),
    loss_function_factor(1)
{
    build();
}

RegressionTreeLeave::~RegressionTreeLeave()
{
}

void RegressionTreeLeave::declareOptions(OptionList& ol)
{ 
    declareOption(ol, "id", &RegressionTreeLeave::id, OptionBase::buildoption,
                  "The id of this leaf, used to register the rows in the RegressionTreeRegisters\n");
    declareOption(ol, "missing_leave", &RegressionTreeLeave::missing_leave, OptionBase::buildoption,
                  "Indicates that this leaf holds the samples with missing values for the split feature\n");
    declareOption(ol, "loss_function_weight", &RegressionTreeLeave::loss_function_weight, OptionBase::buildoption,
                  "The hyper-parameter that balances the error and the confidence factor\n");
    declareStaticOption(ol, "verbosity", &RegressionTreeLeave::verbosity, OptionBase::buildoption,
                  "The desired level of verbosity\n");
    declareOption(ol, "train_set", &RegressionTreeLeave::train_set, 
                  OptionBase::buildoption | OptionBase::nosave,
                  "The train set with the sorted row index matrix and the leaf id vector\n");
    declareOption(ol, "length", &RegressionTreeLeave::length_, OptionBase::learntoption,
                  "The number of rows in this leaf\n");
    declareOption(ol, "weights_sum", &RegressionTreeLeave::weights_sum, OptionBase::learntoption,
                  "The sum of weights for the samples in this leaf\n");
    declareOption(ol, "targets_sum", &RegressionTreeLeave::targets_sum, OptionBase::learntoption,
                  "The sum of targets for the samples in this leaf\n");
    declareOption(ol, "weighted_targets_sum", &RegressionTreeLeave::weighted_targets_sum, OptionBase::learntoption,
                  "The sum of weighted targets for the samples in this leaf\n");
    declareOption(ol, "weighted_squared_targets_sum", &RegressionTreeLeave::weighted_squared_targets_sum, OptionBase::learntoption,
                  "The sum of weighted squared target values for the samples in this leaf\n");
    declareOption(ol, "loss_function_factor", &RegressionTreeLeave::loss_function_factor, OptionBase::learntoption,
                  "2 / pow(loss_function_weight, 2.0).\n");

    declareStaticOption(ol, "output_confidence_target",
                  &RegressionTreeLeave::output_confidence_target,
                  OptionBase::buildoption,
                  "If false, the output size is 1 and contains only the predicted"
                  " target. Otherwise, the output size is 2 and also contains"
                  " the confidence\n");

    declareStaticOption(ol, "output", &RegressionTreeLeave::dummy_vec, OptionBase::nosave,
                  "DEPRECATED");
    declareStaticOption(ol, "error", &RegressionTreeLeave::dummy_vec, OptionBase::nosave,
                  "DEPRECATED");

    inherited::declareOptions(ol);
}

void RegressionTreeLeave::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    // We don't deep copy train_set: we never modify it, and it is only a
    // link to the RegressionTree train_set.
//    deepCopyField(train_set, copies);
}

void RegressionTreeLeave::build()
{
    inherited::build();
    build_();
}

void RegressionTreeLeave::build_()
{
}

void RegressionTreeLeave::initLeave(PP<RegressionTreeRegisters> the_train_set, RTR_type_id the_id, bool the_missing_leave)
{
    train_set = the_train_set;
    id = the_id;
    missing_leave = the_missing_leave;
}

void RegressionTreeLeave::initStats()
{
    length_ = 0;
    weights_sum = 0.0;
    targets_sum = 0.0;
    weighted_targets_sum = 0.0;
    weighted_squared_targets_sum = 0.0;
    if (loss_function_weight != 0.0)
        loss_function_factor = 2.0 / pow(loss_function_weight, 2);
    else loss_function_factor = 1.0;
}
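
// With loss_function_weight = w, the factor 2 / w^2 rescales the squared
// error so that a unit-weight residual of magnitude w contributes a loss
// of 2; when the weight is left at 0, the raw squared error is kept
// (factor 1).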

void RegressionTreeLeave::addRow(int row, real target, real weight)
{
    length_ += 1;
    weights_sum += weight;
    targets_sum += target;
    real squared_target = pow(target, 2);
    weighted_targets_sum += weight * target;
    weighted_squared_targets_sum += weight * squared_target;
}
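
// The running sums above are the sufficient statistics of the leaf: with
// W = weights_sum, S = weighted_targets_sum and Q = weighted_squared_targets_sum,
// the leaf prediction is S / W and the weighted squared error around it is
// Q - S * S / W, so rows can be added (or removed below) in O(1) without
// revisiting the samples already seen.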

void RegressionTreeLeave::addRow(int row)
{
    real weight = train_set->getWeight(row);
    real target = train_set->getTarget(row);
    addRow(row, target, weight);
}

void RegressionTreeLeave::addRow(int row, real target, real weight,
                                 Vec outputv, Vec errorv)
{
    addRow(row, target, weight);
    getOutputAndError(outputv, errorv);
}

void RegressionTreeLeave::addRow(int row, Vec outputv, Vec errorv)
{
    addRow(row);
    getOutputAndError(outputv, errorv);
}

void RegressionTreeLeave::removeRow(int row, Vec output, Vec error)
{
    real weight = train_set->getWeight(row);
    real target = train_set->getTarget(row);
    removeRow(row, target, weight, output, error);
}

void RegressionTreeLeave::removeRow(int row, real target, real weight)
{
    length_ -= 1;
    weights_sum -= weight;
    targets_sum -= target;
    real squared_target = pow(target, 2);
    weighted_targets_sum -= weight * target;
    weighted_squared_targets_sum -= weight * squared_target;
}

void RegressionTreeLeave::removeRow(int row, real target, real weight,
                                    Vec outputv, Vec errorv)
{
    removeRow(row, target, weight);
    getOutputAndError(outputv, errorv);
}

void RegressionTreeLeave::getOutputAndError(Vec& output, Vec& error) const
{
    real conf = 0;
    if (length_ > 0) {
        output[0] = weighted_targets_sum / weights_sum;
        if (!RTR_HAVE_MISSING || !missing_leave)
        {
            // We put the most frequent case first as an optimisation.
            conf = 1.0;
            error[0] = ((weights_sum * output[0] * output[0]) -
                        (2.0 * weighted_targets_sum * output[0]) + weighted_squared_targets_sum)
                * loss_function_factor;
            if (error[0] < 1E-10) {error[0] = 0.0;} //PLWARNING("E[0] <1e-10: %f",error[0]);}
            error[1] = 0.0;
            real weights_sum_factor = weights_sum * loss_function_factor;
            if (error[0] > weights_sum_factor) error[2] = weights_sum_factor;
            else error[2] = error[0];
        }
        else
        {
            error[0] = 0.0;
            error[1] = weights_sum;
            error[2] = 0.0;
        }
    } else {
        output[0] = MISSING_VALUE;
        error.clear();
    }
    if (output_confidence_target) output[1] = conf;
}
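
// Note on error[0] above: expanding sum_i w_i * (y_i - out)^2 with
// out = weighted_targets_sum / weights_sum gives exactly
//     weights_sum * out^2 - 2 * out * weighted_targets_sum + weighted_squared_targets_sum,
// i.e. error[0] is the weighted sum of squared residuals, rescaled by
// loss_function_factor. The clamp on error[2] caps the average per-sample
// loss at loss_function_factor, i.e. at 2 / loss_function_weight^2 when
// loss_function_weight is set.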

TVec<string> RegressionTreeLeave::getOutputNames() const
{
    TVec<string> ret;
    ret.append("val_pred");
    if (output_confidence_target)
        ret.append("confidence");
    return ret;
}

void RegressionTreeLeave::printStats()
{
    cout << " l " << length_;
    Vec output(2);
    Vec error(3);
    getOutputAndError(output, error);
    cout << " o0 " << output[0];
    if (output_confidence_target)
        cout << " o1 " << output[1];
    cout << " e0 " << error[0];
    cout << " e1 " << error[1];
    cout << " ws " << weights_sum;
    cout << " ts " << targets_sum;
    cout << " wts " << weighted_targets_sum;
    cout << " wsts " << weighted_squared_targets_sum;
    cout << " wts/ws " << weighted_targets_sum / weights_sum;
    cout << " wsts/ws " << weighted_squared_targets_sum / weights_sum;
    cout << " sqrt(wsts/ws) " << sqrt(weighted_squared_targets_sum / weights_sum);
    cout << endl;
}

bool RegressionTreeLeave::uniqTarget()
{
    if (classname() == "RegressionTreeLeave") {
        real wts_w = weighted_targets_sum / weights_sum;
        real wsts_w = sqrt(weighted_squared_targets_sum / weights_sum);
        return fast_is_equal(wts_w, wsts_w);
    } else
        PLERROR("In RegressionTreeLeave::uniqTarget, subclasses must reimplement it.");
    return false; // never reached: PLERROR throws; this silences the
                  // missing-return warning
}
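
// uniqTarget compares the weighted mean S / W with the weighted RMS
// sqrt(Q / W). The RMS is always >= |mean|, with equality exactly when all
// the (weighted) targets share a single value; note that, as written, the
// test also requires that common value to be non-negative, since the RMS is.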

void RegressionTreeLeave::addLeave(PP<RegressionTreeLeave> leave)
{
    if (leave->classname() == "RegressionTreeLeave" && classname() == "RegressionTreeLeave") {
        length_ += leave->length_;
        weights_sum += leave->weights_sum;
        targets_sum += leave->targets_sum;
        weighted_targets_sum += leave->weighted_targets_sum;
        weighted_squared_targets_sum += leave->weighted_squared_targets_sum;
    } else
        PLERROR("In RegressionTreeLeave::addLeave, subclass %s or %s must reimplement it.",
                classname().c_str(), leave->classname().c_str());
}

void RegressionTreeLeave::removeLeave(PP<RegressionTreeLeave> leave)
{
    if (leave->classname() == "RegressionTreeLeave" && classname() == "RegressionTreeLeave") {
        length_ -= leave->length_;
        weights_sum -= leave->weights_sum;
        targets_sum -= leave->targets_sum;
        weighted_targets_sum -= leave->weighted_targets_sum;
        weighted_squared_targets_sum -= leave->weighted_squared_targets_sum;
    } else
        PLERROR("In RegressionTreeLeave::removeLeave, subclass %s or %s must reimplement it.",
                classname().c_str(), leave->classname().c_str());
}
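
// addLeave/removeLeave merge in or subtract out the sufficient statistics
// of a whole sibling leaf at once; since all four accumulators are plain
// sums, combining two leaves is equivalent to having added their rows one
// by one.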


void RegressionTreeLeave::verbose(string the_msg, int the_level)
{
    if (verbosity >= the_level)
        cout << the_msg << endl;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :