// -*- C++ -*-

// RegressionTree.cc
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* ********************************************************************************
 * $Id: RegressionTree.cc, v 1.0 2004/07/19 10:00:00 Bengio/Kegl/Godbout        *
 * This file is part of the PLearn library.                                     *
 ******************************************************************************** */

#include "RegressionTree.h"
#include "RegressionTreeQueue.h"
#include "RegressionTreeLeave.h"
#include "RegressionTreeRegisters.h"
#include "RegressionTreeNode.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(RegressionTree,
                        "Regression tree algorithm",
                        "Algorithm built to serve as a base regressor for the LocalMedianBoost algorithm.\n"
                        "It can also be used as a stand-alone learner.\n"
                        "It can learn from a weighted train set to represent different distributions on the training set.\n"
                        "It can separate a confidence function from the output when making a prediction.\n"
                        "At each node expansion, it splits the node to maximize the improvement of an objective function\n"
                        "combining the mean square error and a factor of the confidence function.\n"
                        "At each node expansion, it creates 3 nodes: one to hold any samples with a missing value on the\n"
                        "splitting attribute, one for the samples with values less than the value of the splitting attribute,\n"
                        "and one for the others.\n"
    );
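
// A minimal, hypothetical usage sketch of the learner described above, kept
// out of the build with #if 0. regressionTreeUsageSketch is not a real PLearn
// function and the option values are illustrative only; it assumes a VMat
// train set with inputsize() >= 1 and targetsize() == 1 is already available.
#if 0
static void regressionTreeUsageSketch(VMat trainset)
{
    PP<RegressionTree> tree = new RegressionTree();
    tree->missing_is_valid = false;    // missing nodes are not split further
    tree->loss_function_weight = 1.0;  // balances error against confidence
    tree->nstages = 50;                // at most 50 node expansions
    tree->setTrainingSet(trainset);
    tree->build();
    tree->train();

    // Predict on the first training example.
    Vec input(trainset->inputsize()), target(trainset->targetsize());
    Vec output(tree->outputsize());
    real weight;
    trainset->getExample(0, input, target, weight);
    tree->computeOutput(input, output);
}
#endif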

bool RegressionTree::output_confidence_target = false;

RegressionTree::RegressionTree()
    : missing_is_valid(false),
      loss_function_weight(1.0),
      maximum_number_of_nodes(400),
      compute_train_stats(1),
      complexity_penalty_factor(0.0)
{
}

RegressionTree::~RegressionTree()
{
}

void RegressionTree::declareOptions(OptionList& ol)
{
    declareOption(ol, "missing_is_valid", &RegressionTree::missing_is_valid, OptionBase::buildoption,
                  "If set to 1, missing values will be treated as valid, and missing nodes will be candidates for splits.\n");
    declareOption(ol, "loss_function_weight", &RegressionTree::loss_function_weight, OptionBase::buildoption,
                  "The hyperparameter that balances the error and the confidence factor.\n");
    declareOption(ol, "maximum_number_of_nodes", &RegressionTree::maximum_number_of_nodes, OptionBase::buildoption,
                  "The maximum number of nodes for this tree.\n"
                  "(If less than nstages, nstages will be used.)");
    declareOption(ol, "compute_train_stats", &RegressionTree::compute_train_stats, OptionBase::buildoption,
                  "If set to 1 (the default value), the train statistics are computed.\n"
                  "(When using the tree as a base regressor, we don't need the stats, and training is quicker when their computation is suppressed.)");
    declareOption(ol, "complexity_penalty_factor", &RegressionTree::complexity_penalty_factor, OptionBase::buildoption,
                  "A factor that is multiplied with the square root of the number of leaves.\n"
                  "If the error improvement for the next split is less than the result, the algorithm does an early stop.\n"
                  "(When set to 0.0, the default value, it has no impact.)");

    declareStaticOption(ol, "output_confidence_target",
                  &RegressionTree::output_confidence_target,
                  OptionBase::buildoption,
                  "Kept to be able to reload old learners.");

    declareOption(ol, "multiclass_outputs", &RegressionTree::multiclass_outputs, OptionBase::buildoption,
                  "A vector of possible output values when solving a multiclass problem.\n"
                  "When making a prediction, the tree will adjust the output value of each leave to the closest value provided in this vector.");
    declareOption(ol, "leave_template", &RegressionTree::leave_template, OptionBase::buildoption,
                  "The template for the leave objects to create.\n");
    declareOption(ol, "sorted_train_set", &RegressionTree::sorted_train_set,
                  OptionBase::buildoption | OptionBase::nosave,
                  "The train set sorted on all columns. If it is not provided by a\n"
                  " wrapping algorithm, it is created at stage 0.\n");

    declareOption(ol, "root", &RegressionTree::root, OptionBase::learntoption,
                  "The root node of the tree being built.\n");
    declareOption(ol, "priority_queue", &RegressionTree::priority_queue, OptionBase::learntoption,
                  "The heap used to store potential nodes to expand.\n");
    declareOption(ol, "first_leave", &RegressionTree::first_leave, OptionBase::learntoption,
                  "The first leave, built with the root, containing all train set rows at the beginning.\n");
    declareOption(ol, "split_cols", &RegressionTree::split_cols,
                  OptionBase::learntoption,
                  "Contains, in order of node addition, the columns used to"
                  " split the tree.\n");
    declareOption(ol, "split_values", &RegressionTree::split_values,
                  OptionBase::learntoption,
                  "Contains, in order of node addition, the split values.\n");

    declareOption(ol, "first_leave_output", &RegressionTree::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED\n");
    declareOption(ol, "first_leave_error", &RegressionTree::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED\n");


    inherited::declareOptions(ol);
}
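
// The build options above can also be set by name through the generic Object
// option mechanism, which is what PLearn scripts rely on under the hood. A
// small sketch, guarded out of the build; setOptionsSketch is a hypothetical
// helper, not part of the library:
#if 0
static void setOptionsSketch(PP<RegressionTree> tree)
{
    tree->setOption("missing_is_valid", "1");             // also split missing nodes
    tree->setOption("complexity_penalty_factor", "0.01"); // enable early stopping
    tree->setOption("compute_train_stats", "0");          // skip stats when wrapped
    tree->build();                                        // re-build after option changes
}
#endif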

void RegressionTree::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(missing_is_valid, copies);
    deepCopyField(loss_function_weight, copies);
    deepCopyField(maximum_number_of_nodes, copies);
    deepCopyField(compute_train_stats, copies);
    deepCopyField(complexity_penalty_factor, copies);
    deepCopyField(multiclass_outputs, copies);
//    deepCopyField(leave_template, copies); // no deep copy needed: we only read it
    deepCopyField(sorted_train_set, copies);
    deepCopyField(root, copies);
    deepCopyField(priority_queue, copies);
    deepCopyField(first_leave, copies);
    deepCopyField(split_cols, copies);
    deepCopyField(split_values, copies);
    //deepCopyField(tmp_vec, copies); // not needed as we don't use it
}

void RegressionTree::build()
{
    inherited::build();
    build_();
}

void RegressionTree::build_()
{
    PLCHECK(maximum_number_of_nodes<=std::numeric_limits<RTR_type_id>::max());

    PP<VMatrix> the_train_set;
    if(sorted_train_set)
    {
        the_train_set = sorted_train_set;
    }
    else if (train_set)
    {
        the_train_set = train_set;
    }
    if(the_train_set)
    {
        length = the_train_set->length();
        int inputsize = the_train_set->inputsize();
        int targetsize = the_train_set->targetsize();
        int weightsize = the_train_set->weightsize();

        if (length < 1)
            PLERROR("RegressionTree: the training set must contain at least one"
                    " sample, got %d", length);
        if (inputsize < 1)
            PLERROR("RegressionTree: expected inputsize greater than 0, got %d",
                    inputsize);
        if (targetsize != 1)
            PLERROR("RegressionTree: expected targetsize to be 1, got %d",
                    targetsize);
        if (weightsize != 1 && weightsize != 0)
            PLERROR("RegressionTree: expected weightsize to be 1 or 0, got %d",
                    weightsize);
    }

    nodes = new TVec<PP<RegressionTreeNode> >();
    tmp_computeCostsFromOutput.resize(outputsize());

    if (loss_function_weight != 0.0)
    {
        l2_loss_function_factor = 2.0 / pow(loss_function_weight, 2);
        l1_loss_function_factor = 2.0 / loss_function_weight;
    }
    else
    {
        l2_loss_function_factor = 1.0;
        l1_loss_function_factor = 1.0;
    }
}
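
// For reference, the factors computed above are
//     l2_loss_function_factor = 2 / loss_function_weight^2
//     l1_loss_function_factor = 2 / loss_function_weight
// so, for example, loss_function_weight == 2.0 gives an l2 factor of 0.5 and
// an l1 factor of 1.0, while a weight of 0.0 falls back to factors of 1.0.
// These factors enter the base_reward_l2 and base_reward_l1 costs computed
// in computeCostsFromOutputsAndNodes() below.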

void RegressionTree::train()
{
    Profiler::pl_profile_start("RegressionTree::train");

    if(std::numeric_limits<RTR_type_id>::max() < nstages*(missing_is_valid?9:6))
        PLERROR("The type of RTR_type_id(%s) doesn't have enough capacity","RTR_type_id");

    if (stage == 0) initialiseTree();
    PP<ProgressBar> pb;
    if (report_progress)
    {
        pb = new ProgressBar("RegressionTree : train stages: ", nstages);
    }
    for (; stage < nstages; stage++)
    {
        if (stage > 0)
        {
            PP<RegressionTreeNode> node= expandTree();
            if (node == NULL) break;
            split_cols.append(node->getSplitCol());
            split_values.append(node->getSplitValue());
        }
        if (report_progress) pb->update(stage);
    }
    pb = NULL;
#ifndef _OPENMP
    verbose("split_cols: "+tostring(split_cols),2);
    verbose("split_values: "+tostring(split_values),2);
#endif
    if (compute_train_stats < 1){
        Profiler::pl_profile_end("RegressionTree::train");
        return;
    }
    if (report_progress)
    {
        pb = new ProgressBar("RegressionTree : computing the statistics: ", length);
    }
    train_stats->forget();

    real sample_weight;
    Vec sample_input(sorted_train_set->inputsize());
    Vec sample_output(outputsize());
    Vec sample_target(sorted_train_set->targetsize());
    Vec sample_costs(nTestCosts());

    for (int train_sample_index = 0; train_sample_index < length;
         train_sample_index++)
    {
        sorted_train_set->getExample(train_sample_index, sample_input, sample_target, sample_weight);
        computeOutputAndCosts(sample_input,sample_target,sample_output,sample_costs);
        train_stats->update(sample_costs);
        if (report_progress) pb->update(train_sample_index);
    }
    train_stats->finalize();

    Profiler::pl_profile_end("RegressionTree::train");
}

void RegressionTree::verbose(string the_msg, int the_level)
{
    if (verbosity >= the_level)
        pout << the_msg << endl;
}

void RegressionTree::finalize()
{
    inherited::finalize();
    root->finalize();
    priority_queue = 0;
    split_cols = TVec<int>();
    split_values = Vec();
//    leave_template = 0; // keep it: we need it to reload a saved learner
    first_leave = 0;
    // We should not finalize the train_set and the sorted_train_set here,
    // as AdaBoost shares them between different weak learners!
    // AdaBoost will finalize them.
//    if(sorted_train_set)
//        sorted_train_set->finalize();
//    if(train_set->classname()=="RegressionTreeRegisters")
//        ((PP<RegressionTreeRegisters>)train_set)->finalize();
}

void RegressionTree::forget()
{
    stage = 0;
}

void RegressionTree::initialiseTree()
{
    if (!sorted_train_set && train_set->classname()=="RegressionTreeRegisters")
    {
        sorted_train_set=(PP<RegressionTreeRegisters>)train_set;
        sorted_train_set->reinitRegisters();
    }
    else if(!sorted_train_set)
        sorted_train_set = new RegressionTreeRegisters(train_set,
                                                       report_progress,
                                                       verbosity);
    else
    {
        sorted_train_set->reinitRegisters();
    }
    // Set the values common to all leaves.
    // As an optimization, by default they aren't missing leaves.
    leave_template->missing_leave = 0;
    leave_template->loss_function_weight = loss_function_weight;
    leave_template->verbosity = verbosity;
    leave_template->initStats();

    first_leave = ::PLearn::deepCopy(leave_template);
    first_leave->initLeave(sorted_train_set, sorted_train_set->getNextId());

    for (int train_sample_index = 0; train_sample_index < length;
         train_sample_index++)
    {
        first_leave->addRow(train_sample_index);
        first_leave->registerRow(train_sample_index);
    }
    root = new RegressionTreeNode(missing_is_valid);
    root->initNode(this, first_leave);
    root->lookForBestSplit();

    if (maximum_number_of_nodes < nstages) maximum_number_of_nodes = nstages;
    priority_queue = new RegressionTreeQueue(verbosity,maximum_number_of_nodes);
    priority_queue->addHeap(root);
}
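
// After initialisation, the tree therefore consists of a single leave holding
// every training row, and the priority queue holds one candidate (the root)
// whose best split has already been computed by lookForBestSplit(). Growth in
// train() is then best-first: each stage pops the highest-priority queued
// node and expands it via expandTree() below.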

PP<RegressionTreeNode> RegressionTree::expandTree()
{
    // Note: isEmpty() is used as a count here; the queue is empty when it
    // returns a value <= 0.
    if (priority_queue->isEmpty() <= 0)
    {
        verbose("RegressionTree: priority queue empty, stage: " + tostring(stage), 3);
        return NULL;
    }
    PP<RegressionTreeNode> node = priority_queue->popHeap();
    if (node->getErrorImprovment() < complexity_penalty_factor * sqrt((real)stage))
    {
        verbose("RegressionTree: early stopping at stage: " + tostring(stage)
                + ", error improvement: " + tostring(node->getErrorImprovment())
                + ", penalty: " + tostring(complexity_penalty_factor * sqrt((real)stage)), 3);
        return NULL;
    }
    int split_col = node->expandNode();
    if (split_col < 0)
    {
        verbose("RegressionTree: expandNode() returned a negative split column", 3);
        return NULL;
    }

    priority_queue->addHeap(node->left_node);
    priority_queue->addHeap(node->right_node);
    if (missing_is_valid) priority_queue->addHeap(node->missing_node);
    return node;
}
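
// Early-stopping arithmetic, for concreteness: with
// complexity_penalty_factor == 0.1 at stage 100, the penalty above is
// 0.1 * sqrt(100) = 1.0, so the popped node must promise an error improvement
// of at least 1.0 or the tree stops growing at that stage.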

TVec<string> RegressionTree::getTrainCostNames() const
{
    TVec<string> return_msg(5);
    return_msg[0] = "mse";
    return_msg[1] = "base_confidence";
    return_msg[2] = "base_reward_l2";
    return_msg[3] = "base_reward_l1";
    return_msg[4] = "class_error";
    return return_msg;
}

TVec<string> RegressionTree::getTestCostNames() const
{
    TVec<string> costs=getTrainCostNames();
    PP<VMatrix> the_train_set=train_set;
    if(sorted_train_set)
        the_train_set = sorted_train_set;

    PLCHECK_MSG(the_train_set,"In RegressionTree::getTestCostNames() - "
                "a train set is needed!");
    for(int i=0;i<the_train_set->inputsize();i++)
    {
        costs.append("SPLIT_VAR_"+the_train_set->fieldName(i));
    }
    return costs;
}
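
// Concretely, for a train set whose input fields are named "x0" and "x1",
// the test cost names are: mse, base_confidence, base_reward_l2,
// base_reward_l1, class_error, SPLIT_VAR_x0, SPLIT_VAR_x1. The SPLIT_VAR_*
// entries count, per prediction, how many nodes along the path taken through
// the tree split on that input column.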

TVec<string> RegressionTree::getOutputNames() const
{
    return leave_template->getOutputNames();
}

PP<RegressionTreeRegisters> RegressionTree::getSortedTrainingSet() const
{
    return sorted_train_set;
}

void RegressionTree::computeOutput(const Vec& inputv, Vec& outputv) const
{
    computeOutputAndNodes(inputv, outputv);
}

void RegressionTree::computeOutputAndNodes(const Vec& inputv, Vec& outputv,
                                           TVec<PP<RegressionTreeNode> >* nodes) const
{
    root->computeOutputAndNodes(inputv, outputv, nodes);
}

void RegressionTree::computeOutputAndCosts(const Vec& input,
                                           const Vec& target,
                                           Vec& output, Vec& costs) const
{
    PLASSERT(costs.size()==nTestCosts());
    PLASSERT(nodes);
    nodes->resize(0);

    computeOutputAndNodes(input, output, nodes);

    computeCostsFromOutputsAndNodes(input, output, target, *nodes, costs);
}

void RegressionTree::computeCostsFromOutputsAndNodes(const Vec& input,
                                                     const Vec& output,
                                                     const Vec& target,
                                                     const TVec<PP<RegressionTreeNode> >& nodes,
                                                     Vec& costs) const
{
    costs.clear();
    costs[0] = pow((output[0] - target[0]), 2);
    if(leave_template->output_confidence_target) costs[1] = output[1];
    else costs[1] = MISSING_VALUE;
    costs[2] = 1.0 - (l2_loss_function_factor * costs[0]);
    costs[3] = 1.0 - (l1_loss_function_factor * abs(output[0] - target[0]));
    costs[4] = !fast_is_equal(target[0],output[0]);

    for(int i=0;i<nodes.length();i++)
        costs[5+nodes[i]->getSplitCol()]++;
}
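
// A concrete trace of the cost computation above, assuming
// output_confidence_target is false, loss_function_weight == 1.0 (so both
// loss factors equal 2.0), output[0] == 1.5 and target[0] == 2.0:
//     costs[0] = (1.5 - 2.0)^2           = 0.25    (mse)
//     costs[1] = MISSING_VALUE                     (base_confidence)
//     costs[2] = 1.0 - 2.0 * 0.25        = 0.5     (base_reward_l2)
//     costs[3] = 1.0 - 2.0 * |1.5 - 2.0| = 0.0     (base_reward_l1)
//     costs[4] = 1.0                               (class_error: not equal)
// plus one increment of costs[5 + c] for every node on the prediction path
// that splits on column c.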

void RegressionTree::computeCostsFromOutputs(const Vec& input,
                                             const Vec& output,
                                             const Vec& target,
                                             Vec& costs) const
{
    computeOutputAndCosts(input, target, tmp_computeCostsFromOutput, costs);
    PLASSERT(output==tmp_computeCostsFromOutput);
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :