// -*- C++ -*-

// RegressionTreeNode.cc
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* ********************************************************************************
 * $Id: RegressionTreeNode.cc, v 1.0 2004/07/19 10:00:00 Bengio/Kegl/Godbout    *
 * This file is part of the PLearn library.                                     *
 ******************************************************************************** */
#define PL_LOG_MODULE_NAME "RegressionTreeNode"
#include <plearn/io/pl_log.h>

#include "RegressionTreeNode.h"
#include "RegressionTreeRegisters.h"
#include "RegressionTreeLeave.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(RegressionTreeNode,
                        "Object to represent the nodes of a regression tree.",
                        "It may be a final node pointing to a leave.\n"
                        "In that case, it always knows what the best possible split for that leave would be.\n"
                        "It may be an expanded node pointing to 3 children nodes: one for missing values on the splitting attribute,\n"
                        "a left one for samples with values below the value of the splitting attribute, and a right one for the others.\n"
    );

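// A rough sketch of the two states a node can be in (illustrative only, not
// taken from the original sources):
//
//   final node:     node -> leave          (the best potential split is cached)
//   expanded node:  node -> missing_node   (rows with a missing split feature)
//                        -> left_node      (rows with values below the split value)
//                        -> right_node     (the remaining rows)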

int RegressionTreeNode::dummy_int = 0;
Vec RegressionTreeNode::tmp_vec;
PP<RegressionTreeLeave> RegressionTreeNode::dummy_leave_template;
PP<RegressionTreeRegisters> RegressionTreeNode::dummy_train_set;

RegressionTreeNode::RegressionTreeNode():
    missing_is_valid(0),
    split_col(-1),
    split_balance(INT_MAX),
    split_feature_value(REAL_MAX),
    after_split_error(REAL_MAX)
{
    build();
}

RegressionTreeNode::RegressionTreeNode(int missing_is_valid_):
    missing_is_valid(missing_is_valid_),
    split_col(-1),
    split_balance(INT_MAX),
    split_feature_value(REAL_MAX),
    after_split_error(REAL_MAX)
{
    build();
}

RegressionTreeNode::~RegressionTreeNode()
{
}

void RegressionTreeNode::finalize(){
    // These variables are not needed after training.
    right_leave = 0;
    left_leave = 0;
    leave = 0;
    // missing_leave is still used in computeOutputAndNodes.
    if(right_node)
        right_node->finalize();
    if(left_node)
        left_node->finalize();
    if(missing_node)
        missing_node->finalize();
}

void RegressionTreeNode::declareOptions(OptionList& ol)
{
    declareOption(ol, "missing_is_valid", &RegressionTreeNode::missing_is_valid, OptionBase::buildoption,
                  "If set to 1, missing values are treated as valid, and the missing-values node becomes a candidate for further splits.\n");
    declareOption(ol, "leave", &RegressionTreeNode::leave, OptionBase::buildoption,
                  "The leave holding all the rows belonging to this node when this node is a leave\n");

    declareOption(ol, "leave_output", &RegressionTreeNode::leave_output, OptionBase::learntoption,
                  "The leave output vector\n");
    declareOption(ol, "leave_error", &RegressionTreeNode::leave_error, OptionBase::learntoption,
                  "The leave error vector\n");
    declareOption(ol, "split_col", &RegressionTreeNode::split_col, OptionBase::learntoption,
                  "The dimension (column) of the best split of the leave\n");
    declareOption(ol, "split_balance", &RegressionTreeNode::split_balance, OptionBase::learntoption,
                  "The balance between the left and the right leave\n");
    declareOption(ol, "split_feature_value", &RegressionTreeNode::split_feature_value, OptionBase::learntoption,
                  "The feature value of the split\n");
    declareOption(ol, "after_split_error", &RegressionTreeNode::after_split_error, OptionBase::learntoption,
                  "The error after the split\n");
    declareOption(ol, "missing_node", &RegressionTreeNode::missing_node, OptionBase::learntoption,
                  "The node for the missing values when missing_is_valid is set to 1\n");
    declareOption(ol, "missing_leave", &RegressionTreeNode::missing_leave, OptionBase::learntoption,
                  "The leave containing the rows with missing values after the split\n");
    declareOption(ol, "left_node", &RegressionTreeNode::left_node, OptionBase::learntoption,
                  "The node on the left of the split decision\n");
    declareOption(ol, "left_leave", &RegressionTreeNode::left_leave, OptionBase::learntoption,
                  "The leave with the rows lower than the split feature value after the split\n");
    declareOption(ol, "right_node", &RegressionTreeNode::right_node, OptionBase::learntoption,
                  "The node on the right of the split decision\n");
    declareOption(ol, "right_leave", &RegressionTreeNode::right_leave, OptionBase::learntoption,
                  "The leave with the rows greater than the split feature value after the split\n");

    declareStaticOption(ol, "left_error", &RegressionTreeNode::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The left leave error vector\n");
    declareStaticOption(ol, "right_error", &RegressionTreeNode::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The right leave error vector\n");
    declareStaticOption(ol, "missing_error", &RegressionTreeNode::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The missing leave error vector\n");
    declareStaticOption(ol, "left_output", &RegressionTreeNode::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The left leave output vector\n");
    declareStaticOption(ol, "right_output", &RegressionTreeNode::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The right leave output vector\n");
    declareStaticOption(ol, "missing_output", &RegressionTreeNode::tmp_vec,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The missing leave output vector\n");

    declareStaticOption(ol, "right_leave_id", &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The id of the right leave\n");
    declareStaticOption(ol, "left_leave_id", &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The id of the left leave\n");
    declareStaticOption(ol, "missing_leave_id", &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The id of the missing leave\n");
    declareStaticOption(ol, "leave_id", &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The id of the leave\n");
    declareStaticOption(ol, "length", &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The length of the train set\n");
    declareStaticOption(ol, "inputsize", &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED The inputsize of the train set\n");
    declareStaticOption(ol, "loss_function_weight",
                  &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED Only to reload old saved learner\n");
    declareStaticOption(ol, "verbosity",
                  &RegressionTreeNode::dummy_int,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED Only to reload old saved learner\n");
    declareStaticOption(ol, "leave_template",
                  &RegressionTreeNode::dummy_leave_template,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED Only to reload old saved learner\n");
    declareStaticOption(ol, "train_set",
                  &RegressionTreeNode::dummy_train_set,
                  OptionBase::learntoption | OptionBase::nosave,
                  "DEPRECATED Only to reload old saved learner\n");

    inherited::declareOptions(ol);
}

void RegressionTreeNode::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // tree is not deep-copied, as the template doesn't change.
    deepCopyField(leave, copies);
    deepCopyField(leave_output, copies);
    deepCopyField(leave_error, copies);

    deepCopyField(missing_node, copies);
    deepCopyField(missing_leave, copies);
    deepCopyField(left_node, copies);
    deepCopyField(left_leave, copies);
    deepCopyField(right_node, copies);
    deepCopyField(right_leave, copies);
}

void RegressionTreeNode::build()
{
    inherited::build();
    build_();
}

void RegressionTreeNode::build_()
{
}

void RegressionTreeNode::initNode(PP<RegressionTree> the_tree,
                                  PP<RegressionTreeLeave> the_leave)
{
    tree = the_tree;
    leave = the_leave;
    PP<RegressionTreeRegisters> the_train_set = tree->getSortedTrainingSet();
    PP<RegressionTreeLeave> leave_template = tree->leave_template;
    int missing_leave_id = the_train_set->getNextId();
    int left_leave_id = the_train_set->getNextId();
    int right_leave_id = the_train_set->getNextId();

    missing_leave = ::PLearn::deepCopy(leave_template);
    missing_leave->initLeave(the_train_set, missing_leave_id, missing_is_valid);

    left_leave = ::PLearn::deepCopy(leave_template);
    left_leave->initLeave(the_train_set, left_leave_id);

    right_leave = ::PLearn::deepCopy(leave_template);
    right_leave->initLeave(the_train_set, right_leave_id);

    leave_output.resize(leave_template->outputsize());
    leave_error.resize(3);

    leave->getOutputAndError(leave_output, leave_error);

    // We do the rounding here as an optimization: it doesn't change
    // leave_error. If you want leave_error to include this rounding,
    // use RegressionTreeMulticlassLeave instead.
    Vec multiclass_outputs = tree->multiclass_outputs;
    if (multiclass_outputs.length() <= 0) return;
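    // Worked example with made-up values: with multiclass_outputs = [0, 1, 3]
    // and a leave mean of 1.8, the distances are 1.8, 0.8 and 1.2, so the
    // loop below snaps leave_output[0] to the closest admissible value, 1.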
    real closest_value = multiclass_outputs[0];
    real margin_to_closest_value = abs(leave_output[0] - multiclass_outputs[0]);
    for (int value_ind = 1; value_ind < multiclass_outputs.length(); value_ind++)
    {
        real v = abs(leave_output[0] - multiclass_outputs[value_ind]);
        if (v < margin_to_closest_value)
        {
            closest_value = multiclass_outputs[value_ind];
            margin_to_closest_value = v;
        }
    }
    leave_output[0] = closest_value;
}

//#define RCMP
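// Scans every input column for the best split of this node's leave.  A rough
// sketch of the algorithm (a summary of the code below, not a specification):
//   for each column col:
//     1. dispatch the leave's rows, sorted on col, into the missing, left and
//        right candidate leaves;
//     2. slide the cut point across the sorted values (bestSplitInRow) and
//        keep the (error, split value, balance) of the best cut;
//     3. keep the column whose best cut has the lowest error, breaking ties
//        with the smaller length imbalance between the left and right leave.
// The winner is stored in split_col, split_feature_value, after_split_error
// and split_balance; split_col stays -1 when no valid split exists.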
void RegressionTreeNode::lookForBestSplit()
{
    if(leave->length() <= 1)
        return;
    TVec<RTR_type> candidate(0, leave->length()); // list of candidate rows to split
    TVec<RTR_type> registered_row(leave->length());
    TVec<pair<RTR_target_t,RTR_weight_t> > registered_target_weight(leave->length());
    registered_target_weight.resize(leave->length());
    registered_target_weight.resize(0);
    Vec registered_value(0, leave->length());
    tmp_vec.resize(leave->outputsize());
    Vec left_error(3);
    Vec right_error(3);
    Vec missing_error(3);
    missing_error.clear();
    PP<RegressionTreeRegisters> train_set = tree->getSortedTrainingSet();
    bool one_pass_on_data = !train_set->haveMissing();

    int inputsize = train_set->inputsize();
#ifdef RCMP
    Vec row_split_err(inputsize);
    Vec row_split_value(inputsize);
    Vec row_split_balance(inputsize);
    row_split_err.clear();
    row_split_value.clear();
    row_split_balance.clear();
#endif
    int leave_id = leave->getId();

    int l_length = 0;
    real l_weights_sum = 0;
    real l_targets_sum = 0;
    real l_weighted_targets_sum = 0;
    real l_weighted_squared_targets_sum = 0;

    for (int col = 0; col < inputsize; col++)
    {
        missing_leave->initStats();
        left_leave->initStats();
        right_leave->initStats();

        PLASSERT(registered_row.size()==leave->length());
        PLASSERT(candidate.size()==0);
        tuple<real,real,int> ret;
#ifdef NPREFETCH
        // This ifdef exists in case we don't want the optimized version with
        // memory prefetch; the optimization may hurt on some computers.
        train_set->getAllRegisteredRow(leave_id, col, registered_row,
                                       registered_target_weight,
                                       registered_value);

        PLASSERT(registered_row.size()==leave->length());
        PLASSERT(candidate.size()==0);

        // Optimization for the case where many rows share the same value at
        // the end, as with binary variables.
        int row_idx_end = registered_row.size() - 1;
        int prev_row = registered_row[row_idx_end];
        real prev_val = registered_value[row_idx_end];
        for( ; row_idx_end > 0; row_idx_end--)
        {
            int row = prev_row;
            real val = prev_val;
            prev_row = registered_row[row_idx_end - 1];
            prev_val = registered_value[row_idx_end - 1];
            if (RTR_HAVE_MISSING && is_missing(val))
                missing_leave->addRow(row, registered_target_weight[row_idx_end].first,
                                      registered_target_weight[row_idx_end].second);
            else if(val==prev_val)
                right_leave->addRow(row, registered_target_weight[row_idx_end].first,
                                    registered_target_weight[row_idx_end].second);
            else
                break;
        }

        for(int row_idx = 0; row_idx <= row_idx_end; row_idx++)
        {
            int row = registered_row[row_idx];
            if (RTR_HAVE_MISSING && is_missing(registered_value[row_idx]))
                missing_leave->addRow(row, registered_target_weight[row_idx].first,
                                      registered_target_weight[row_idx].second);
            else {
                left_leave->addRow(row, registered_target_weight[row_idx].first,
                                   registered_target_weight[row_idx].second);
                candidate.append(row);
            }
        }

        missing_leave->getOutputAndError(tmp_vec, missing_error);
        ret=bestSplitInRow(col, candidate, left_error,
                           right_error, missing_error,
                           right_leave, left_leave,
                           train_set, registered_value,
                           registered_target_weight);

#else
        if(!one_pass_on_data){
            train_set->getAllRegisteredRowLeave(leave_id, col, registered_row,
                                                registered_target_weight,
                                                registered_value,
                                                missing_leave,
                                                left_leave,
                                                right_leave, candidate);
            PLASSERT(registered_target_weight.size()==candidate.size());
            PLASSERT(registered_value.size()==candidate.size());
            PLASSERT(left_leave->length()+right_leave->length()
                     +missing_leave->length()==leave->length());
            PLASSERT(candidate.size()>0||(left_leave->length()+right_leave->length()==0));
            missing_leave->getOutputAndError(tmp_vec, missing_error);
            ret=bestSplitInRow(col, candidate, left_error,
                               right_error, missing_error,
                               right_leave, left_leave,
                               train_set, registered_value,
                               registered_target_weight);
        }else{
            ret=train_set->bestSplitInRow(leave_id, col, registered_row,
                                          left_leave,
                                          right_leave, left_error,
                                          right_error);
        }
        PLASSERT(registered_row.size()==leave->length());
#endif

        if(col==0){
            l_length=left_leave->length()+right_leave->length()+missing_leave->length();
            l_weights_sum=left_leave->weights_sum+right_leave->weights_sum+missing_leave->weights_sum;
            l_targets_sum=left_leave->targets_sum+right_leave->targets_sum+missing_leave->targets_sum;
            l_weighted_targets_sum=left_leave->weighted_targets_sum
                +right_leave->weighted_targets_sum+missing_leave->weighted_targets_sum;
            l_weighted_squared_targets_sum=left_leave->weighted_squared_targets_sum
                +right_leave->weighted_squared_targets_sum+missing_leave->weighted_squared_targets_sum;
        }else if(!one_pass_on_data){
            PLCHECK(l_length==left_leave->length()+right_leave->length()
                    +missing_leave->length());
            PLCHECK(fast_is_equal(l_weights_sum,
                                  left_leave->weights_sum+right_leave->weights_sum
                                  +missing_leave->weights_sum));
            PLCHECK(fast_is_equal(l_targets_sum,
                                  left_leave->targets_sum+right_leave->targets_sum
                                  +missing_leave->targets_sum));
            PLCHECK(fast_is_equal(l_weighted_targets_sum,
                                  left_leave->weighted_targets_sum
                                  +right_leave->weighted_targets_sum
                                  +missing_leave->weighted_targets_sum));
            PLCHECK(fast_is_equal(l_weighted_squared_targets_sum,
                                  left_leave->weighted_squared_targets_sum
                                  +right_leave->weighted_squared_targets_sum
                                  +missing_leave->weighted_squared_targets_sum));
        }

#ifdef RCMP
        row_split_err[col] = get<0>(ret);
        row_split_value[col] = get<1>(ret);
        row_split_balance[col] = get<2>(ret);
#endif
        if (fast_is_more(get<0>(ret), after_split_error)) continue;
        else if (fast_is_equal(get<0>(ret), after_split_error) &&
                 fast_is_more(get<2>(ret), split_balance)) continue;
        else if (fast_is_equal(get<0>(ret), REAL_MAX)) continue;

        split_col = col;
        after_split_error = get<0>(ret);
        split_feature_value = get<1>(ret);
        split_balance = get<2>(ret);
        PLASSERT(fast_is_less(after_split_error,REAL_MAX)||split_col==-1);
    }
    PLASSERT(fast_is_less(after_split_error,REAL_MAX)||split_col==-1);

    EXTREME_MODULE_LOG<<"error after split: "<<after_split_error<<endl;
    EXTREME_MODULE_LOG<<"split value: "<<split_feature_value<<endl;
    EXTREME_MODULE_LOG<<"split_col: "<<split_col;
    if(split_col>=0)
        EXTREME_MODULE_LOG<<" "<<train_set->fieldName(split_col);
    EXTREME_MODULE_LOG<<endl;
}

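// Finds the best cut point for one column.  The candidate rows are assumed to
// be sorted on that column and to start in left_leave; the loop below moves
// them one at a time (largest value first) into right_leave and re-evaluates
// the errors only when the feature value actually changes, since a cut
// between two equal values is not a valid split point.  Returns a
// tuple<best split error, best split value, length balance>, where the split
// value is the midpoint between the two feature values around the cut.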
tuple<real,real,int> RegressionTreeNode::bestSplitInRow(
    int col,
    TVec<RTR_type>& candidates,
    Vec left_error,
    Vec right_error,
    const Vec missing_error,
    PP<RegressionTreeLeave> right_leave,
    PP<RegressionTreeLeave> left_leave,
    PP<RegressionTreeRegisters> train_set,
    Vec values, TVec<pair<RTR_target_t,RTR_weight_t> > t_w
    )
{
    int best_balance = INT_MAX;
    real best_feature_value = REAL_MAX;
    real best_split_error = REAL_MAX;
    // In case of only missing values, there is nothing to split.
    if(candidates.size() == 0)
        return make_tuple(best_split_error, best_feature_value, best_balance);

    int row = candidates.last();
    Vec tmp(3);

    real missing_errors = missing_error[0] + missing_error[1];
    real first_value = values.first();
    real next_feature = values.last();

    // The next_feature!=first_value test checks whether there are more split
    // points left; for binary variables, or variables with few distinct
    // values, this gives a great speed-up.
    for(int i = candidates.size()-2; i >= 0 && next_feature != first_value; i--)
    {
        int next_row = candidates[i];
        real row_feature = next_feature;
        PLASSERT(is_equal(row_feature,values[i+1]));
//                 ||(is_missing(row_feature)&&is_missing(values[i+1])));
        next_feature = values[i];

        real target = t_w[i+1].first;
        real weight = t_w[i+1].second;
        PLASSERT(train_set->get(next_row, col)==values[i]);
        PLASSERT(train_set->get(row, col)==values[i+1]);
        PLASSERT(next_feature<=row_feature);

        left_leave->removeRow(row, target, weight);
        right_leave->addRow(row, target, weight);
        row = next_row;
        if (next_feature < row_feature){
            left_leave->getOutputAndError(tmp, left_error);
            right_leave->getOutputAndError(tmp, right_error);
        }else
            continue;
        real work_error = missing_errors + left_error[0]
            + left_error[1] + right_error[0] + right_error[1];
        int work_balance = abs(left_leave->length() -
                               right_leave->length());
        if (fast_is_more(work_error,best_split_error)) continue;
        else if (fast_is_equal(work_error,best_split_error) &&
                 fast_is_more(work_balance,best_balance)) continue;

        best_feature_value = 0.5 * (row_feature + next_feature);
        best_split_error = work_error;
        best_balance = work_balance;
    }
    candidates.resize(0);
    return make_tuple(best_split_error, best_feature_value, best_balance);
}

void RegressionTreeNode::compareSplit(int col, real left_leave_last_feature, real right_leave_first_feature,
                                      Vec left_error, Vec right_error, Vec missing_error)
{
    PLASSERT(left_leave_last_feature<=right_leave_first_feature);
    if (left_leave_last_feature >= right_leave_first_feature) return;
    real work_error = missing_error[0] + missing_error[1] + left_error[0] + left_error[1] + right_error[0] + right_error[1];
    int work_balance = abs(left_leave->length() - right_leave->length());
    if (fast_is_more(work_error,after_split_error)) return;
    else if (fast_is_equal(work_error,after_split_error) &&
             fast_is_more(work_balance,split_balance)) return;

    split_col = col;
    split_feature_value = 0.5 * (right_leave_first_feature + left_leave_last_feature);
    after_split_error = work_error;
    split_balance = work_balance;
}

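// Turns this final node into an expanded node: the rows of the leave are
// dispatched on split_col (missing values, values below split_feature_value,
// the rest), and a child node is built for the left and right leaves (plus
// one for the missing leave when missing_is_valid is set).  Returns the split
// column, or -1 when there was no split candidate.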
int RegressionTreeNode::expandNode()
{
    if (split_col < 0)
    {
        verbose("RegressionTreeNode: there are no more split candidates", 3);
        return -1;
    }
    missing_leave->initStats();
    left_leave->initStats();
    right_leave->initStats();
    TVec<RTR_type> registered_row(leave->length());
    PP<RegressionTreeRegisters> train_set = tree->getSortedTrainingSet();
    train_set->getAllRegisteredRow(leave->getId(), split_col, registered_row);

    for (int row_index = 0; row_index < registered_row.size(); row_index++)
    {
        int row = registered_row[row_index];
        if (RTR_HAVE_MISSING && is_missing(train_set->get(row, split_col)))
        {
            missing_leave->addRow(row);
            missing_leave->registerRow(row);
        }
        else
        {
            if (train_set->get(row, split_col) < split_feature_value)
            {
                left_leave->addRow(row);
                left_leave->registerRow(row);
            }
            else
            {
                right_leave->addRow(row);
                right_leave->registerRow(row);
            }
        }
    }

    PLASSERT(left_leave->length()>0);
    PLASSERT(right_leave->length()>0);
    PLASSERT(left_leave->length() + right_leave->length() +
             missing_leave->length() == registered_row.size());
//  leave->printStats();
//  left_leave->printStats();
//  right_leave->printStats();
    if (RTR_HAVE_MISSING && missing_is_valid > 0)
    {
        missing_node = new RegressionTreeNode(missing_is_valid);
        missing_node->initNode(tree, missing_leave);
        missing_node->lookForBestSplit();
    }
    left_node = new RegressionTreeNode(missing_is_valid);
    left_node->initNode(tree, left_leave);
    left_node->lookForBestSplit();
    right_node = new RegressionTreeNode(missing_is_valid);
    right_node->initNode(tree, right_leave);
    right_node->lookForBestSplit();
    return split_col;
}

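// Recursive descent from this node to the leave matching inputv.  At each
// expanded node the sample goes to missing_node when the split feature is
// missing (or straight to the missing leave's output when missing_is_valid
// is 0), to right_node when the feature exceeds split_feature_value, and to
// left_node otherwise.  When nodes is non-null, every traversed node is
// appended to it, so the caller can inspect the decision path.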
void RegressionTreeNode::computeOutputAndNodes(const Vec& inputv, Vec& outputv,
                                       TVec<PP<RegressionTreeNode> >* nodes)
{
    if(nodes)
        nodes->append(this);
    if (!left_node)
    {
        outputv << leave_output;
        return;
    }
    if (RTR_HAVE_MISSING && is_missing(inputv[split_col]))
    {
        if (missing_is_valid > 0)
        {
            missing_node->computeOutputAndNodes(inputv, outputv, nodes);
        }
        else
        {
            tmp_vec.resize(3);
            missing_leave->getOutputAndError(outputv, tmp_vec);
        }
        return;
    }
    if (inputv[split_col] > split_feature_value)
    {
        right_node->computeOutputAndNodes(inputv, outputv, nodes);
        return;
    }
    else
    {
        left_node->computeOutputAndNodes(inputv, outputv, nodes);
        return;
    }
}

void RegressionTreeNode::verbose(string the_msg, int the_level)
{
    if (tree->verbosity >= the_level)
        cout << the_msg << endl;
}

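// A minimal usage sketch (hypothetical, for illustration only; the real
// driver is the RegressionTree learner that owns these nodes):
//
//   PP<RegressionTreeNode> node = new RegressionTreeNode(missing_is_valid);
//   node->initNode(tree, leave);   // attach the tree and this node's leave
//   node->lookForBestSplit();      // fills split_col / split_feature_value
//   if (node->expandNode() >= 0) {
//       // left_node, right_node (and possibly missing_node) now exist and
//       // already know their own best splits.
//   }
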
} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :