LocalMedBoost.h
// -*- C++ -*-

// LocalMedBoost.h
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
// 
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
// 
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
// 
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// 
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* ********************************************************************************
 * $Id: LocalMedBoost.h,v 1.0 2004/07/19 10:00:00 Bengio/Kegl/Godbout            *
 * This file is part of the PLearn library.                                      *
 ******************************************************************************** */
#ifndef LocalMedBoost_INC
#define LocalMedBoost_INC

#include <plearn_learners/generic/PLearner.h>

namespace PLearn {
using namespace std;
class RegressionTree;
class BaseRegressorWrapper;
class RegressionTreeRegisters;

class LocalMedBoost: public PLearner
{
    typedef PLearner inherited;
private:

/*
  Build options: they have to be set before training
  (a hedged usage sketch follows the class declaration, after the include guard)
*/

    real robustness;                                   // robustness parameter of the boosting algorithm
    real adapt_robustness_factor;                      // if not 0.0, robustness is adapted at each stage to max(t)min(i) base_award + this constant
    real loss_function_weight;                         // hyper-parameter to balance the error and the confidence factor
    string objective_function;                         // indicates whether to use l2 or flaten_l2 as the base regressor's objective function
    int regression_tree;                               // if set to 1, use the tree_regressor_template; otherwise use the base_regressor_template
    int max_nstages;                                   // maximum number of stages for the hyper-learner, used to size the vectors of base learners
    PP<PLearner> base_regressor_template;              // template for a generic regressor used as the base learner to be boosted
    PP<RegressionTree> tree_regressor_template;        // template for a tree regressor to be boosted as the base regressor
    PP<BaseRegressorWrapper> tree_wrapper_template;    // template for a tree regressor boosted through a wrapper for a different confidence function

/*
  Learnt options: they are sized and initialized, if need be, at stage 0
*/

    int end_stage;                                      // last training stage reached at the end of training
    real bound;                                         // cumulative bound computed after each boosting stage
    real maxt_base_award;                               // max(t)min(i) base_award used to adapt robustness at each stage
    PP<RegressionTreeRegisters> sorted_train_set;       // a sorted train set when using a tree as the base regressor
    TVec< PP<PLearner> > base_regressors;               // base regressors built at each boosting stage
    TVec< PP<RegressionTree> > tree_regressors;         // tree regressors built at each boosting stage
    TVec< PP<BaseRegressorWrapper> > tree_wrappers;     // tree regressor wrappers built at each boosting stage
    TVec<real> function_weights;                        // array of function weights built by the boosting algorithm
    TVec<real> loss_function;                           // array of the loss function
    TVec<real> sample_weights;                          // array representing the distribution over the samples of the training set

/*
  Work fields: they are sized and initialized, if need be, at build time
*/

    int each_train_sample_index;                   // index to iterate through the train set
    int length;                                    // number of samples in the train set
    int width;                                     // number of columns in the train set
    int inputsize;                                 // input size of the train set
    int targetsize;                                // target size of the train set
    int weightsize;                                // weight size of the train set
    bool capacity_too_large;                       // early stop of the algorithm because the capacity is too large
    bool capacity_too_small;                       // early stop of the algorithm because the capacity is too small
    real edge;                                     // computed sum of the weighted base rewards
    real min_margin;                               // minimum margin over all samples
    Vec sample_input;                              // vector to hold the sample input
    Vec sample_target;                             // vector to hold the sample target
    real sample_weight;                            // real to hold the sample weight
    Vec sample_output;                             // vector to compute the output from the base regressor and from boosting
    Vec sample_costs;                              // vector to compute the boosting cost for one sample
    TVec<real> base_rewards;                       // vector to compute the base reward of all samples after each boosting stage
    TVec<real> base_confidences;                   // vector to keep the base confidence of all samples after each boosting stage
    TVec<real> base_awards;                        // vector to compute the base award of all samples after each boosting stage
    TVec<real> exp_weighted_edges;                 // vector to precompute the exp of the weighted edges of all samples after each boosting stage (see the sketch at the end of this file)
    real sum_exp_weighted_edges;                   // sum of the exp of the weighted edges over all samples after each boosting stage

/*
  Work fields for the line search: they are sized and initialized, if need be, at build time
*/

    real bracketing_learning_rate;              // various parameters of the line search, initialized at the onset
    real bracketing_zero;                       // more of the same
    real interpolation_learning_rate;           // more of the same
    real interpolation_precision;               // more of the same
    real max_learning_rate;                     // more of the same
    real bracket_a_start;                       // more of the same
    real bracket_b_start;                       // more of the same
    int  iter;                                  // iteration counter of the line search
    real x_a, x_b, x_c, x_d, x_e;               // abscissa values of various points through the interpolation of the search
    real x_u, x_v, x_w, x_x;                    // more of the same
    real x_xmed, x_lim;                         // more of the same
    real f_a, f_b, f_c;                         // corresponding evaluations of the function to minimize
    real f_u, f_v, f_w, f_x;                    // more of the same
    real t_p, t_q, t_r, t_sav;                  // intermediary calculations through the interpolation
    real p_step, p_lim, p_tin;                  // various parameters set through the interpolation
    real p_to1, p_tol1, p_tol2;                 // more of the same

public:
    LocalMedBoost();
    virtual              ~LocalMedBoost();

    PLEARN_DECLARE_OBJECT(LocalMedBoost);

    static  void         declareOptions(OptionList& ol);
    virtual void         makeDeepCopyFromShallowCopy(CopiesMap& copies);
    virtual void         build();
    virtual void         train();
    virtual void         forget();
    virtual int          outputsize() const;
    virtual TVec<string> getTrainCostNames() const;
    virtual TVec<string> getTestCostNames() const;
    virtual void         computeOutput(const Vec& input, Vec& output) const;
    virtual void         computeOutputAndCosts(const Vec& input, const Vec& target, Vec& output, Vec& costs) const;
    virtual void         computeCostsFromOutputs(const Vec& input, const Vec& output,
                                                 const Vec& target, Vec& costs) const;
private:
    void         build_();
    void         computeBaseAwards();
    void         computeLossBound();
    real         findArgminFunctionWeight();
    void         initializeLineSearch();
    real         computeFunctionWeightFormula(real alpha);
    void         recomputeSampleWeight();
    void         initializeSampleWeight();
    void         verbose(string the_msg, int the_level);
};

DECLARE_OBJECT_PTR(LocalMedBoost);

} // end of namespace PLearn

#endif
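
The build options above are normally set before build() and train() are called. The following is a hedged usage sketch, not code taken from the PLearn sources: the option names (robustness, loss_function_weight, regression_tree, nstages) come from the declarations above, but the relative include, the string-serialized values passed to setOption(), and the exact call sequence through the generic PLearner interface are assumptions.

// Hypothetical usage sketch; not part of LocalMedBoost.h.
#include "LocalMedBoost.h"   // assumed relative include path

using namespace PLearn;

// 'train_set' is assumed to be an already-built VMat whose columns match
// the learner's inputsize/targetsize/weightsize.
void trainLocalMedBoostSketch(VMat train_set)
{
    PP<LocalMedBoost> booster = new LocalMedBoost();

    // Build options declared above; setOption() takes string-serialized values.
    booster->setOption("robustness", "0.1");            // robustness parameter of the boosting algorithm
    booster->setOption("loss_function_weight", "1.0");  // balance between error and confidence
    booster->setOption("regression_tree", "1");         // boost the tree regressor template
    booster->setOption("nstages", "20");                // generic PLearner option: number of boosting stages

    booster->build();                    // finalize the object once the options are set
    booster->setTrainingSet(train_set);  // installs the train set (and calls forget() by default)
    booster->train();                    // runs the boosting stages
}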


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
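
The work fields exp_weighted_edges, sum_exp_weighted_edges and sample_weights, together with recomputeSampleWeight(), suggest an exponential reweighting of the training distribution after each boosting stage. Below is a minimal sketch of that kind of update written as a generic AdaBoost-style rule; it is an illustration of the idea, not the actual body of recomputeSampleWeight(), and treating base_awards as the margin-like per-sample quantity is an assumption.

// Illustrative sketch only: a generic exponential sample-weight update of the
// kind the work fields above suggest. Not the actual PLearn implementation.
#include <cmath>
#include <vector>

void recomputeSampleWeightSketch(const std::vector<double>& base_awards, // assumed margin-like per-sample quantity
                                 double function_weight,                 // alpha found by the line search
                                 std::vector<double>& sample_weights)    // distribution over the training samples
{
    std::vector<double> exp_weighted_edges(base_awards.size());
    double sum_exp_weighted_edges = 0.0;

    // Unnormalized update: samples with a small base award (poorly handled
    // by the current base regressor) receive a larger weight.
    for (std::size_t i = 0; i < base_awards.size(); ++i) {
        exp_weighted_edges[i] = sample_weights[i] * std::exp(-function_weight * base_awards[i]);
        sum_exp_weighted_edges += exp_weighted_edges[i];
    }

    // Renormalize so the weights again form a distribution over the samples.
    for (std::size_t i = 0; i < base_awards.size(); ++i)
        sample_weights[i] = exp_weighted_edges[i] / sum_exp_weighted_edges;
}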