// -*- C++ -*-

// BestAveragingPLearner.cc
//
// Copyright (C) 2006 Nicolas Chapados
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Nicolas Chapados

// From PLearn
#include "BestAveragingPLearner.h"
#include <plearn/base/ProgressBar.h>

// From C++ stdlib
#include <algorithm>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    BestAveragingPLearner,
    "Select the M \"best\" of N trained PLearners based on a train cost",
00052     "This PLearner takes N raw models (themselved PLearners) and trains them all\n"
00053     "on the same data (or various splits given by an optional Splitter), then\n"
00054     "selects the M \"best\" models based on a train cost.  At compute-output time,\n"
00055     "it outputs the arithmetic mean of the outputs of the selected models (which\n"
00056     "works fine for regression).\n"
00057     "\n"
00058     "The train costs of this learner are simply the concatenation of the train\n"
00059     "costs of all sublearners.  We also add the following costs: the cost\n"
00060     "'selected_i', where 0 <= i < M, contains the index of the selected model\n"
00061     "(between 0 and N-1).\n"
00062     "\n"
00063     "The test costs of this learner is, for now, just the mse.\n"
00064     );
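
// A minimal configuration sketch in PLearn script syntax.  This is only an
// illustration: the option values below are hypothetical, and any regression
// PLearner (an NNet is assumed here) can serve as the template.
//
//     BestAveragingPLearner(
//         learner_template = NNet( nhidden = 10; noutputs = 1; nstages = 100 );
//         initial_seed = 1827;
//         total_learner_num = 10;
//         best_learner_num = 3;
//         comparison_statspec = "E[mse]";
//     )
//
// This would train 10 NNets with seeds 1827..1836, rank them by mean training
// mse, and average the outputs of the 3 best at compute-output time.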

BestAveragingPLearner::BestAveragingPLearner()
    : m_initial_seed(-1),
      m_seed_option("seed"),
      m_total_learner_num(0),
      m_best_learner_num(0),
      m_cached_outputsize(-1)
{ }

void BestAveragingPLearner::declareOptions(OptionList& ol)
{
    //#####  Build Options  ###################################################

    declareOption(
        ol, "learner_set", &BestAveragingPLearner::m_learner_set,
        OptionBase::buildoption,
        "The set of all learners to train, given explicitly.  If this option\n"
        "is specified, the learner template (see below) is ignored.  Note that\n"
        "these objects ARE NOT deep-copied before being trained.\n");

    declareOption(
        ol, "learner_template", &BestAveragingPLearner::m_learner_template,
        OptionBase::buildoption,
        "If 'learner_set' is not specified, a template PLearner used to\n"
        "instantiate 'learner_set'.  When instantiation is carried out,\n"
        "the seed is set sequentially from 'initial_seed'.  The instantiation\n"
        "sequence is as follows:\n"
        "\n"
        "- (1) template is deep-copied\n"
        "- (2) seed and expdir are set\n"
        "- (3) build() is called\n"
        "- (4) forget() is called\n"
        "\n"
        "The expdir is set from the BestAveragingPLearner's expdir (if any)\n"
        "by suffixing '/learner_i'.\n");

    declareOption(
        ol, "initial_seed", &BestAveragingPLearner::m_initial_seed,
        OptionBase::buildoption,
        "If learners are instantiated from 'learner_template', the initial seed\n"
        "value to set into the learners before building them.  The seed is\n"
        "incremented by one from that starting point for each successive learner\n"
        "that is being instantiated.  If this value is <= 0, it is used as-is\n"
        "without being incremented.\n");

    declareOption(
        ol, "seed_option", &BestAveragingPLearner::m_seed_option,
        OptionBase::buildoption,
        "Used in conjunction with 'initial_seed'; the name of the option that\n"
        "holds the seed to be initialized.  The default is just 'seed', which is\n"
        "the PLearner option name for the seed, and is adequate if the\n"
        "learner_template is \"shallow\", such as NNet.  This option is useful if\n"
        "the learner_template is a complex learner (e.g. HyperLearner) and the\n"
        "seed must actually be set inside one of the inner learners.  In the\n"
        "particular case of HyperLearner, one could use 'learner.seed' as the\n"
        "value for this option.\n");

    declareOption(
        ol, "total_learner_num", &BestAveragingPLearner::m_total_learner_num,
        OptionBase::buildoption,
        "Total number of learners to instantiate from learner_template (if\n"
        "'learner_set' is not specified).\n");

    declareOption(
        ol, "best_learner_num", &BestAveragingPLearner::m_best_learner_num,
        OptionBase::buildoption,
        "Number of BEST train-time learners to keep and average at\n"
        "compute-output time.\n");

    declareOption(
        ol, "comparison_statspec", &BestAveragingPLearner::m_comparison_statspec,
        OptionBase::buildoption,
        "Statistic specification to use to compare the training performance\n"
        "between learners.  For example, if all learners have a 'mse' measure,\n"
        "this would be \"E[mse]\".  It is assumed that all learners make available\n"
        "the statistic under the same name.\n");

    declareOption(
        ol, "splitter", &BestAveragingPLearner::m_splitter,
        OptionBase::buildoption,
        "Optional splitter that can be used to create the individual training\n"
        "sets for the learners.  If this is specified, it is assumed that the\n"
        "splitter returns a number of splits equal to the number of learners.\n"
        "Each split is used as a learner's training set.  If not specified,\n"
        "all learners receive the same training set (passed to setTrainingSet).\n");


    //#####  Learnt Options  ##################################################

    declareOption(
        ol, "cached_outputsize", &BestAveragingPLearner::m_cached_outputsize,
        OptionBase::learntoption,
        "Cached outputsize, determined from the inner learners");

    declareOption(
        ol, "learner_train_costs", &BestAveragingPLearner::m_learner_train_costs,
        OptionBase::learntoption,
        "List of train cost values for each learner in 'learner_set'");

    declareOption(
        ol, "best_learners", &BestAveragingPLearner::m_best_learners,
        OptionBase::learntoption,
        "Learners that have been found to be the best and are being kept");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

void BestAveragingPLearner::build_()
{
    if (! m_learner_set.size() && m_learner_template.isNull())
        PLERROR("%s: one of 'learner_set' or 'learner_template' must be specified",
                __FUNCTION__);

    // If both 'learner_set' and 'learner_template' are specified, the former
    // silently overrides the latter.  Reason: once 'learner_template' has been
    // instantiated, the resulting learners are stored in 'learner_set'.
    if (! m_learner_set.size() && m_learner_template) {
        // Sanity check on other options
        if (m_total_learner_num < 1)
            PLERROR("%s: 'total_learner_num' must be strictly positive",
                    __FUNCTION__);

        const int N = m_total_learner_num;
        int32_t cur_seed = m_initial_seed;
        m_learner_set.resize(N);
        for (int i=0 ; i<N ; ++i) {
            PP<PLearner> new_learner = PLearn::deepCopy(m_learner_template);
            new_learner->setOption(m_seed_option, tostring(cur_seed));
            new_learner->build();
            new_learner->forget();
            m_learner_set[i] = new_learner;

            if (cur_seed > 0)
                ++cur_seed;
        }
    }

    // Some more sanity checking
    if (m_best_learner_num < 1)
        PLERROR("%s: 'best_learner_num' must be strictly positive", __FUNCTION__);
    if (m_best_learner_num > m_learner_set.size())
        PLERROR("%s: 'best_learner_num' (=%d) must not be larger than the total "
                "number of learners (=%d)", __FUNCTION__, m_best_learner_num,
                m_learner_set.size());
}
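
// Worked example of the seeding logic above: with initial_seed = 1827 and
// total_learner_num = 3, the three sublearners receive seeds 1827, 1828 and
// 1829.  With the default initial_seed = -1, every sublearner receives -1
// unchanged, since the seed is only incremented when strictly positive.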

// ### Nothing to add here, simply calls build_
void BestAveragingPLearner::build()
{
    inherited::build();
    build_();
}


void BestAveragingPLearner::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(m_learner_set,         copies);
    deepCopyField(m_learner_template,    copies);
    deepCopyField(m_splitter,            copies);
    deepCopyField(m_learner_train_costs, copies);
    deepCopyField(m_best_learners,       copies);
    deepCopyField(m_output_buffer,       copies);
}


void BestAveragingPLearner::setTrainingSet(VMat training_set, bool call_forget)
{
    inherited::setTrainingSet(training_set, call_forget);

    // Make intelligent use of splitter if any
    if (m_splitter) {
        m_splitter->setDataSet(training_set);

        // Splitter should return exactly the same number of splits as there
        // are inner learners
        if (m_splitter->nsplits() != m_learner_set.size())
            PLERROR("%s: splitter should return exactly the same number of splits (=%d) "
                    "as there are inner learners (=%d)", __FUNCTION__,
                    m_splitter->nsplits(), m_learner_set.size());

        for (int i=0, n=m_learner_set.size() ; i<n ; ++i) {
            TVec<VMat> split = m_splitter->getSplit(i);
            if (split.size() != 1)
                PLERROR("%s: split %d should return exactly 1 VMat (returned %d)",
                        __FUNCTION__, i, split.size());
            m_learner_set[i]->setTrainingSet(split[0], call_forget);
        }
    }
    else {
        for (int i=0, n=m_learner_set.size() ; i<n ; ++i)
            m_learner_set[i]->setTrainingSet(training_set, call_forget);
    }
}


void BestAveragingPLearner::setTrainStatsCollector(PP<VecStatsCollector> statscol)
{
    inherited::setTrainStatsCollector(statscol);
    for (int i=0, n=m_learner_set.size() ; i<n ; ++i) {
        // Set the statistic names so we can call getStat on the VSC.
        PP<VecStatsCollector> vsc = new VecStatsCollector;
        vsc->setFieldNames(m_learner_set[i]->getTrainCostNames());
        m_learner_set[i]->setTrainStatsCollector(vsc);
    }
}


void BestAveragingPLearner::setExperimentDirectory(const PPath& the_expdir)
{
    inherited::setExperimentDirectory(the_expdir);
    if (! the_expdir.isEmpty()) {
        for (int i=0, n=m_learner_set.size() ; i<n ; ++i)
            m_learner_set[i]->setExperimentDirectory(
                the_expdir / "learner_" + tostring(i));
    }
}
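
// For instance, if this learner's expdir is 'exp/', sublearner i is assigned
// the experiment directory 'exp/learner_i' ('exp/learner_0', 'exp/learner_1',
// and so on), matching the '/learner_i' suffix documented under the
// 'learner_template' option.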


int BestAveragingPLearner::outputsize() const
{
    // If the outputsize has not already been determined, get it from the inner
    // learners.
    if (m_cached_outputsize < 0) {
        for (int i=0, n=m_learner_set.size() ; i<n ; ++i) {
            int cur_outputsize = m_learner_set[i]->outputsize();
            if (m_cached_outputsize < 0)
                m_cached_outputsize = cur_outputsize;
            else if (m_cached_outputsize != cur_outputsize)
                PLERROR("%s: all inner learners must have the same outputsize; "
                        "learner %d has an outputsize of %d, whereas the previous "
                        "learners have %d",  __FUNCTION__, i, cur_outputsize,
                        m_cached_outputsize);
        }
    }
    return m_cached_outputsize;
}

void BestAveragingPLearner::forget()
{
    inherited::forget();

    for (int i=0, n=m_learner_set.size() ; i<n ; ++i)
        m_learner_set[i]->forget();
}

void BestAveragingPLearner::train()
{
    if (! initTrain())
        return;

    const int N = m_learner_set.size();
    m_learner_train_costs.resize(N);
    TVec< pair<real, int> > model_scores(N);
    PP<ProgressBar> pb(verbosity?
        new ProgressBar("Training sublearners of BestAveragingPLearner",N) : 0);

    // Basic idea: train all sublearners, then sample the train statistic used
    // for comparison and fill out the member variable 'm_best_learners'.
    // Finally, collect the expectations of the sublearners' train statistics
    // (these become this learner's train statistics).

    // Actual train-cost vector
    Vec traincosts(nTrainCosts());
    int pos_traincost = 0;

    for (int i=0 ; i<N ; ++i) {
        if (pb)
            pb->update(i);
        m_learner_set[i]->train();

        PP<VecStatsCollector> trainstats = m_learner_set[i]->getTrainStatsCollector();
        real cur_comparison = trainstats->getStat(m_comparison_statspec);
        m_learner_train_costs[i] = cur_comparison;
        model_scores[i] = make_pair(cur_comparison, i);

        Vec cur_traincosts = trainstats->getMean();
        traincosts.subVec(pos_traincost, cur_traincosts.size()) << cur_traincosts;
        pos_traincost += cur_traincosts.size();
    }

    // Find the M best (lowest) train costs; sorting the (cost, index) pairs
    // orders them by ascending cost, with the learner index kept in 'second'.
    sort(model_scores.begin(), model_scores.end());
    PLASSERT( m_best_learner_num <= model_scores.size() );
    m_best_learners.resize(m_best_learner_num);
    for (int i=0 ; i<m_best_learner_num ; ++i) {
        m_best_learners[i] = m_learner_set[model_scores[i].second];
        traincosts[pos_traincost++] = model_scores[i].second;
    }

    // Accumulate into train statscollector
    PLASSERT( getTrainStatsCollector() );
    getTrainStatsCollector()->update(traincosts);
}


void BestAveragingPLearner::computeOutput(const Vec& input, Vec& output) const
{
    output.resize(outputsize());
    output.fill(0.0);
    m_output_buffer.resize(outputsize());

    // Basic strategy: accumulate into output, then divide by number of
    // learners (take unweighted arithmetic mean).  Works fine as long as we
    // don't accumulate millions of terms...
    for (int i=0, n=m_best_learners.size() ; i<n ; ++i) {
        m_best_learners[i]->computeOutput(input, m_output_buffer);
        output += m_output_buffer;
    }
    output /= m_best_learners.size();
}
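
// In other words, with f_1, ..., f_M denoting the M best sublearners, the
// output is the unweighted mean
//
//     output(x) = (1/M) * sum_{k=1}^{M} f_k(x)
//
// computed componentwise over the (common) outputsize of the sublearners.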


void BestAveragingPLearner::computeCostsFromOutputs(const Vec& input, const Vec& output,
                                                    const Vec& target, Vec& costs) const
{
    // For now, only MSE is computed...
    real mse = powdistance(output, target);
    costs.resize(1);
    costs[0] = mse;
}
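
// Note on the computation above: to the best of our understanding,
// powdistance(u, v) with its default power of 2 returns the sum of squared
// componentwise differences, sum_i (u_i - v_i)^2, so for a one-dimensional
// regression target the reported 'mse' is simply the squared error of the
// example.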


TVec<string> BestAveragingPLearner::getTestCostNames() const
{
    return TVec<string>(1, "mse");
}


TVec<string> BestAveragingPLearner::getTrainCostNames() const
{
    TVec<string> c;
    for (int i=0, n=m_learner_set.size() ; i<n ; ++i) {
        TVec<string> learner_costs = m_learner_set[i]->getTrainCostNames().copy();
        for (int j=0, m=learner_costs.size() ; j<m ; ++j)
            learner_costs[j] = "learner"+tostring(i)+'_'+learner_costs[j];
        c.append(learner_costs);
    }

    for (int i=0 ; i<m_best_learner_num ; ++i)
        c.push_back("selected_" + tostring(i));

    return c;
}
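
// Example of the resulting train-cost layout: with two sublearners that each
// report a single 'mse' train cost and best_learner_num = 1, the train cost
// names are
//
//     [ learner0_mse, learner1_mse, selected_0 ]
//
// and train() fills the last slot with the index (0 or 1) of the winning
// sublearner.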


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :