// -*- C++ -*-

// BaggingLearner.cc
//
// Copyright (C) 2007 Xavier Saint-Mleux, ApSTAT Technologies inc.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Xavier Saint-Mleux

#include "BaggingLearner.h"
#include <plearn/base/tostring.h>
#include <plearn/base/ProgressBar.h>
#include <plearn/misc/PLearnService.h>
#include <plearn/misc/RemotePLearnServer.h>
#include <plearn/vmat/MemoryVMatrix.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    BaggingLearner,
    "Performs bagging on several sub-learners.",
    "Bagging consists in training several sub-learners (each one a deep\n"
    "copy of the provided 'template_learner') on different subsets of the\n"
    "training data, then aggregating their outputs in order to make a test\n"
    "prediction (the way outputs are aggregated is governed by the 'stats'\n"
    "option).\n"
);
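
// A minimal construction sketch (illustrative, not part of the library):
// BootstrapSplitter is the typical bagging splitter mentioned above;
// LinearRegressor stands in for any PLearner subclass, and "E" (the mean)
// is one statistic a StatsCollector can compute.
//
//     PP<Splitter> bs   = new BootstrapSplitter();
//     PP<PLearner> tmpl = new LinearRegressor();        // hypothetical choice
//     TVec<string> stats(1);
//     stats[0] = "E";                                   // average the bags' outputs
//     PP<PLearner> bagger = new BaggingLearner(bs, tmpl, stats,
//                                              0,       // exclude_extremes
//                                              false);  // output_sub_outputs
//     bagger->setTrainingSet(trainset);                 // trainset: some VMat
//     bagger->train();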

BaggingLearner::BaggingLearner(PP<Splitter> splitter_, 
                               PP<PLearner> template_learner_,
                               TVec<string> stats_,
                               int exclude_extremes_,
                               bool output_sub_outputs_)
    :splitter(splitter_),
     template_learner(template_learner_),
     stats(stats_),
     exclude_extremes(exclude_extremes_),
     output_sub_outputs(output_sub_outputs_)
{
}

void BaggingLearner::declareOptions(OptionList& ol)
{
    declareOption(ol, "splitter", &BaggingLearner::splitter,
        OptionBase::buildoption,
        "Splitter used to get bags. In each split, only the first set is\n"
        "used (as the training set for a bag). A typical splitter used in\n"
        "bagging is a BootstrapSplitter.", OptionBase::basic_level);

    declareOption(ol, "template_learner", &BaggingLearner::template_learner,
                  OptionBase::buildoption,
                  "Template for all sub-learners; deep-copied once for each bag.",
                  OptionBase::basic_level);

    declareOption(ol, "stats", &BaggingLearner::stats,
        OptionBase::buildoption,
        "Statistics used to combine outputs from all learners. You can use\n"
        "any statistic that can be computed by a StatsCollector.",
        OptionBase::basic_level);

    declareOption(ol, "exclude_extremes", &BaggingLearner::exclude_extremes,
                  OptionBase::buildoption,
                  "If >0, the sub-learners' outputs are sorted and the exclude_extremes\n"
                  "highest and lowest values are excluded before aggregation.");

    declareOption(ol, "output_sub_outputs", &BaggingLearner::output_sub_outputs,
                  OptionBase::buildoption,
                  "Whether computeOutput should append the sub-learners' outputs\n"
                  "to its output.");

    declareOption(ol, "learners", &BaggingLearner::learners,
                  OptionBase::learntoption,
                  "Trained sub-learners.");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}
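
// For reference, the same build options written as a PLearn script fragment.
// This is only a sketch: the BootstrapSplitter option name below is an
// assumption, and any Splitter/PLearner subclasses may be substituted.
//
//     BaggingLearner(
//         splitter = BootstrapSplitter( n_splits = 10 );  # assumed option name
//         template_learner = LinearRegressor();
//         stats = [ "E" ];            # aggregate sub-learner outputs by mean
//         exclude_extremes = 0;
//         output_sub_outputs = 0;
//     )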

////////////
// build_ //
////////////
void BaggingLearner::build_()
{}

///////////
// build //
///////////
void BaggingLearner::build()
{
    inherited::build();
    build_();
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void BaggingLearner::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(splitter,         copies);
    deepCopyField(template_learner, copies);
    deepCopyField(stats,            copies);
    deepCopyField(learners,         copies);
    deepCopyField(learners_outputs, copies);
    deepCopyField(outputs,          copies);
    deepCopyField(learner_costs,    copies);
    deepCopyField(last_test_input,  copies);
    // TODO Do we need to deep-copy stcol?
}

////////////////
// outputsize //
////////////////
int BaggingLearner::outputsize() const
{
    PLASSERT(template_learner);
    PLASSERT(splitter);
    int sz= template_learner->outputsize() * stats.length();
    if(output_sub_outputs)
        sz+= template_learner->outputsize() * splitter->nsplits();
    return sz;
}
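
// Worked example (hypothetical numbers): with a sub-learner outputsize of 2
// and stats = [ "E", "STDDEV" ], the aggregated part has 2 * 2 = 4 values;
// if output_sub_outputs is also set with 10 bags, the 2 * 10 = 20 raw
// sub-learner outputs are appended, for a total outputsize of 24.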

////////////
// forget //
////////////
void BaggingLearner::forget()
{
    for(int i= 0; i < learners.length(); ++i)
        learners[i]->forget();
    inherited::forget();
}

///////////
// train //
///////////
void BaggingLearner::train()
{
    PLASSERT(train_set);
    if(!splitter)
        PLERROR("BaggingLearner::train() needs a splitter.");
    if(!template_learner)
        PLERROR("BaggingLearner::train() needs a template learner.");
    if(nstages != 1)
        PLERROR("BaggingLearner.nstages should be 1 (not %d).", nstages);
    if(splitter->nSetsPerSplit() != 1)
        PLERROR("BaggingLearner.splitter->nSetsPerSplit() should be 1 (not %d).", 
                splitter->nSetsPerSplit());

    splitter->setDataSet(train_set);

    if (!initTrain())
        return;

    // init learners: one deep copy of the template per bag
    int nbags= splitter->nsplits();
    if(learners.size() != nbags)
    {
        learners.resize(nbags);
        for(int i= 0; i < nbags; ++i)
        {
            CopiesMap c;
            learners[i]= template_learner->deepCopy(c);
            learners[i]->report_progress= false;
        }
    }

    PP<ProgressBar> pb= 0;
    if(report_progress)
        pb= new ProgressBar("BaggingLearner::train", nbags);

    PLearnService& service(PLearnService::instance());
    int nservers= min(nbags, service.availableServers());

    if(nservers > 1 && parallelize_here) // parallel training
    {
        TVec<PP<RemotePLearnServer> > servers= service.reserveServers(nservers);
        nservers= servers.length();

        map<PP<RemotePLearnServer>, int> learners_ids;
        map<PP<RemotePLearnServer>, int> bagnums;
        map<PP<RemotePLearnServer>, int> step;

        // dispatch the first nservers bags, one per server
        for(int i= 0; i < nservers; ++i)
        {
            RemotePLearnServer* s= servers[i];
            int id= s->newObject(*learners[i]);
            VMat sts= splitter->getSplit(i)[0];
            if(master_sends_testset_rows)
                sts= new MemoryVMatrix(sts.toMat());
            s->callMethod(id, "setTrainingSet", sts, true);
            learners_ids[s]= id;
            bagnums[s]= i;
            step[s]= 1;
        }

        int lastbag= nservers-1;
        int ndone= 0;

        // event loop: each server walks through steps
        // 1 (setTrainingSet) -> 2 (train) -> 3 (fetch the trained learner),
        // then gets the next pending bag, if any.
        while(nservers > 0)
        {
            PP<RemotePLearnServer> s= service.waitForResult();
            switch(step[s])
            {
            case 1:
                DBG_LOG << "** get setTrainingSet result" << endl;
                s->getResults();// from setTrainingSet
                s->callMethod(learners_ids[s], "train");
                step[s]= 2;
                break;
            case 2:
                DBG_LOG << "** get train result" << endl;
                s->getResults();// from train
                if(pb) pb->update(++ndone);
                s->callMethod(learners_ids[s], "getObject");
                step[s]= 3;
                break;
            case 3:
                DBG_LOG << "** get getObject result" << endl;
                s->getResults(learners[bagnums[s]]);// from getObject
                s->deleteObject(learners_ids[s]);
                if(++lastbag < nbags)
                {
                    int id= s->newObject(*learners[lastbag]);
                    VMat sts= splitter->getSplit(lastbag)[0];
                    if(master_sends_testset_rows)
                        sts= new MemoryVMatrix(sts.toMat());
                    s->callMethod(id, "setTrainingSet", sts, true);
                    learners_ids[s]= id;
                    bagnums[s]= lastbag;
                    step[s]= 1;
                }
                else
                {
                    service.freeServer(s);
                    --nservers;
                }
                break;
            }
        }

        stage++;
        PLASSERT( stage == 1 );
        return; // done; early return avoids extra indentation below
    }

    // sequential training
    for(int i= 0; i < nbags; ++i)
    {
        PP<PLearner> l = learners[i];
        l->setTrainingSet(splitter->getSplit(i)[0]);
        l->train();
        if(pb) pb->update(i+1);
    }

    stage++;
    PLASSERT( stage == 1 );
}

///////////////////
// computeOutput //
///////////////////
void BaggingLearner::computeOutput(const Vec& input, Vec& output) const
{
    int nout = outputsize();
    output.resize(nout);
    int nlearners= learners.size();
    PLASSERT(template_learner);
    int sub_nout = template_learner->outputsize();
    learners_outputs.resize(nlearners, sub_nout);

    last_test_input.resize(input.size());
    last_test_input << input;// save it, to check in computeCostsFromOutputs

    for(int i= 0; i < nlearners; ++i)
    {
        Vec outp= learners_outputs(i);
        learners[i]->computeOutput(input, outp);
    }

    if(exclude_extremes > 0)
    {
        outputs.resize(nlearners, sub_nout);
        outputs << learners_outputs;
        // exclude the highest and lowest n predictions for each output
        int nexcl= 2*exclude_extremes;
        if(nlearners <= nexcl)
            PLERROR("BaggingLearner::computeOutput : Cannot exclude all outputs! "
                    "nlearners=%d, exclude_extremes=%d",nlearners,exclude_extremes);
        // sort all in place, one output at a time
        for(int j= 0; j < sub_nout; ++j)
            sortElements(outputs.column(j).toVec());
        // exclude from both ends
        outputs= outputs.subMatRows(exclude_extremes, outputs.length()-nexcl);
        nlearners-= nexcl;
    }
    else 
        outputs= learners_outputs;

    // aggregate the (possibly trimmed) sub-learner outputs...
    stcol.forget();
    for(int i= 0; i < outputs.length(); ++i)
        stcol.update(outputs(i));

    // ...then emit the requested statistics, grouped by output dimension
    int i= 0;
    for(int j= 0; j < stcol.size(); ++j)
        for(TVec<string>::iterator it= stats.begin();
            it != stats.end(); ++it)
            output[i++]= stcol.getStats(j).getStat(*it);

    if(output_sub_outputs)
        for(int j= 0; j < nlearners; ++j)
            for(int k= 0; k < sub_nout; ++k)
                output[i++]= learners_outputs(j,k);
}
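
// Output layout sketch (hypothetical numbers): with sub_nout = 2 and
// stats = [ "E", "STDDEV" ], the first values of the output are
//     [ E(out0), STDDEV(out0), E(out1), STDDEV(out1) ]
// followed, when output_sub_outputs is true, by every sub-learner's raw
// (untrimmed) outputs in bag order.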

/////////////////////////////
// computeCostsFromOutputs //
/////////////////////////////
void BaggingLearner::computeCostsFromOutputs(const Vec& input, const Vec& output,
                                             const Vec& target, Vec& costs) const
{
    if (!input.isEqual(last_test_input, true))
        PLERROR("BaggingLearner::computeCostsFromOutputs has to be called "
                "right after computeOutput, with the same input.");

    int nlearners= learners.size();
    costs.resize(nTestCosts());
    int k= 0;
    for(int i= 0; i < nlearners; ++i)
    {
        Vec subcosts;
        learners[i]->computeCostsFromOutputs(input, learners_outputs(i),
                                             target, subcosts);
        for(int j= 0; j < subcosts.length(); ++j)
            costs[k++]= subcosts[j];
    }
}

//////////////////////
// getTestCostNames //
//////////////////////
TVec<string> BaggingLearner::getTestCostNames() const
{
    PLASSERT(splitter);
    PLASSERT(template_learner);
    int nbags= splitter->nsplits();
    TVec<string> subcosts= template_learner->getTestCostNames();
    TVec<string> costnames(nTestCosts());
    int nsubcosts= subcosts.length();
    int k= 0;
    for(int i= 0; i < nbags; ++i)
        for(int j= 0; j < nsubcosts; ++j)
            costnames[k++]= string("learner")+tostring(i)+"."+subcosts[j];
    return costnames;
}
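
// E.g. (hypothetical), with 3 bags and a sub-learner reporting a single
// cost "mse", the test cost names are:
//     learner0.mse, learner1.mse, learner2.mse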

///////////////////////
// getTrainCostNames //
///////////////////////
TVec<string> BaggingLearner::getTrainCostNames() const
{
    return TVec<string>(); // for now
}

////////////////
// nTestCosts //
////////////////
int BaggingLearner::nTestCosts() const
{
    PLASSERT(splitter);
    PLASSERT(template_learner);
    return splitter->nsplits()*template_learner->nTestCosts();
}

/////////////////
// nTrainCosts //
/////////////////
int BaggingLearner::nTrainCosts() const
{
    return 0;
}

////////////////////////
// resetInternalState //
////////////////////////
void BaggingLearner::resetInternalState()
{
    for(int i= 0; i < learners.length(); ++i)
        learners[i]->resetInternalState();
}

///////////////////////
// isStatefulLearner //
///////////////////////
bool BaggingLearner::isStatefulLearner() const
{
    PLASSERT(template_learner);
    return template_learner->isStatefulLearner();
}

////////////////////
// setTrainingSet //
////////////////////
void BaggingLearner::setTrainingSet(VMat training_set, bool call_forget)
{
    PLASSERT(template_learner);
    // set the template learner's train set so that we can get
    // output size and names (among others)
    template_learner->setTrainingSet(training_set, call_forget);
    inherited::setTrainingSet(training_set, call_forget);
}

////////////////////
// getOutputNames //
////////////////////
TVec<string> BaggingLearner::getOutputNames() const
{
    PLASSERT(template_learner);
    PLASSERT(splitter);
    TVec<string> suboutputnames= template_learner->getOutputNames();
    TVec<string> outputnames= addStatNames(suboutputnames);
    if(output_sub_outputs)
    {
        int nbags= splitter->nsplits();
        int nsout= suboutputnames.length();
        for(int i= 0; i < nbags; ++i)
            for(int j= 0; j < nsout; ++j)
                outputnames.append(string("learner")+tostring(i)+"."+suboutputnames[j]);
    }
    return outputnames;
}

////////////////////////////
// setTrainStatsCollector //
////////////////////////////
void BaggingLearner::setTrainStatsCollector(PP<VecStatsCollector> statscol)
{
    inherited::setTrainStatsCollector(statscol);
    template_learner->setTrainStatsCollector(statscol);
}

//////////////////////////////
// setExperimentDirectory //
//////////////////////////////
void BaggingLearner::setExperimentDirectory(const PPath& the_expdir)
{
    inherited::setExperimentDirectory(the_expdir);
    template_learner->setExperimentDirectory(the_expdir / "BaggingSubLearner");
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :