00001 // -*- C++ -*-
00002 
00003 // SequentialValidation.cc
00004 //
00005 // Copyright (C) 2003 Rejean Ducharme, Yoshua Bengio
00006 // Copyright (C) 2003 Pascal Vincent
00007 //
00008 // Redistribution and use in source and binary forms, with or without
00009 // modification, are permitted provided that the following conditions are met:
00010 //
00011 //  1. Redistributions of source code must retain the above copyright
00012 //     notice, this list of conditions and the following disclaimer.
00013 //
00014 //  2. Redistributions in binary form must reproduce the above copyright
00015 //     notice, this list of conditions and the following disclaimer in the
00016 //     documentation and/or other materials provided with the distribution.
00017 //
00018 //  3. The name of the authors may not be used to endorse or promote
00019 //     products derived from this software without specific prior written
00020 //     permission.
00021 //
00022 // THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
00023 // IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
00024 // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
00025 // NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
00026 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
00027 // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
00028 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
00029 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
00030 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
00031 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
00032 //
00033 // This file is part of the PLearn library. For more information on the PLearn
00034 // library, go to the PLearn Web site at www.plearn.org
00035 
00036 
00037 // From the system (POSIX) headers
00038 #include <sys/types.h>
00039 #include <unistd.h>                          // for getpid
00040 
00041 // From PLearn
00042 #include "SequentialValidation.h"
00043 #include <plearn/vmat/FileVMatrix.h>
00044 #include <plearn/base/stringutils.h>
00045 #include <plearn/io/MatIO.h>
00046 #include <plearn/io/load_and_save.h>
00047 
00048 namespace PLearn {
00049 using namespace std;
00050 
00051 
00052 PLEARN_IMPLEMENT_OBJECT(
00053     SequentialValidation,
00054     "The SequentialValidation class allows you to describe a typical "
00055     "sequential validation experiment that you wish to perform.",
00056     "NO HELP");
00057 
00058 SequentialValidation::SequentialValidation()
00059     : init_train_size(1),
00060       warmup_size(0),
00061       train_step(1),
00062       last_test_time(-1),
00063       expdir(""),
00064       report_stats(true),
00065       save_final_model(true),
00066       save_initial_model(false),
00067       save_initial_seqval(true),
00068       save_data_sets(false),
00069       save_test_outputs(false),
00070       save_test_costs(false),
00071       save_stat_collectors(false),
00072       provide_learner_expdir(true),
00073       save_sequence_stats(true),
00074       report_memory_usage(false)
00075 {}
00076 
00077 void SequentialValidation::build_()
00078 {
00079     if ( dataset && dataset->inputsize() < 0 )
00080         dataset->defineSizes(dataset->width(), 0, 0);
00081 }
00082 
00083 void SequentialValidation::build()
00084 {
00085     inherited::build();
00086     build_();
00087 }
00088 
00089 void SequentialValidation::declareOptions(OptionList& ol)
00090 {
00091     declareOption(
00092         ol, "report_stats", &SequentialValidation::report_stats,
00093         OptionBase::buildoption,
00094         "If true, the computed global statistics specified in statnames will be saved in global_stats.pmat \n"
00095         "and the corresponding per-split statistics will be saved in split_stats.pmat \n"
00096         "For reference, all cost names (as given by the learner's getTrainCostNames() and getTestCostNames() ) \n"
00097         "will be reported in files train_cost_names.txt and test_cost_names.txt");
00098 
00099     declareOption(
00100         ol, "statnames", &SequentialValidation::statnames,
00101         OptionBase::buildoption,
00102         "A list of global statistics we are interested in.\n"
00103         "These are strings of the form S1[S2[dataset.cost_name]] where:\n"
00104         "  - dataset is train or test1 or test2 ... (train being \n"
00105         "    the first dataset in a split, test1 the second, ...) \n"
00106         "  - cost_name is one of the training or test cost names (depending on dataset) understood \n"
00107         "    by the underlying learner (see its getTrainCostNames and getTestCostNames methods) \n"
00108         "  - S1 and S2 are a statistic, i.e. one of: E (expectation), V(variance), MIN, MAX, STDDEV, ... \n"
00109         "    S2 is computed over the samples of a given dataset split. S1 is over the splits. \n");
00110 
00111     declareOption(
00112         ol, "timewise_statnames", &SequentialValidation::timewise_statnames,
00113         OptionBase::buildoption,
00114         "Statistics to be collected into a VecStatsCollector at each timestep.");
00115   
00116     declareOption(
00117         ol, "expdir", &SequentialValidation::expdir,
00118         OptionBase::buildoption,
00119         "Path of this experiment's directory in which to save all experiment results (will be created if it does not already exist). \n");
00120 
00121     declareOption(
00122         ol, "learner", &SequentialValidation::learner,
00123         OptionBase::buildoption,
00124         "The SequentialLearner to train/test. \n");
00125 
00126     declareOption(
00127         ol, "accessory_learners", &SequentialValidation::accessory_learners,
00128         OptionBase::buildoption,
00129         "Accessory learners that must be managed in parallel with the main one." );
00130   
00131     declareOption(
00132         ol, "dataset", &SequentialValidation::dataset,
00133         OptionBase::buildoption,
00134         "The dataset to use for training/testing. \n");
00135 
00136     declareOption(
00137         ol, "init_train_size", &SequentialValidation::init_train_size,
00138         OptionBase::buildoption,
00139         "Size of the first training set.  Before starting the train/test cycle,\n"
00140         "the method setTestStartTime() is called on the learner with init_train_size\n"
00141         "as argument.");
00142 
00143     declareOption(
00144         ol, "warmup_size", &SequentialValidation::warmup_size,
00145         OptionBase::buildoption,
00146         "If specified, this is a number of time-steps that are taken FROM THE\n"
00147         "END of init_train_size to start \"testing\" (i.e. alternating between\n"
00148         "train and test), but WITHOUT ACCUMULATING ANY TEST STATISTICS.  In\n"
00149         "other words, this is a \"warmup\" period just before the true test.\n"
00150         "Before starting the real test period, the setTestStartTime() method is\n"
00151         "called on the learner, followed by resetInternalState().  Note that\n"
00152         "the very first \"init_train_size\" is REDUCED by the warmup_size.\n");
00153   
00154     declareOption(
00155         ol, "train_step", &SequentialValidation::train_step,
00156         OptionBase::buildoption,
00157         "At how many timesteps must we retrain? (default: 1)");
00158   
00159     declareOption(
00160         ol, "last_test_time", &SequentialValidation::last_test_time,
00161         OptionBase::buildoption,
00162         "The last time-step to use for testing (Default = -1, i.e. use all data)");
00163   
00164     declareOption(
00165         ol, "save_final_model", &SequentialValidation::save_final_model,
00166         OptionBase::buildoption,
00167         "If true, the final model will be saved in model.psave \n");
00168 
00169     declareOption(
00170         ol, "save_initial_model", &SequentialValidation::save_initial_model,
00171         OptionBase::buildoption,
00172         "If true, the initial model will be saved in initial_model.psave. \n");
00173 
00174     declareOption(
00175         ol, "save_initial_seqval", &SequentialValidation::save_initial_seqval,
00176         OptionBase::buildoption,
00177         "If true, this SequentialValidation object will be saved in sequential_validation.psave. \n");
00178 
00179     declareOption(
00180         ol, "save_data_sets", &SequentialValidation::save_data_sets,
00181         OptionBase::buildoption,
00182         "If true, the data sets (train/test) for each split will be saved. \n");
00183 
00184     declareOption(
00185         ol, "save_test_outputs", &SequentialValidation::save_test_outputs,
00186         OptionBase::buildoption,
00187         "If true, the outputs of the tests will be saved in test_outputs.pmat \n");
00188 
00189     declareOption(
00190         ol, "save_test_costs", &SequentialValidation::save_test_costs,
00191         OptionBase::buildoption,
00192         "If true, the costs of the tests will be saved in test_costs.pmat \n");
00193 
00194     declareOption(
00195         ol, "save_stat_collectors", &SequentialValidation::save_stat_collectors,
00196         OptionBase::buildoption,
00197         "If true, stat collectors of each data sets (train/test) will be saved for each split. \n");
00198 
00199     declareOption(
00200         ol, "provide_learner_expdir", &SequentialValidation::provide_learner_expdir,
00201         OptionBase::buildoption,
00202         "If true, learning results from the learner will be saved. \n");
00203 
00204     declareOption(
00205         ol, "save_sequence_stats",
00206         &SequentialValidation::save_sequence_stats,
00207         OptionBase::buildoption,
00208         "Whether the statistics accumulated at each time step should\n"
00209         "be saved in the file \"sequence_stats.pmat\".  WARNING: this\n"
00210         "file can get big!  (Default = 1, i.e. true)");
00211 
00212     declareOption(
00213         ol, "report_memory_usage",
00214         &SequentialValidation::report_memory_usage,
00215         OptionBase::buildoption,
00216         "Whether to report memory usage in a directory expdir/MemoryUsage.\n"
00217         "Memory usage is reported AT THE BEGINNING OF EACH time-step, using\n"
00218         "both the /proc/PID/status method, and the 'mem_usage PID' method\n"
00219         "(if available).  This is only supported on Linux at the moment.\n"
00220         "(Default = false)");
00221 
00222     declareOption(
00223         ol, "measure_after_train",
00224         &SequentialValidation::measure_after_train,
00225         OptionBase::buildoption,
00226         "List of options to \"measure\" AFTER training at each timestep, but\n"
00227         "BEFORE testing.  The options are specified as a list of pairs\n"
00228         "'option':'filename', where the option is measured with respect to the\n"
00229         "sequential validation object itself.  Hence, if the learner contains\n"
00230         "an option 'advisor' that you want to save at each time step, you would\n"
00231         "write [\"learner.advisor\":\"advisor.psave\"].  The files are saved in the\n"
00232         "splitdir directory, which is unique for each timestep.");
00233   
00234     inherited::declareOptions(ol);
00235 }
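// A minimal configuration sketch in PLearn script syntax (illustrative only: the
// file names, option values and the choice of learner below are assumptions, not
// taken from this file):
//
//   SequentialValidation(
//       expdir = "seqval_expdir";
//       dataset = AutoVMatrix( specification = "data.amat" );
//       learner = SomeSequentialLearner( ... );   // any SequentialLearner subclass
//       init_train_size = 100;
//       warmup_size = 10;
//       train_step = 5;
//       statnames = [ "E[E[test1.mse]]", "STDDEV[E[test1.mse]]" ];
//       report_stats = 1;
//   )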
00236 
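// Protocol implemented by run(): after validating the options, building the
// stat collectors and (optionally) warming up the model, the method loops over
// time-steps t = init_train_size .. last test time.  At each t it optionally
// reports memory usage, retrains on rows 0..t-1 when shouldTrain(t) allows it
// (and always at t == init_train_size), tests the single observation at row t,
// and appends the per-split and timewise statistics; after the loop, the global
// statistics are finalized and written to the experiment directory.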
00237 void SequentialValidation::run()
00238 {  
00239     if (expdir=="")
00240         PLERROR("No expdir specified for SequentialValidation.");
00241     else
00242     {
00243         if(pathexists(expdir))
00244             PLERROR("Directory (or file) %s already exists. First move it out of the way.", expdir.c_str());
00245         if(!force_mkdir(expdir))
00246             PLERROR("Could not create experiment directory %s", expdir.c_str());
00247     }
00248 
00249     if (!learner)
00250         PLERROR("SequentialValidation::run: learner not specified.");
00251 
00252     if (warmup_size >= init_train_size)
00253         PLERROR("SequentialValidation::run: 'warmup_size' must be strictly smaller than "
00254                 "'init_train_size'");
00255 
00256     if (warmup_size < 0 || init_train_size < 0)
00257         PLERROR("SequentialValidation::run: negative warmup_size or init_train_size.");
00258   
00259     // Get a first dataset to set inputsize() and targetsize()
00260     VMat train_vmat = trainVMat(init_train_size);
00261     for ( int a=0; a < accessory_learners.length(); a++ )
00262         accessory_learners[a]->setTrainingSet( train_vmat, false );
00263     learner->setTrainingSet( train_vmat, false );
00264   
00265     setExperimentDirectory( append_slash(expdir) );
00266 
00267     // If we need to report memory usage, create the appropriate directory
00268     if (report_memory_usage)
00269         force_mkdir( expdir / "MemoryUsage" );
00270 
00271     // Save this experiment description in the expdir (buildoptions only)
00272     if (save_initial_seqval)
00273         PLearn::save(expdir / "sequential_validation.psave", *this);
00274 
00275     // Create the stat collectors and set them into the learner(s)
00276     createStatCollectors();
00277     createStatSpecs();
00278   
00279     // Warm up the model before starting the real experiment; this is done
00280     // after setting the training stats collectors into everybody...
00281     if (warmup_size > 0)
00282         warmupModel(warmup_size);
00283 
00284     // Create all VMatrix related to saving statistics
00285     if (report_stats)
00286         createStatVMats();
00287 
00288     // Final model initialization before the test
00289     setTestStartTime(init_train_size, true /* call_build */);
00290   
00291     VMat test_outputs;
00292     VMat test_costs;
00293     if (save_test_outputs)
00294         test_outputs = new FileVMatrix(expdir / "test_outputs.pmat",0,
00295                                        learner->outputsize());
00296     if (save_test_costs)
00297         test_costs = new FileVMatrix(expdir / "test_costs.pmat",0,
00298                                      learner->getTestCostNames());
00299 
00300     // Some further initializations
00301     int maxt = (last_test_time >= 0? last_test_time : maxTimeStep() - 1);
00302     int splitnum = 0;
00303     output.resize(learner->outputsize());
00304     costs.resize(learner->nTestCosts());
00305     for (int t=init_train_size; t <= maxt; t++, splitnum++)
00306     {
00307 #ifdef DEBUG
00308         cout << "SequentialValidation::run() -- sub_train.length = " << t << " and sub_test.length = " << t+horizon << endl;
00309 #endif
00310         if (report_memory_usage)
00311             reportMemoryUsage(t);
00312 
00313         // Create splitdirs
00314         PPath splitdir = expdir / "test_t="+tostring(t);
00315         if (save_data_sets                 ||
00316             save_initial_model             ||
00317             save_stat_collectors           ||
00318             save_final_model               ||
00319             measure_after_train.size() > 0 ||
00320             measure_after_test.size()  > 0  )
00321             force_mkdir(splitdir);
00322     
00323         // Ensure a first train and, afterwards, train only if we arrive at an allowed
00324         // training time-step
00325         if ( t == init_train_size || shouldTrain(t)) {
00326             // Compute training set.  Don't compute test set right away in case
00327             // it's a complicated structure that cannot co-exist with an
00328             // instantiated training set
00329             VMat sub_train = trainVMat(t);
00330             if (save_data_sets)
00331                 PLearn::save(splitdir / "training_set.psave", sub_train);
00332             if (save_initial_model)
00333                 PLearn::save(splitdir / "initial_learner.psave",learner);
00334 
00335             // Perform train
00336             trainLearners(sub_train);
00337       
00338             // Save post-train stuff
00339             if (save_stat_collectors)
00340                 PLearn::save(splitdir / "train_stats.psave",train_stats);
00341             if (save_final_model)
00342                 PLearn::save(splitdir / "final_learner.psave",learner);
00343             measureOptions(measure_after_train, splitdir);
00344         }
00345 
00346         // TEST: simply use computeOutputAndCosts for 1 observation in this
00347         // implementation
00348         VMat sub_test = testVMat(t);
00349         testLearners(sub_test);
00350     
00351         // Save what is required from the test run
00352         if (save_data_sets)
00353             PLearn::save(splitdir / "test_set.psave", sub_test);
00354         if (test_outputs)
00355             test_outputs->appendRow(output);
00356         if (test_costs)
00357             test_costs->appendRow(costs);
00358         if (save_stat_collectors)
00359             PLearn::save(splitdir / "test_stats.psave",test_stats);
00360         measureOptions(measure_after_test, splitdir);
00361 
00362         const int nstats = statnames.size();
00363         Vec splitres(1+nstats);
00364         splitres[0] = splitnum;
00365 
00366         // Compute statnames for this split only
00367         for(int k=0; k<nstats; k++)
00368         {
00369             StatSpec& sp = statspecs[k];
00370             if (sp.setnum>=stcol.length())
00371                 PLERROR("SequentialValidation::run, trying to access a test set (test%d) beyond the last one (test%d)",
00372                         sp.setnum, stcol.length()-1);
00373             splitres[k+1] = stcol[sp.setnum]->getStat(sp.intstatname);
00374         }
00375 
00376         if (split_stats_vm)
00377             split_stats_vm->appendRow(splitres);
00378 
00379         // Add to overall stats collector
00380         sequence_stats->update(splitres.subVec(1,nstats));
00381 
00382         // Now compute timewise statnames.  First loop is on the inner
00383         // statistics; then update the stats collector; then loop on the outer
00384         // statistics
00385         if (timewise_stats_vm) {
00386             const int timewise_nstats = timewise_statnames.size();
00387             Vec timewise_res(timewise_nstats);
00388             for (int k=0; k<timewise_nstats; ++k) {
00389                 StatSpec& sp = timewise_statspecs[k];
00390                 if (sp.setnum>=stcol.length())
00391                     PLERROR("SequentialValidation::run, trying to access a test set "
00392                             "(test%d) beyond the last one (test%d)",
00393                             sp.setnum, stcol.length()-1);
00394                 timewise_res[k] = stcol[sp.setnum]->getStat(sp.intstatname);
00395             }
00396             timewise_stats->update(timewise_res);
00397             for (int k=0; k<timewise_nstats; ++k)
00398                 timewise_res[k] =
00399                     timewise_stats->getStats(k).getStat(timewise_statspecs[k].extstat);
00400             timewise_stats_vm->appendRow(timewise_res);
00401         }
00402     }
00403 
00404     sequence_stats->finalize();
00405 
00406     const int nstats = statnames.size();
00407     Vec global_result(nstats);
00408     for (int k=0; k<nstats; k++)
00409         global_result[k] = sequence_stats->getStats(k).getStat(statspecs[k].extstat);
00410 
00411     if (global_stats_vm)
00412         global_stats_vm->appendRow(global_result);
00413   
00414     reportStats(global_result);
00415 }
00416 
00417 void SequentialValidation::warmupModel(int warmup_size)
00418 {
00419     PLASSERT( warmup_size < init_train_size );
00420     setTestStartTime(init_train_size - warmup_size, true /* call_build */);
00421   
00422     for (int t = init_train_size-warmup_size ; t<init_train_size ; ++t) {
00423         VMat sub_train = trainVMat(t);           // train
00424         trainLearners(sub_train);
00425 
00426         VMat sub_test = testVMat(t);             // test
00427         testLearners(sub_test);
00428     }
00429 }
00430 
00431 void SequentialValidation::setTestStartTime(int test_start_time, bool call_build)
00432 {
00433     // Ensure correct build of learner and reset internal state.  We call
00434     // setTestStartTime TWICE, because some learners need it before build,
00435     // and because other learners, such as SequentialSelector-types, will not
00436     // have finished constructing the complete structure of sub-learners
00437     // until AFTER build, and we want the setTestStartTime() message to
00438     // propagate to everybody.
00439 
00440     PLASSERT( test_start_time > 0 );
00441   
00442     // Start with the accessory learners
00443     for (int a=0, n=accessory_learners.length() ; a<n ; ++a ) {
00444         if (call_build) {
00445             accessory_learners[a]->setTestStartTime(test_start_time);
00446             accessory_learners[a]->build();
00447         }
00448         accessory_learners[a]->setTestStartTime(test_start_time);
00449         accessory_learners[a]->resetInternalState();
00450     }
00451 
00452     // And now the main learner
00453     if (call_build) {
00454         learner->setTestStartTime(test_start_time);
00455         learner->build();
00456     }
00457     learner->setTestStartTime(test_start_time);
00458     learner->resetInternalState();
00459 }
00460 
00461 void SequentialValidation::setExperimentDirectory(const PPath& _expdir)
00462 {
00463     expdir = _expdir;
00464     if(provide_learner_expdir)
00465         learner->setExperimentDirectory(expdir / "Model");
00466 }
00467 
00468 void SequentialValidation::reportStats(const Vec& global_result)
00469 {
00470     if (!report_stats)
00471         return;
00472   
00473     saveAscii(expdir+"global_result.avec", global_result);
00474 //  saveAscii(expdir+"predictions.amat", learner->predictions);
00475 //  saveAscii(expdir+"errors.amat", learner->errors, learner->getTestCostNames());
00476 }
00477 
00478 void SequentialValidation::reportMemoryUsage(int t)
00479 {
00480     pid_t pid = getpid();
00481     char t_str[100];
00482     sprintf(t_str, "%05d", t);
00483 
00484     string memdir = append_slash(expdir) + "MemoryUsage";
00485     string method1 = string("cat /proc/")+tostring(pid)+"/status > "
00486         + memdir + "/status_" + t_str;
00487     string method2 = string("mem_usage ")+tostring(pid)+" > "
00488         + memdir + "/mem_usage_" + t_str;
00489 
00490     system(method1.c_str());
00491     system(method2.c_str());
00492 }
00493 
00494 bool SequentialValidation::shouldTrain(int t)
00495 {
00496     if ( train_step <= 0 )
00497         return false;
00498 
00499     return (t - init_train_size) % train_step == 0;
00500 }
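// Illustrative example (assumed values): with init_train_size = 100 and
// train_step = 5, shouldTrain() returns true at t = 100, 105, 110, ...; with
// train_step <= 0 it always returns false, so the model is trained only once,
// via the explicit "t == init_train_size" test in run().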
00501 
00502 VMat SequentialValidation::trainVMat(int t)
00503 {
00504     // exclude t, last training pair is (t-2,t-1)
00505     PLASSERT( dataset );
00506     return dataset.subMatRows(0,t);
00507 }
00508 
00509 VMat SequentialValidation::testVMat(int t)
00510 {
00511     PLASSERT( dataset );
00512     return dataset.subMatRows(0,t+1);
00513 }
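// Note on the two helpers above: trainVMat(t) returns the first t rows of the
// dataset (indices 0..t-1), i.e. everything strictly before time-step t, while
// testVMat(t) returns rows 0..t, so the single observation actually evaluated
// by testLearners() at time-step t is the last row of the returned matrix.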
00514 
00515 int SequentialValidation::maxTimeStep() const
00516 {
00517     PLASSERT( dataset );
00518     return dataset.length();
00519 }
00520 
00521 void SequentialValidation::measureOptions(
00522     const TVec< pair<string,string> >& options, PPath where_to_save)
00523 {
00524     for (int i=0, n=options.size() ; i<n ; ++i) {
00525         const string& optionname = options[i].first;
00526         PPath filename = where_to_save / options[i].second;
00527         string optvalue = getOption(optionname);
00528         PStream out = openFile(filename, PStream::raw_ascii, "w");
00529         out << optvalue;
00530     }
00531 }
00532 
00533 void SequentialValidation::createStatCollectors()
00534 {
00535     // Always manage the accessory_learners first since they may be used
00536     // within the main learner.
00537     accessory_train_stats = new VecStatsCollector(); 
00538     for (int a=0, n=accessory_learners.length() ; a<n ; ++a)
00539         accessory_learners[a]->setTrainStatsCollector( accessory_train_stats );
00540   
00541     // stats for a train on one split
00542     stcol.resize(2);
00543     train_stats = new VecStatsCollector();
00544     train_stats->setFieldNames(learner->getTrainCostNames());
00545     learner->setTrainStatsCollector(train_stats);  
00546     stcol[0] = train_stats;
00547 
00548     // stats for a test on one split
00549     test_stats = new VecStatsCollector();
00550     test_stats->setFieldNames(learner->getTestCostNames());
00551     stcol[1] = test_stats;
00552 
00553     // stats over all sequence
00554     sequence_stats = new VecStatsCollector();
00555 
00556     // timewise stats (may not be used)
00557     timewise_stats = new VecStatsCollector();
00558 }
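// Note on the layout of 'stcol' built above: stcol[0] holds the training
// statistics and stcol[1] the test statistics of the current split; these are
// the collectors that the "train" and "test1" dataset prefixes of 'statnames'
// resolve to when the per-split statistics are extracted in run().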
00559 
00560 void SequentialValidation::createStatSpecs()
00561 {
00562     // Stat specs (overall)
00563     const int nstats = statnames.length();
00564     statspecs.resize(nstats);
00565     for (int k=0; k<nstats; k++)
00566         statspecs[k].init(statnames[k]);
00567 
00568     // Stat specs (timewise)
00569     const int timewise_nstats = timewise_statnames.length();
00570     timewise_statspecs.resize(timewise_nstats);
00571     for (int k=0; k<timewise_nstats; ++k)
00572         timewise_statspecs[k].init(timewise_statnames[k]);
00573 }
00574 
00575 void SequentialValidation::createStatVMats()
00576 {
00577     TVec<string> traincostnames = learner->getTrainCostNames();
00578     TVec<string> testcostnames  = learner->getTestCostNames();
00579     const int nstats = statnames.size();
00580     const int timewise_nstats = timewise_statnames.size();
00581 
00582     saveStringInFile(expdir / "train_cost_names.txt", join(traincostnames,"\n")+"\n");
00583     saveStringInFile(expdir / "test_cost_names.txt",  join(testcostnames,"\n")+"\n");
00584 
00585     global_stats_vm = new FileVMatrix(expdir / "global_stats.pmat", 0, nstats);
00586     for(int k=0; k<nstats; k++)
00587         global_stats_vm->declareField(k,statspecs[k].statName());
00588     global_stats_vm->saveFieldInfos();
00589 
00590     if (save_sequence_stats) {
00591         split_stats_vm = new FileVMatrix(expdir+"sequence_stats.pmat", 0,
00592                                          1+nstats);
00593         split_stats_vm->declareField(0,"splitnum");
00594         for(int k=0; k<nstats; k++)
00595             split_stats_vm->declareField(k+1,statspecs[k].setname + "." + statspecs[k].intstatname);
00596         split_stats_vm->saveFieldInfos();
00597     }
00598 
00599     if (timewise_nstats > 0) {
00600         timewise_stats_vm = new FileVMatrix(expdir+"timewise_stats.pmat", 0,
00601                                             timewise_nstats);
00602         for (int k=0; k<timewise_nstats; ++k)
00603             timewise_stats_vm->declareField(k, timewise_statspecs[k].statName());
00604         timewise_stats_vm->saveFieldInfos();
00605     }
00606 }
00607 
00608 void SequentialValidation::trainLearners(VMat training_set)
00609 {
00610     for (int a=0, n=accessory_learners.length(); a<n ; ++a)
00611     {
00612         accessory_train_stats->forget();
00613         accessory_learners[a]->setTrainingSet(training_set, false);
00614         accessory_learners[a]->train();        
00615     }
00616     train_stats->forget();
00617     learner->setTrainingSet(training_set, false);
00618     learner->train();
00619     train_stats->finalize();  
00620 }
00621 
00622 void SequentialValidation::testLearners(VMat test_set)
00623 {
00624     real weight;
00625     test_set.getExample(test_set.length()-1, input, target, weight);
00626     for (int a=0, n=accessory_learners.length() ; a<n ; ++a )
00627     {
00628         accessory_learners[a]->setTestSet(test_set);         // temporary hack
00629         accessory_learners[a]->computeOutputAndCosts(input, target,
00630                                                      dummy_output, dummy_costs);
00631     }
00632     test_stats->forget();
00633     learner->setTestSet(test_set);           // temporary hack
00634     learner->computeOutputAndCosts(input, target, output, costs);
00635     test_stats->update(costs);
00636     test_stats->finalize();
00637 }
00638 
00639 void SequentialValidation::makeDeepCopyFromShallowCopy(CopiesMap& copies)
00640 {
00641     inherited::makeDeepCopyFromShallowCopy(copies);
00642 
00643     deepCopyField(train_stats,            copies);
00644     deepCopyField(accessory_train_stats,  copies);
00645     deepCopyField(test_stats,             copies);
00646     deepCopyField(sequence_stats,         copies);
00647     deepCopyField(timewise_stats,         copies);
00648     deepCopyField(stcol,                  copies);
00649     deepCopyField(statspecs,              copies);
00650     deepCopyField(timewise_statspecs,     copies);
00651     deepCopyField(global_stats_vm,        copies);
00652     deepCopyField(split_stats_vm,         copies);
00653     deepCopyField(timewise_stats_vm,      copies); 
00654     deepCopyField(input,                  copies);
00655     deepCopyField(target,                 copies);
00656     deepCopyField(dummy_output,           copies);
00657     deepCopyField(dummy_costs,            copies);
00658     deepCopyField(output,                 copies);
00659     deepCopyField(costs,                  copies);
00660   
00661     deepCopyField(dataset,                copies);
00662     deepCopyField(learner,                copies);
00663     deepCopyField(accessory_learners,     copies);  
00664     deepCopyField(statnames,              copies);
00665     deepCopyField(timewise_statnames,     copies);
00666     deepCopyField(measure_after_train,    copies);
00667     deepCopyField(measure_after_test,     copies);
00668 }
00669 
00670 
00671 } // end of namespace PLearn
00672 
00673 
00674 /*
00675   Local Variables:
00676   mode:c++
00677   c-basic-offset:4
00678   c-file-style:"stroustrup"
00679   c-file-offsets:((innamespace . 0)(inline-open . 0))
00680   indent-tabs-mode:nil
00681   fill-column:79
00682   End:
00683 */
00684 // vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :