
// -*- C++ -*-

// HyperOptimize.cc
//
// Copyright (C) 2003-2006 ApSTAT Technologies Inc.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Author: Pascal Vincent
// Documentation: Nicolas Chapados


/* *******************************************************
 * $Id: HyperOptimize.cc 9970 2009-02-27 21:24:14Z nouiz $
 ******************************************************* */

#include "HyperOptimize.h"
#include "HyperLearner.h"
#include <plearn/io/load_and_save.h>
#include <plearn/base/stringutils.h>
#include <plearn/vmat/FileVMatrix.h>
#include <plearn/vmat/MemoryVMatrix.h>
#include <plearn/sys/Profiler.h>

namespace PLearn {
using namespace std;
PLEARN_IMPLEMENT_OBJECT(
    HyperOptimize,
    "Carry out a hyper-parameter optimization according to an Oracle",
    "HyperOptimize is part of a sequence of HyperCommands (specified within an\n"
    "HyperLearner) to optimize a validation cost over settings of\n"
    "hyper-parameters provided by an Oracle.  [NOTE: The \"underlying learner\" is\n"
    "the PLearner object (specified within the enclosing HyperLearner) whose\n"
    "hyper-parameters we are trying to optimize.]\n"
    "\n"
    "The sequence of steps followed by HyperOptimize is as follows:\n"
    "\n"
    "- 1) Gather a \"trial\" from an Oracle.  A \"trial\" is a full setting of\n"
    "  hyper-parameters (option name/value pairs) that the underlying learner\n"
    "  should be trained with.\n"
    "\n"
    "- 2) Set the options within the underlying learner that correspond to\n"
    "  the current trial.\n"
    "\n"
    "- 3) Train and test the underlying learner.  The tester used for this\n"
    "  purpose is a PTester specified in the enclosing HyperLearner.  By\n"
    "  default, we rely on that PTester's Splitter as well; however, an\n"
    "  overriding Splitter may be specified within the HyperCommand.\n"
    "\n"
    "- 4) After training/testing, measure the cost to optimize, given by the\n"
    "  'which_cost' option.  This specifies an index into the test statistics\n"
    "  given by the 'statnames' option in PTester.  The measured cost gives\n"
    "  the performance of the current trial, i.e. how well the current\n"
    "  setting of hyper-parameters performs.\n"
    "\n"
    "- 5) Repeat steps 1-4 until the Oracle tells us \"no more trials\".\n"
    "\n"
    "- 6) Find the best setting of hyper-parameters among all those tried\n"
    "  (\"best\" being defined as the one that minimizes the cost measured in\n"
    "  Step 4).\n"
    "\n"
    "- 7) Set the underlying learner within the enclosing HyperLearner to be\n"
    "  the BEST ONE found in Step 6.\n"
    "\n"
    "Optionally, instead of a plain Train/Test in Step 3, a SUB-STRATEGY may be\n"
    "invoked.  This can be viewed as a \"sub-routine\" for hyper-optimization and\n"
    "can be used to implement a form of conditioning: given the current setting\n"
    "for hyper-parameters X,Y,Z, find the best setting of hyper-parameters\n"
    "T,U,V.  The most common example is early stopping when training a\n"
    "neural network: a first-level HyperOptimize command can use an\n"
    "ExplicitListOracle to jointly optimize over weight decays and the number of\n"
    "hidden units.  A sub-strategy can then be used with an EarlyStoppingOracle\n"
    "to find the optimal number of training stages (epochs) for each combination\n"
    "of weight decay/hidden units.\n"
    "\n"
    "Note that after optimization, the matrix of all trials is available through\n"
    "the option 'resultsmat' (which is declared as nosave).  This is available\n"
    "even if no expdir has been declared.\n"
    );


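/* Illustrative usage sketch (a comment, not code from this file): a minimal
 * .plearn fragment showing where HyperOptimize sits inside a HyperLearner's
 * strategy.  The option names of HyperLearner, PTester and the oracles shown
 * here are assumptions for illustration only, and elided parts are marked
 * "...":
 *
 *   HyperLearner(
 *       learner = ... ;                   # the underlying PLearner
 *       tester = PTester( statnames = [ "E[test.E[mse]]" ] ; ... ) ;
 *       option_fields = [ "nhidden", "weight_decay" ] ;
 *       strategy = [
 *           HyperOptimize(
 *               which_cost = "E[test.E[mse]]" ;
 *               oracle = ExplicitListOracle( ... ) ;     # nhidden, weight_decay
 *               sub_strategy = [
 *                   HyperOptimize(
 *                       which_cost = "E[test.E[mse]]" ;
 *                       oracle = EarlyStoppingOracle( ... )  # nstages
 *                   )
 *               ]
 *           )
 *       ]
 *   )
 */
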
HyperOptimize::HyperOptimize()
    : best_objective(REAL_MAX),
      trialnum(0),
      auto_save_timer(new PTimer()),
      which_cost_pos(-1),
      which_cost(),
      min_n_trials(0),
      provide_tester_expdir(false),
      rerun_after_sub(false),
      provide_sub_expdir(true),
      save_best_learner(false),
      auto_save(0),
      auto_save_test(0),
      auto_save_diff_time(3*60*60)
{ }

////////////////////
// declareOptions //
////////////////////
void HyperOptimize::declareOptions(OptionList& ol)
{
    declareOption(
        ol, "which_cost", &HyperOptimize::which_cost, OptionBase::buildoption,
        "An index or a name in the tester's statnames to be used as the"
        " objective cost to minimize. If the index is < 0, we will take the"
        " last learner as the best.");

    declareOption(
        ol, "min_n_trials", &HyperOptimize::min_n_trials, OptionBase::buildoption,
        "Minimum number of trials before saving the best model.");

    declareOption(
        ol, "oracle", &HyperOptimize::oracle, OptionBase::buildoption,
        "Oracle to interrogate to get hyper-parameter values to try.");

    declareOption(
        ol, "provide_tester_expdir", &HyperOptimize::provide_tester_expdir, OptionBase::buildoption,
        "Should the tester be provided with an expdir for each option combination to test?");

    declareOption(
        ol, "sub_strategy", &HyperOptimize::sub_strategy, OptionBase::buildoption,
        "Optional sub-strategy to optimize other hyper-parameters (for each combination given by the oracle).");

    declareOption(
        ol, "rerun_after_sub", &HyperOptimize::rerun_after_sub, OptionBase::buildoption,
        "If this is true, a new evaluation will be performed after executing the sub-strategy, \n"
        "using this HyperOptimizer's splitter and which_cost. \n"
        "This is useful if the sub_strategy optimizes a different cost, or uses different splitting.\n");

    declareOption(
        ol, "provide_sub_expdir", &HyperOptimize::provide_sub_expdir, OptionBase::buildoption,
        "Should sub_strategy commands be provided with an expdir?");

    declareOption(
        ol, "save_best_learner", &HyperOptimize::save_best_learner,
        OptionBase::buildoption,
        "If true, the best learner at any step will be saved in the\n"
        "strategy expdir, as 'current_best_learner.psave'.");

    declareOption(
        ol, "splitter", &HyperOptimize::splitter, OptionBase::buildoption,
        "If not specified, we will use the default splitter specified in the hyper-learner's tester option.");

    declareOption(
        ol, "auto_save", &HyperOptimize::auto_save, OptionBase::buildoption,
        "Save the hlearner and reload it if necessary.\n"
        "0 means never, 1 means always, and >0 saves iff trialnum % auto_save == 0.\n"
        "In the last case, it also saves after the last trial.\n"
        "See auto_save_diff_time, as both conditions must be true to save.\n");

    declareOption(
        ol, "auto_save_diff_time", &HyperOptimize::auto_save_diff_time,
        OptionBase::buildoption,
        "HyperOptimize::auto_save_diff_time is the minimum amount of time\n"
        "(in seconds) before the first save point, then between two\n"
        "consecutive save points.");

    declareOption(
        ol, "auto_save_test", &HyperOptimize::auto_save_test, OptionBase::buildoption,
        "Exit after each auto_save. This is useful to test auto_save.\n"
        "0 means never, 1 means always, and >0 exits iff trialnum % auto_save_test == 0.");

    declareOption(
        ol, "resultsmat", &HyperOptimize::resultsmat,
        OptionBase::learntoption | OptionBase::nosave,
        "Gives access to the results of all trials during the last training.\n"
        "The last row lists the best results found and kept.  Note that this\n"
        "is declared 'nosave' and is intended for programmatic access by other\n"
        "functions through the getOption() mechanism. If an expdir is declared,\n"
        "this matrix is available under the name 'results.pmat' in the expdir.");

    declareOption(ol, "best_objective", &HyperOptimize::best_objective,
                  OptionBase::learntoption,
                  "The best objective seen to date.");

    declareOption(ol, "best_results", &HyperOptimize::best_results,
                  OptionBase::learntoption,
                  "The best results seen to date.");

    declareOption(ol, "best_learner", &HyperOptimize::best_learner,
                  OptionBase::learntoption,
                  "A deep copy of the best learner seen to date.");

    declareOption(ol, "trialnum", &HyperOptimize::trialnum,
                  OptionBase::learntoption, "The number of trials done.");

    declareOption(ol, "option_vals", &HyperOptimize::option_vals,
                  OptionBase::learntoption, "The option values to try.");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}
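
// Example of the combined auto_save conditions (hypothetical values): with
// auto_save = 5 and auto_save_diff_time = 3600, a save point is considered
// every 5 trials (and after the last trial), but the hlearner is actually
// saved only if at least 3600 seconds have elapsed since the previous save;
// this time condition is waived for the very last trial.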

////////////
// build_ //
////////////
void HyperOptimize::build_()
{
    Profiler::pl_profile_activate();
}

///////////
// build //
///////////
void HyperOptimize::build()
{
    inherited::build();
    build_();
}

void HyperOptimize::setExperimentDirectory(const PPath& the_expdir)
{
    inherited::setExperimentDirectory(the_expdir);
    getResultsMat();
}

void HyperOptimize::getResultsMat()
{
    TVec<string> cost_fields = getResultNames();
    TVec<string> option_fields = hlearner->option_fields;
    int w = 2 + option_fields.length() + cost_fields.length();

    // If we have an expdir, create a FileVMatrix to save the results.
    // Otherwise, just a MemoryVMatrix to make the results available as a
    // getOption after training.
    if (! expdir.isEmpty())
    {
        string fname = expdir+"results.pmat";
        if(isfile(fname)){
            // reload the old version if it exists
            resultsmat = new FileVMatrix(fname, true);
            if(resultsmat.width()!=w)
                PLERROR("In HyperOptimize::getResultsMat() - The existing "
                        "results mat (%s) that we should reload does not have "
                        "the width that we need. Did you add some statnames?",
                        fname.c_str());
            return;
        }else
            resultsmat = new FileVMatrix(fname,0,w);
    }
    else
        resultsmat = new MemoryVMatrix(0,w);

    int j=0;
    resultsmat->declareField(j++, "_trial_");
    resultsmat->declareField(j++, "_objective_");
    for(int k=0; k<option_fields.length(); k++)
        resultsmat->declareField(j++, option_fields[k]);
    for(int k=0; k<cost_fields.length(); k++)
        resultsmat->declareField(j++, cost_fields[k]);

    if (! expdir.isEmpty())
        resultsmat->saveFieldInfos();
}
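
// Illustrative layout of resultsmat / results.pmat (option and cost names
// are hypothetical).  One row is appended per trial; a final row with
// _trial_ == -1 repeats the best results kept:
//
//   _trial_  _objective_  nhidden  weight_decay  E[test.E[mse]]
//      0          0          10        0.01           0.25
//      1          0          20        0.01           0.22
//     -1          0          20        0.01           0.22
//
// Note that the _objective_ column stores which_cost_pos (the index of the
// objective among the tester's statnames), not the objective's value.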

void HyperOptimize::reportResult(int trialnum, const Vec& results)
{
    if(expdir!="")
    {
        TVec<string> cost_fields = getResultNames();
        TVec<string> option_fields = hlearner->option_fields;

        if(results.length() != cost_fields.length())
            PLERROR("In HyperOptimize::reportResult - Length of results vector (%d) "
                    "differs from number of cost fields (%d)",
                    results.length(), cost_fields.length());

        // ex: _trial_ _objective_ nepochs nhidden ...     train_error

        Vec newres(resultsmat.width());
        int j=0;
        newres[j++] = trialnum;
        // The _objective_ column records the index of the objective cost
        // (which_cost_pos), not its value; the value itself appears among
        // the cost columns.
        newres[j++] = which_cost_pos;

        for(int k=0; k<option_fields.length(); k++)
        {
            string optstr = hlearner->learner_->getOption(option_fields[k]);
            real optreal = toreal(optstr);
            if(is_missing(optreal)) // it's not directly a real: get a mapping for it
                optreal = resultsmat->addStringMapping(k, optstr);
            newres[j++] = optreal;
        }

        for(int k=0; k<cost_fields.length(); k++)
            newres[j++] = results[k];

        resultsmat->appendRow(newres);
        resultsmat->flush();
    }
}

Vec HyperOptimize::runTest(int trialnum)
{
    PP<PTester> tester = hlearner->tester;

    string testerexpdir = "";
    if(expdir!="" && provide_tester_expdir)
        testerexpdir = expdir / ("Trials"+tostring(trialnum)) / "";
    tester->setExperimentDirectory(testerexpdir);

    PP<Splitter> default_splitter = tester->splitter;
    if(splitter)  // set our own splitter
        tester->splitter = splitter;

    Vec results = tester->perform(false);

    // restore the tester's default splitter
    tester->splitter = default_splitter;
    return results;
}

TVec<string> HyperOptimize::getResultNames() const
{
    return hlearner->tester->getStatNames();
}

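// Note on 'which_cost': it may be either the name of one of the tester's
// statnames or a numeric index into them.  For instance, with the
// hypothetical statnames [ "E[test.E[mse]]", "E[test.E[class_error]]" ],
// both which_cost = "E[test.E[class_error]]" and which_cost = "1" select
// the classification error as the objective.  A negative index means no
// cost is compared and the last trained learner is kept as the best one.
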
void HyperOptimize::forget()
{
    trialnum = 0;
    option_vals.resize(0);
    best_objective = REAL_MAX;
    best_results = Vec();
    best_learner = 0;

    for (int i=0, n=sub_strategy.size() ; i<n ; ++i)
        sub_strategy[i]->forget();
}

Vec HyperOptimize::optimize()
{
    // When auto_save is used, this function can be called even though the
    // optimization is already finished. We must not redo it in this case.
    if(trialnum>0 && !option_vals && resultsmat.length()==trialnum+1){
        hlearner->setLearner(best_learner);
        if (!best_results.isEmpty() && resultsmat->get(resultsmat.length()-1,0)!=-1)
            reportResult(-1,best_results);

        return best_results;
    }
    TVec<string> option_names;
    option_names = oracle->getOptionNames();

    if(trialnum==0){
        if(option_vals.size()==0)
            option_vals = oracle->generateFirstTrial();
        if (option_vals.size() != option_names.size())
            PLERROR("HyperOptimize::optimize: the number (%d) of option values (%s) "
                    "does not match the number (%d) of option names (%s) ",
                    option_vals.size(), tostring(option_vals).c_str(),
                    option_names.size(), tostring(option_names).c_str());
    }
    which_cost_pos= getResultNames().find(which_cost);
    if(which_cost_pos < 0){
        if(!pl_islong(which_cost))
            PLERROR("In HyperOptimize::optimize() - option 'which_cost' with "
                    "value '%s' is not a number and is not a valid result test name",
                    which_cost.c_str());
        which_cost_pos= toint(which_cost);
    }

    Vec results;
    while(option_vals)
    {
        auto_save_timer->startTimer("auto_save");

        if(verbosity>0) {
            // Print the current option-value pairs in readable form
            string kv;
            for (int i=0, n=option_names.size() ; i<n ; ++i) {
                kv += option_names[i] + '=' + option_vals[i];
                if (i < n-1)
                    kv += ", ";
            }
            perr << "In HyperOptimize::optimize() - We optimize with "
                "parameters " << kv << "\n";
        }

        // This will also call build and forget on the learner, unless the
        // modified options don't require it.
        hlearner->setLearnerOptions(option_names, option_vals);

        if(sub_strategy)
        {
            Vec best_sub_results;
            for(int commandnum=0; commandnum<sub_strategy.length(); commandnum++)
            {
                sub_strategy[commandnum]->setHyperLearner(hlearner);
                sub_strategy[commandnum]->forget();
                if(!expdir.isEmpty() && provide_sub_expdir)
                    sub_strategy[commandnum]->setExperimentDirectory(
                        expdir / ("Trials"+tostring(trialnum)) / ("Step"+tostring(commandnum))
                        );

                best_sub_results = sub_strategy[commandnum]->optimize();
            }
            if(rerun_after_sub)
                results = runTest(trialnum);
            else
                results = best_sub_results;
        }
        else
            results = runTest(trialnum);

        reportResult(trialnum,results);
        real objective = MISSING_VALUE;
        if (which_cost_pos>=0)
            objective = results[which_cost_pos];
        else
        {
            // which_cost < 0: the last trained learner is always kept as
            // the best one.
            best_objective = objective;
            best_results = results;
            best_learner = hlearner->getLearner();
        }
        option_vals = oracle->generateNextTrial(option_vals,objective);

        ++trialnum;
        if(!is_missing(objective) &&
           (objective < best_objective || best_results.length()==0) &&
           (trialnum>=min_n_trials || !option_vals))
        {
            best_objective = objective;
            best_results = results;
            CopiesMap copies;
            best_learner = NULL;
            Profiler::pl_profile_start("HyperOptimizer::optimize::deepCopy");
            best_learner = hlearner->getLearner()->deepCopy(copies);
            Profiler::pl_profile_end("HyperOptimizer::optimize::deepCopy");

            if (save_best_learner && !expdir.isEmpty()) {
                PLearn::save(expdir / "current_best_learner.psave",
                             best_learner);
            }
        }

        if(verbosity>1) {
            perr << "In HyperOptimize::optimize() - cost=" << which_cost
                 << " nb of trials=" << trialnum
                 << " Current value=" << objective << " Best value= "
                 << best_objective << endl;
        }

        auto_save_timer->stopTimer("auto_save");
        if (auto_save > 0 &&
                (trialnum % auto_save == 0 || option_vals.isEmpty()))
        {
            int s = int(auto_save_timer->getTimer("auto_save"));
            if(s > auto_save_diff_time || option_vals.isEmpty()) {
                hlearner->auto_save();
                auto_save_timer->resetTimer("auto_save");
                if(auto_save_test>0 && trialnum%auto_save_test==0)
                    PLERROR("In HyperOptimize::optimize() - auto_save_test is true,"
                            " exiting");
            }
        }
    }

    // Detect the case where no trials at all were performed!
    if (trialnum == 0)
        PLWARNING("In HyperOptimize::optimize - No trials at all were completed;\n"
                  "perhaps the oracle settings are wrong?");

    // Revert to best_learner if one was found.
    hlearner->setLearner(best_learner);

    if (best_results.isEmpty())
        // This could happen for instance if all results are NaN.
        PLWARNING("In HyperOptimize::optimize - Could not find a best result,"
                  " something must be wrong");
    else
        // Report the best result again, if not empty.
        reportResult(-1,best_results);

    return best_results;
}
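
// Sketch of programmatic access to the trials matrix after training
// (illustrative; 'cmd' stands for this HyperOptimize command, however it
// was obtained from the enclosing HyperLearner's strategy):
//
//   PP<HyperOptimize> cmd = ...;
//   string results_as_string = cmd->getOption("resultsmat");
//
// Since 'resultsmat' is declared nosave, it is exposed this way (or as
// <expdir>/results.pmat when an expdir is set) rather than being serialized
// with the object.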
/*
void HyperOptimize::launchTest(int trialnum, PP<RemotePLearnServer> server,
                               map<PP<RemotePLearnServer>, int>& testers_ids)
{
    PP<PTester> tester= hlearner->tester;

    string testerexpdir= "";
    if(expdir!="" && provide_tester_expdir)
        testerexpdir = expdir / ("Trials"+tostring(trialnum)) / "";
    tester->setExperimentDirectory(testerexpdir);

    PP<Splitter> default_splitter = tester->splitter;
    if(splitter)  // set our own splitter
        tester->splitter = splitter;

    int id= testers_ids[server];
    if(id > 0) server->deleteObject(id);// delete previous tester
    id= server->newObject(tester);// send new tester
    testers_ids[server]= id;
    tester->splitter= default_splitter;// restore default splitter

    server->callMethod(id, "perform", false);
}

Vec HyperOptimize::parOptimize()
{
    real best_objective = REAL_MAX;
    Vec best_results;
    PP<PLearner> best_learner;

    TVec<string> option_names;
    option_names = oracle->getOptionNames();

    TVec<string> option_vals = oracle->generateFirstTrial();
    if (option_vals.size() != option_names.size())
        PLERROR("HyperOptimize::optimize: the number of option values (%d) "
                "does not match the number of option names (%d)",
                option_vals.size(), option_names.size());

    int trialnum = 0;

    which_cost_pos= getResultNames().find(which_cost);
    if(which_cost_pos < 0)
        which_cost_pos= toint(which_cost);

    PLearnService& service(PLearnService::instance());
    int nservers= service.availableServers();
    TVec<PP<RemotePLearnServer> > servers= service.reserveServers(nservers);
    nservers= servers.length();
    map<PP<RemotePLearnServer>, int> testers_ids;
    map<PP<RemotePLearnServer>, int> trialnums;
    for(int i= 0; i < nservers; ++i)
        testers_ids[servers[i]]= -1;//init.
    int nworking= 0;

    Vec results;
    bool finished= false;
    while(!finished)
    {
        // Find a free server, i.e. one whose tester id is still -1.
        map<PP<RemotePLearnServer>, int>::iterator it= testers_ids.begin();
        while(it != testers_ids.end() && it->second != -1)
            ++it;
        if(option_vals && it != testers_ids.end())
        {
            hlearner->setLearnerOptions(option_names, option_vals);
            launchTest(trialnum, it->first, testers_ids);
            ++nworking;
            trialnums[it->first]= trialnum;
            ++trialnum;
            option_vals= 0;
        }
        else if(nworking > 0)
        {
            PP<RemotePLearnServer> s= service.waitForResult();
            s->getResults(results);
            --nworking;
            testers_ids[s]= -1;
            reportResult(trialnums[s], results);
            real objective= results[which_cost_pos];

            option_vals= oracle->generateNextTrial(option_vals,objective);

            if(!is_missing(objective) &&
               (objective < best_objective || best_results.length()==0) &&
               (trialnum>=min_n_trials || !option_vals))
            {
                best_objective = objective;
                best_results = results;
                CopiesMap copies;
                best_learner = NULL;
                best_learner = hlearner->getLearner()->deepCopy(copies);
            }
        }
        else
            finished= true;
    }

    // Detect the case where no trials at all were performed!
    if (trialnum == 0)
        PLWARNING("In HyperOptimize::optimize - No trials at all were completed;\n"
                  "perhaps the oracle settings are wrong?");

    // revert to best_learner
    hlearner->setLearner(best_learner);

    if (best_results.isEmpty())
        // This could happen for instance if all results are NaN.
        PLWARNING("In HyperOptimize::optimize - Could not find a best result, something "
                  "must be wrong");
    else
        // report best result again, if not empty
        reportResult(-1,best_results);

    return best_results;
}
*/

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void HyperOptimize::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(resultsmat,       copies);
    deepCopyField(best_results,     copies);
    deepCopyField(best_learner,     copies);
    deepCopyField(option_vals,      copies);
    deepCopyField(auto_save_timer,  copies);
    deepCopyField(oracle,           copies);
    deepCopyField(sub_strategy,     copies);
    deepCopyField(splitter,         copies);
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :