PLearn 0.1
// -*- C++ -*-

// Learner.h
//
// Copyright (C) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio, Nicolas Chapados, Charles Dugas, Rejean Ducharme, Universite de Montreal
// Copyright (C) 2001,2002 Francis Pieraut, Jean-Sebastien Senecal
// Copyright (C) 2002 Frederic Morin, Xavier Saint-Mleux, Julien Keable
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* *******************************************************
 * $Id: Learner.h 9444 2008-09-09 19:28:56Z nouiz $
 ******************************************************* */


#ifndef Learner_INC
#define Learner_INC

#include <plearn/measure/Measurer.h>
#include <plearn/ker/Kernel.h>
#include <plearn/math/VecStatsCollector.h>
#include <plearn/math/StatsIterator.h>
#include <plearn/vmat/VVec.h>
//#include "TimeMeasurer.h"

namespace PLearn {
using namespace std;

class Learner: public Object, public Measurer //, public TimeMeasurer
{
protected:

    Vec tmpvec; // for temporary storage

    // TODO: make this a pointer as well
    ofstream* train_objective_stream;
    Array<ofstream*> test_results_streams;

private:

    static Vec tmp_input;  // temporary input vec
    static Vec tmp_target; // temporary target vec
    static Vec tmp_weight; // temporary example weight vec
    static Vec tmp_output; // temporary output vec
    static Vec tmp_costs;  // temporary costs vec

protected:

    // Helpers for the train-objective and test-results output streams.
    void openTrainObjectiveStream();
    ostream& getTrainObjectiveStream();
    void openTestResultsStreams();
    ostream& getTestResultsStream(int k);
    void freeTestResultsStreams();
    void outputResultLineToFile(const string& filename, const Vec& results,
                                bool append, const string& names);

protected:
    string expdir;

    int epoch_;

    bool distributed_;

public:

    string basename() const;

    typedef Object inherited;

    int inputsize_;
    int targetsize_;
    int outputsize_;
    int weightsize_;

    bool dont_parallelize;

    //oassignstream testout;
    PStream testout;
    int test_every;
    Vec avg_objective;
    Vec avgsq_objective;
    VMat train_set;
    Array<VMat> test_sets;
    int minibatch_size;

    int report_test_progress_every;

    Vec options;

    // Early-stopping parameters (see setEarlyStopping below).
    int earlystop_testsetnum;
    int earlystop_testresultindex;
    real earlystop_max_degradation;
    real earlystop_min_value;
    real earlystop_min_improvement;
    bool earlystop_relative_changes;
    bool earlystop_save_best;
    int earlystop_max_degraded_steps;

    bool save_at_every_epoch;
    bool save_objective;
    int best_step;

protected:
    real earlystop_previousval;
public:
    real earlystop_minval;

    // DEPRECATED. Please use the expdir system from now on, through setExperimentDirectory
    string experiment_name;

protected:
    //strstream earlystop_best_model; //!< string stream where the currently best model is saved

    Array<Measurer*> measurers;

    bool measure_cpu_time_first; // the first element in measure(...) will be cpu time instead of current step

    bool each_cpu_saves_its_errors;
public:
    Array<CostFunc> test_costfuncs;
    StatsItArray test_statistics;

    static int use_file_if_bigger;

    static bool force_saving_on_all_processes;

    static PStream& /*oassignstream&*/ default_vlog();
    //oassignstream vlog;         //!< The log stream to which all the verbose output from this learner should be sent
    //oassignstream objectiveout; //!< The log stream to use to record the objective function during training
    PStream vlog;
    PStream objectiveout;

    Learner(int the_inputsize=0, int the_targetsize=0, int the_outputsize=0);

    virtual ~Learner();

    virtual void setExperimentDirectory(const PPath& the_expdir);
    string getExperimentDirectory() const { return expdir; }

    PLEARN_DECLARE_ABSTRACT_OBJECT(Learner);
    virtual void makeDeepCopyFromShallowCopy(CopiesMap& copies);

private:
    void build_();

public:
    virtual void build();

    virtual void setTrainingSet(VMat training_set) { train_set = training_set; }
    inline VMat getTrainingSet() { return train_set; }

    virtual void train(VMat training_set) = 0;

    virtual void newtrain(VecStatsCollector& train_stats);

    virtual void newtest(VMat testset, VecStatsCollector& test_stats,
                         VMat testoutputs=0, VMat testcosts=0);

    /*
    virtual void useAndCost(Vec input, Vec target, Vec output, Vec cost)

    virtual void trainTest(VMat train, Array<VMat> testsets);
    virtual void trainKFold(VMat trainset, int k);
    virtual void trainBootstrap(VMat trainset, int k, Array<VMat> testsets);
    virtual void trainSequential(VMat dataset, sequence_spec);
    */

    virtual void train(VMat training_set, VMat accept_prob,
                       real max_accept_prob=1.0, VMat weights=VMat())
    { PLERROR("This method is not implemented for this learner"); }

    virtual void use(const Vec& input, Vec& output) = 0;
    virtual void use(const Mat& inputs, Mat outputs)
    {
        for (int i=0; i<inputs.length(); i++)
        {
            Vec input = inputs(i);
            Vec output = outputs(i);
            use(input, output);
        }
    }

    Vec vec_input;

    // NOTE: For backward compatibility, the default version currently calls the
    // deprecated method use, which should ultimately be removed...
    virtual void computeOutput(const VVec& input, Vec& output);

    // NOTE: For backward compatibility, the default version currently calls the
    // deprecated method computeCost, which should ultimately be removed...
    virtual void computeCostsFromOutputs(const VVec& input, const Vec& output,
                                         const VVec& target, const VVec& weight,
                                         Vec& costs);

    virtual void computeOutputAndCosts(const VVec& input, VVec& target, const VVec& weight,
                                       Vec& output, Vec& costs);

    virtual void computeCosts(const VVec& input, VVec& target, VVec& weight,
                              Vec& costs);

    virtual void setModel(const Vec& new_options);

    virtual void forget();

    virtual bool measure(int step, const Vec& costs);

    virtual void oldwrite(ostream& out) const;
    /* TODO Remove (deprecated)
    virtual void oldread(istream& in);
    */

    void save(const PPath& filename="") const;
    void load(const PPath& filename="");

    virtual void stop_if_wanted();

    // Accessors for the basic dimensions and the current epoch.
    inline int inputsize() const { return inputsize_; }
    inline int targetsize() const { return targetsize_; }
    inline int outputsize() const { return outputsize_; }
    inline int weightsize() const { return weightsize_; }
    inline int epoch() const { return epoch_; }

    virtual int costsize() const;

    void setTestCostFunctions(Array<CostFunc> costfunctions)
    { test_costfuncs = costfunctions; }

    void setTestStatistics(StatsItArray statistics)
    { test_statistics = statistics; }

    virtual void setTestDuringTrain(ostream& testout, int every,
                                    Array<VMat> testsets);

    virtual void setTestDuringTrain(Array<VMat> testsets);

    const Array<VMat>& getTestDuringTrain() const {
        return test_sets;
    }

    void setEarlyStopping(int which_testset, int which_testresult,
                          real max_degradation, real min_value=-FLT_MAX,
                          real min_improvement=0, bool relative_changes=true,
                          bool save_best=true, int max_degraded_steps=-1);

    virtual void computeCost(const Vec& input, const Vec& target, const Vec& output, const Vec& cost);

    virtual void useAndCost(const Vec& input, const Vec& target,
                            Vec output, Vec cost);

    virtual void useAndCostOnTestVec(const VMat& test_set, int i, const Vec& output, const Vec& cost);

    virtual void apply(const VMat& data, VMat outputs);

    virtual void applyAndComputeCosts(const VMat& data, VMat outputs, VMat costs);

    virtual void applyAndComputeCostsOnTestMat(const VMat& test_set, int i, const Mat& output_block,
                                               const Mat& cost_block);

    virtual void computeCosts(const VMat& data, VMat costs);

    virtual void computeLeaveOneOutCosts(const VMat& data, VMat costs);

    virtual void computeLeaveOneOutCosts(const VMat& data, VMat costsmat, CostFunc costf);

    Vec computeTestStatistics(const VMat& costs);

    virtual Vec test(VMat test_set, const string& save_test_outputs="",
                     const string& save_test_costs="");

    virtual Array<string> costNames() const;

    virtual Array<string> testResultsNames() const;

    virtual Array<string> trainObjectiveNames() const;

    void appendMeasurer(Measurer& measurer)
    { measurers.append(&measurer); }

protected:
    static void declareOptions(OptionList& ol);

    void setTrainCost(Vec& cost)
    { train_cost.resize(cost.length()); train_cost << cost; }
    Vec train_cost;
public:
    Vec getTrainCost() { return train_cost; }
};

DECLARE_OBJECT_PTR(Learner);

typedef PP<Learner> PPLearner;

inline void prettyprint_test_results(ostream& out, const Learner& learner, const Vec& results)
{
    Array<string> names = learner.testResultsNames();
    for (int i=0; i<names.size(); i++)
        out << names[i] << ": " << results[i] << endl;
}

} // end of namespace PLearn

#endif


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
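
A minimal usage sketch (not part of Learner.h): it assumes a hypothetical concrete subclass MyLearner, declared in a hypothetical header "MyLearner.h", that implements the pure virtual train() and use() methods. Everything else it calls (setExperimentDirectory, setTrainingSet, setTestDuringTrain, setEarlyStopping, test, save, prettyprint_test_results) is declared in the header above.

// Illustrative only: MyLearner and "MyLearner.h" are hypothetical stand-ins
// for a concrete Learner subclass implementing train() and use().
#include <iostream>
#include <plearn/vmat/VMat.h>
#include "MyLearner.h"

using namespace PLearn;

void run_experiment(VMat trainset, VMat validset)
{
    MyLearner learner;                              // hypothetical concrete Learner
    learner.setExperimentDirectory(PPath("my_expdir"));
    learner.setTrainingSet(trainset);

    Array<VMat> testsets(1);                        // test sets monitored during training
    testsets[0] = validset;
    learner.setTestDuringTrain(testsets);

    // Stop if result 0 on test set 0 degrades by more than 10% (relative)
    // from the best value seen so far; the best model is kept by default.
    learner.setEarlyStopping(0, 0, 0.10);

    learner.train(trainset);                        // provided by the subclass
    Vec results = learner.test(validset);           // aggregated test statistics
    prettyprint_test_results(std::cout, learner, results);
    learner.save();                                 // saved under the experiment directory
}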