GradientOptimizer.h
// -*- C++ -*-

// PLearn (A C++ Machine Learning Library)
// Copyright (C) 1998 Pascal Vincent
// Copyright (C) 1999-2002 Pascal Vincent and Yoshua Bengio
// Copyright (C) 1999-2002, 2006 University of Montreal
//

// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* *******************************************************
 * $Id: GradientOptimizer.h 8247 2007-11-12 20:22:12Z nouiz $
 * This file is part of the PLearn library.
 ******************************************************* */


#ifndef GRADIENTOPTIMIZER_INC
#define GRADIENTOPTIMIZER_INC

#include "Optimizer.h"

namespace PLearn {
using namespace std;


class GradientOptimizer : public Optimizer
{
    typedef Optimizer inherited;

public:

    real learning_rate; // current learning rate

    // Options (also available through setOption)
    real start_learning_rate;
    real decrease_constant;
    bool use_stochastic_hack;

    // Optionally, instead of using the decrease_constant, the user can supply
    // a fixed schedule. This matrix has 2 columns: iteration_threshold and
    // learning_rate_factor. As soon as the iteration number exceeds an
    // iteration_threshold, the corresponding learning_rate_factor is
    // multiplied with the start_learning_rate to obtain the learning_rate.
    Mat lr_schedule;
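
    // Illustrative sketch (an assumed reading of the options above, not the
    // actual implementation, which lives in GradientOptimizer.cc): the
    // effective learning_rate at iteration 'stage' would be computed roughly
    // as
    //
    //     real lr = start_learning_rate / (1.0 + decrease_constant * stage);
    //     // If a schedule is given, it takes precedence: keep the factor of
    //     // the last iteration_threshold that the current stage has passed.
    //     for (int i = 0; i < lr_schedule.length(); i++)
    //         if (stage >= lr_schedule(i, 0))
    //             lr = start_learning_rate * lr_schedule(i, 1);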

    int verbosity;

    GradientOptimizer();
    /*
    GradientOptimizer(real the_start_learning_rate=0.01,
                      real the_decrease_constant=0);
    */

    /*
    GradientOptimizer(VarArray the_params, Var the_cost,
                      real the_start_learning_rate=0.01,
                      real the_decrease_constant=0,
                      int n_updates=1, const string& filename="",
                      int every_iterations=1);
    GradientOptimizer(VarArray the_params, Var the_cost,
                      VarArray update_for_measure,
                      real the_start_learning_rate=0.01,
                      real the_decrease_constant=0,
                      int n_updates=1, const string& filename="",
                      int every_iterations=1);
    */

    PLEARN_DECLARE_OBJECT(GradientOptimizer);

    virtual void makeDeepCopyFromShallowCopy(CopiesMap& copies)
    { inherited::makeDeepCopyFromShallowCopy(copies); }

    virtual void build()
    {
        inherited::build();
        build_();
    }

private:
    void build_()
    {}

public:

    // virtual void oldwrite(ostream& out) const;
    // virtual void oldread(istream& in);
    // virtual real optimize();
    virtual bool optimizeN(VecStatsCollector& stats_coll);
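
    // Illustrative sketch (an assumption, not the actual implementation):
    // optimizeN is expected to perform the remaining gradient-descent steps,
    // each one roughly of the form
    //
    //     compute the cost and back-propagate its gradient into the parameters;
    //     param[i] -= learning_rate * gradient[i];   // for every parameter i
    //     stats_coll.update(cost value for this step);
    //
    // The boolean return value presumably signals that optimization is done.
    // See GradientOptimizer.cc for the real update and the stochastic hack.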

protected:

    static void declareOptions(OptionList& ol);
};

DECLARE_OBJECT_PTR(GradientOptimizer);

/* Commented out the whole class: if one uses it, better put it in its own
 * separate file!
class ScaledGradientOptimizer : public Optimizer
{
protected:
    Vec gradient;
    real short_time_mac;
    real long_time_mac;
    Vec short_time_ma;
    Vec long_time_ma;
    Vec long_time_mv;
    Vec long_time_md;

public:
    real start_learning_rate;
    real decrease_constant;
    real init_learning_rate;
    real learning_rate;
    Vec eps_scale; //!< scaling parameter for the learning rate of each parameter

protected:
    real regularizer;

public:
    ScaledGradientOptimizer(VarArray the_params, Var the_cost,
                            real the_start_learning_rate=0.01,
                            real the_decrease_constant=0.01,
                            real the_init_learning_rate=0.003,
                            int n_updates=1,
                            real short_time_moving_avg_coef=0.01,
                            real long_time_moving_avg_coef=0.001,
                            real the_regularizer=1.0,
                            const string& filename="",
                            int every_iterations=1)
        :Optimizer(the_params, the_cost, n_updates, filename, every_iterations),
         gradient(the_params.nelems()),
         short_time_mac(short_time_moving_avg_coef),
         long_time_mac(long_time_moving_avg_coef),
         short_time_ma(the_params.nelems()),
         long_time_ma(the_params.nelems()),
         long_time_mv(the_params.nelems()),
         long_time_md(the_params.nelems()),
         start_learning_rate(the_start_learning_rate),
         decrease_constant(the_decrease_constant),
         init_learning_rate(the_init_learning_rate),
         eps_scale(the_params.nelems()),
         regularizer(the_regularizer) {}


    ScaledGradientOptimizer(VarArray the_params, Var the_cost,
                            real the_start_learning_rate=0.01,
                            real the_decrease_constant=0.01,
                            int n_updates=1,
                            real short_time_moving_avg_coef=0.01,
                            real long_time_moving_avg_coef=0.001,
                            real the_regularizer=1.0,
                            const string& filename="",
                            int every_iterations=1)
        :Optimizer(the_params, the_cost,
                   n_updates, filename, every_iterations),
         gradient(the_params.nelems()),
         short_time_mac(short_time_moving_avg_coef),
         long_time_mac(long_time_moving_avg_coef),
         short_time_ma(the_params.nelems()),
         long_time_ma(the_params.nelems()),
         long_time_mv(the_params.nelems()),
         long_time_md(the_params.nelems()),
         start_learning_rate(the_start_learning_rate),
         decrease_constant(the_decrease_constant),
         eps_scale(the_params.nelems()),
         regularizer(the_regularizer)
    {
        eps_scale.fill(1.0);
    }

    virtual real optimize();
};

*/
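
// Illustrative sketch (an inference from the member names above, not code
// recovered from ScaledGradientOptimizer): eps_scale is documented as a
// per-parameter scaling of the learning rate, so the scaled step would
// presumably be
//
//     param[i] -= learning_rate * eps_scale[i] * gradient[i];   // for every i
//
// with eps_scale adapted from the short- and long-time moving averages of the
// gradient.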


} // end of namespace PLearn

#endif


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :