Optimizer.cc

// -*- C++ -*-

// PLearn (A C++ Machine Learning Library)
// Copyright (C) 1998 Pascal Vincent
// Copyright (C) 1999,2000 Pascal Vincent and Yoshua Bengio
// Copyright (C) 2000, 2006 University of Montreal
//

// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
// 
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
// 
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
// 
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// 
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* *******************************************************
 * $Id: Optimizer.cc 8862 2008-04-22 14:06:32Z tihocan $
 * This file is part of the PLearn library.
 ******************************************************* */

#include "Optimizer.h"
//#define DEBUGCG
#ifdef DEBUGCG
#include <plearn/display/GhostScript.h>
#endif

namespace PLearn {
using namespace std;


Optimizer::Optimizer():
    early_stop(false),
    nstages(1),
    stage(0)
{}

PLEARN_IMPLEMENT_ABSTRACT_OBJECT(
    Optimizer,
    "Base class for optimization algorithms.",
    "In the PLearn context, optimizers operate on a graph of Variable objects,\n"
    "mostly expressed in VarArray form.\n"
);
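
// Illustrative usage sketch (added; not part of the original source): a
// concrete subclass such as GradientOptimizer is typically driven as below.
// The optimizeN() signature follows the method documentation later in this
// file; the surrounding names are assumptions.
//
//     PP<Optimizer> opt = new GradientOptimizer();
//     opt->setToOptimize(params, cost);   // params: VarArray, cost: scalar Var
//     opt->nstages = 100;                 // steps per optimizeN() call
//     opt->reset();
//     VecStatsCollector stats;
//     bool stop = opt->optimizeN(stats);  // true if a stopping criterion fired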
00065 
00066 void Optimizer::build()
00067 {
00068     inherited::build();
00069     build_();
00070 }
00071 
00073 // build_ //
00075 void Optimizer::build_()
00076 {
00077     if (cost)
00078         setToOptimize(params, cost, other_costs, other_params, other_weight);
00079 }
00080 
00082 // reset //
00084 void Optimizer::reset()
00085 {
00086     stage = 0;
00087     early_stop = false;
00088 }
00089 
00091 // declareOptions //
00093 void Optimizer::declareOptions(OptionList& ol)
00094 {
00095     declareOption(ol, "nstages", &Optimizer::nstages, OptionBase::buildoption, 
00096         "Number of iterations to perform on the next call to optimizeN(..).");
00097 
00098     declareOption(ol, "early_stop", &Optimizer::early_stop,
00099                   OptionBase::learntoption, 
00100         "Whether an early stopping criterion has been met.");
00101 
00102     inherited::declareOptions(ol);
00103 }
00104 
00106 // declareMethods //
00108 void Optimizer::declareMethods(RemoteMethodMap& rmm)
00109 {
00110     // Insert a backpointer to remote methods; note that this
00111     // different than for declareOptions()
00112     rmm.inherited(inherited::_getRemoteMethodMap_());
00113         
00114     declareMethod(rmm, "setToOptimize", &Optimizer::remote_setToOptimize,
00115             (BodyDoc("Set cost to minimize with respect to given parameters"),
00116              ArgDoc("params", "List of parameters (variables) to optimize"),
00117              ArgDoc("cost", "Cost to be minimized")));
00118 
00119     declareMethod(rmm, "optimizeN", &Optimizer::remote_optimizeN,
00120             (BodyDoc("Launch nstages steps of optimization."),
00121              ArgDoc("stats", "VecStatsCollector to collect training statistics"),
00122              RetDoc("Boolean value indicating whether a stopping criterion "
00123                     "has been met.")));
00124 }

///////////////////
// setToOptimize //
///////////////////
void Optimizer::setToOptimize(const VarArray& the_params, Var the_cost, VarArray the_other_costs, TVec<VarArray> the_other_params, real the_other_weight)
{
    params = the_params;//displayVarGraph(params, true, 333, "p1", false);
    cost = the_cost;//displayVarGraph(cost[0], true, 333, "c1", false);
    proppath = propagationPath(params,cost);//displayVarGraph(proppath, true, 333, "x1", false);
    VarArray path_from_all_sources_to_direct_parents = propagationPathToParentsOfPath(params, cost);
    path_from_all_sources_to_direct_parents.fprop();//displayVarGraph(path_from_all_sources_to_direct_parents, true, 333, "x1", false);

    // This is probably not complete: a similar
    // path_from_all_sources_to_direct_parents should perhaps also be computed
    // and fprop'ed for the other costs.
    other_costs = the_other_costs;
    other_params = the_other_params;
    other_proppaths.resize(other_costs.length());
    for(int i=0; i<other_proppaths.length(); i++)
        other_proppaths[i] = propagationPath(other_params[i],other_costs[i]);
    other_weight = the_other_weight;
}
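
// Hypothetical example (added for illustration): wiring a tiny quadratic cost
// to an optimizer. square() and sum() are the usual PLearn Var functions;
// treat this as a sketch under those assumptions, not code from this library.
//
//     Var w(2);                        // 2-element parameter vector
//     Var c = sum(square(w));          // cost = ||w||^2, a scalar Var
//     optimizer->setToOptimize(w, c);  // builds proppath from w to c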

//////////////////////////
// remote_setToOptimize //
//////////////////////////
void Optimizer::remote_setToOptimize(const VarArray& params, Var cost)
{
    setToOptimize(params, cost);
}

/*
void Optimizer::setVarArrayOption(const string& optionname, VarArray value)
{
    if (optionname=="params") setToOptimize(value, cost);
    else if (optionname=="update_for_measure") update_for_measure = value;
    else PLERROR("In Optimizer::setVarArrayOption(const string& optionname, VarArray value): option not recognized (%s).",optionname.c_str());
}

void Optimizer::setVarOption(const string& optionname, Var value)
{
    if (optionname=="cost") setToOptimize(params, value);
    else PLERROR("In Optimizer::setVarOption(const string& optionname, Var value): option not recognized (%s).",optionname.c_str());
}

void Optimizer::setVMatOption(const string& optionname, VMat value)
{
    PLERROR("In Optimizer::setVMatOption(const string& optionname, VMat value): option not recognized (%s).",optionname.c_str());
}
*/


#ifdef __INTEL_COMPILER
#pragma warning(disable:1419)  // Get rid of compiler warning.
#endif
extern void varDeepCopyField(Var& field, CopiesMap& copies);
#ifdef __INTEL_COMPILER
#pragma warning(default:1419)
#endif
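
// Note (added): 'cost' is a single Var, so it must go through
// varDeepCopyField (declared above); the generic deepCopyField used in
// makeDeepCopyFromShallowCopy below handles the VarArray and TVec members.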


/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void Optimizer::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(params, copies);
    varDeepCopyField(cost, copies);
    deepCopyField(partial_update_vars, copies);
    deepCopyField(proppath, copies);
    deepCopyField(other_costs, copies);
    deepCopyField(other_params, copies);
    deepCopyField(other_proppaths, copies);
}

void Optimizer::verifyGradient(real minval, real maxval, real step)
{
    Func f(params,cost);
    f->verifyGradient(minval, maxval, step);
}

void Optimizer::verifyGradient(real step)
{
    Func f(params,cost);
    Vec p(params.nelems());
    params >> p;
    f->verifyGradient(p, step);
}
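
// Usage note (added): both overloads wrap the params -> cost graph in a Func
// and check the analytic gradient against a finite-difference estimate with
// step size 'step'. A hypothetical debugging call:
//
//     optimizer->verifyGradient(-1.0, 1.0, 1e-6);  // first overload
//     optimizer->verifyGradient(1e-6);             // at the current params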

////////////////////////
// computeRepartition //
////////////////////////
void Optimizer::computeRepartition(
    Vec v, int n, real mini, real maxi,
    Vec res, int& noutliers) {
    res.clear();
    noutliers = 0;
    for (int i=0; i<v.length(); i++) {
        real k = (v[i] - mini) / (maxi - mini);
        int j = int(k*n);
        if (j >= n) {
            noutliers++;
            j = n-1;
        }
        if (j < 0) {
            noutliers++;
            j = 0;
        }
        res[j]++;
    }
    for (int i = 0; i<n; i++) {
        res[i] /= v.length();
    }
}
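
// Worked example (added for illustration): computeRepartition fills 'res'
// with the fraction of entries of 'v' falling into each of 'n' equal bins
// over [mini, maxi], counting (and clamping) out-of-range values as outliers.
// With v = [0.1, 0.5, 0.9, 1.5], n = 2, mini = 0, maxi = 1:
//   0.1 -> bin 0;  0.5 and 0.9 -> bin 1;
//   1.5 -> j = 3 >= n, so noutliers becomes 1 and the value is clamped to bin 1.
// After normalization: res = [0.25, 0.75] and noutliers = 1.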

/////////////////////
// computeGradient //
/////////////////////
void Optimizer::computeGradient(const Vec& gradient) {
    // Clear whatever is left over from previous computations
    this->proppath.clearGradient();
    this->params.clearGradient();
    this->cost->gradient[0] = 1;
    this->proppath.fbprop();
    this->params.copyGradientTo(gradient);
}
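
// Illustrative call (added), e.g. from within a subclass's optimizeN():
// the output Vec must have params.nelems() elements.
//
//     Vec g(params.nelems());
//     computeGradient(g);  // g now holds d(cost)/d(params)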

#ifdef DEBUGCG
extern GhostScript* gs;
#endif

/////////////////////////////
// computeOppositeGradient //
/////////////////////////////
void Optimizer::computeOppositeGradient(const Vec& gradient) {
    // Clear whatever is left over from previous computations
    this->proppath.clearGradient();
    this->params.clearGradient();
    // We want the opposite of the gradient, hence the -1
    this->cost->gradient[0] = -1;
    this->proppath.fbprop();
    this->params.copyGradientTo(gradient);
#ifdef DEBUGCG
    gs->setcolor("blue");
    gs->drawCircle(this->params[0]->value[0],this->params[0]->value[1],0.02);
#endif
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :