PLearn 0.1: ConjRosenbrock.cc
// -*- C++ -*-

// ConjRosenbrock.cc
//
// Copyright (C) 2006 Nicolas Chapados
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Nicolas Chapados

// From C++ stdlib
#include <iostream>

// From PLearn
#include <plearn/base/lexical_cast.h>
#include <plearn/io/pl_log.h>
#include <plearn/io/openFile.h>

#include <plearn/var/Variable.h>
#include <plearn/var/Var_operators.h>
#include <plearn/var/Func.h>
#include <plearn/var/PowVariable.h>
#include <plearn/var/SubMatVariable.h>
#include <plearn/var/SumVariable.h>

#include <plearn/opt/GradientOptimizer.h>
#include <plearn/opt/ConjGradientOptimizer.h>

#include "ConjRosenbrock.h"

namespace PLearn {
using namespace std;

// This Func returns the evaluation of the Rosenbrock function.
// We assume that the input/output are represented as ROW vectors.
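//
// In mathematical form, the function built below is
//     f(x) = sum_{i=1}^{D-1} [ 100*(x_{i+1} - x_i^2)^2 + (1 - x_i)^2 ],
// which is non-negative and reaches its unique global minimum f(x) = 0 at
// x = (1, ..., 1).  At the zero vector used as the starting point in
// perform() below, f(0, ..., 0) = D - 1.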
static Func rosenbrock(int D)
{
    // D = length(x);
    // f = sum(100*(x(2:D)-x(1:D-1).^2).^2 + (1-x(1:D-1)).^2);
    Var input(1, D, "input");
    Var drop_first   = subMat(input, 0, 1, 1, D-1);
    Var drop_last    = subMat(input, 0, 0, 1, D-1);
    Var drop_last_sq = pow(drop_last, 2);
    Var diff_100x_sq = pow(drop_first - drop_last_sq, 2) * 100.0;
    Var second_term  = pow(1 - drop_last, 2);
    Var rosenbrock   = sum(diff_100x_sq + second_term);

    return Func(VarArray(input), VarArray(rosenbrock));
}

PLEARN_IMPLEMENT_OBJECT(
    ConjRosenbrock,
    "Exercises the Conjugate Gradient optimizer through the Rosenbrock Function.",
    ""
);

////////////////////
// ConjRosenbrock //
////////////////////
ConjRosenbrock::ConjRosenbrock()
    : D(2)
{ }

///////////
// build //
///////////
void ConjRosenbrock::build()
{
    inherited::build();
    build_();
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void ConjRosenbrock::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // ### Call deepCopyField on all "pointer-like" fields
    // ### that you wish to be deepCopied rather than
    // ### shallow-copied.
    // ### ex:
    // deepCopyField(trainvec, copies);

    // ### Remove this line when you have fully implemented this method.
    PLERROR("ConjRosenbrock::makeDeepCopyFromShallowCopy not fully (correctly) implemented yet!");
}

////////////////////
// declareOptions //
////////////////////
void ConjRosenbrock::declareOptions(OptionList& ol)
{
    declareOption(ol, "opt", &ConjRosenbrock::opt, OptionBase::buildoption,
                  "Optimizer to use, with options.");

    declareOption(ol, "D", &ConjRosenbrock::D, OptionBase::buildoption,
                  "Dimensionality of the Rosenbrock problem to solve");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

////////////
// build_ //
////////////
void ConjRosenbrock::build_()
{
    if (! opt)
        PLERROR("%s: the 'opt' option must be specified", __FUNCTION__);
}

/////////////
// perform //
/////////////
void ConjRosenbrock::perform()
{
    // Change verbosity
    PL_Log::instance().verbosity(VLEVEL_DBG);
    PL_Log::instance().enableNamedLogging(
        "Optimizer,GradientOptimizer,ConjGradientOptimizer");

    // Set up the Rosenbrock problem
    Func r = rosenbrock(D);
    Vec input(D);                            // Initialized to 0.0
    pout << "rosenbrock(" << input << ") = " << r(input) << endl;
    r->verifyGradient(input, 1e-6);          // Check gradient at 0^D
    input.fill(1.0);
    r->verifyGradient(input, 1e-6);          // Check gradient at 1^D
    // r->verifyGradient(-1, 1, 1e-6);       // Check gradient at random point

    // Set up the optimizer and go
    r->inputs[0]->value.fill(0.0);
    opt->reset();
    opt->setToOptimize(r->inputs, r->outputs);
    opt->build();

    VecStatsCollector vsc;
    opt->optimizeN(vsc);

    // Print information
    pout << "\nAfter optimization:"
         << "\ninputs = " << r->inputs[0]->value
         << "\noutput = " << r->outputs[0]->value
         << "\n\nOptimization stats collector: " << vsc;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
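
For reference, here is a minimal sketch of a stand-alone driver that could exercise this test. It assumes, as suggested by declareOptions() above but not confirmed by ConjRosenbrock.h itself, that 'opt' is a public PP<Optimizer> build option, that 'D' is a public int, and that the generic Optimizer option 'nstages' controls how many updates optimizeN() performs. It is an illustration only, not part of the PLearn sources.

// Hypothetical driver (sketch only): construct the test object, set its
// build options programmatically, then run it.
#include <plearn/opt/ConjGradientOptimizer.h>
#include "ConjRosenbrock.h"

using namespace PLearn;

int main()
{
    PP<ConjRosenbrock> test = new ConjRosenbrock();
    test->D   = 10;                          // dimensionality of the Rosenbrock problem
    test->opt = new ConjGradientOptimizer(); // optimizer exercised by the test (assumed PP<Optimizer>)
    test->opt->nstages = 200;                // assumed generic Optimizer option: number of updates
    test->build();                           // build_() verifies that 'opt' was supplied
    test->perform();                         // gradient checks + optimization, results go to pout
    return 0;
}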