// -*- C++ -*-

// MatrixModule.cc
//
// Copyright (C) 2007 Olivier Delalleau
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Olivier Delalleau

#include "MatrixModule.h"
#include <plearn/math/TMat_maths.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    MatrixModule,
    "Module that sees a single matrix.",
    ""
);

//////////////////
// MatrixModule //
//////////////////
MatrixModule::MatrixModule(const string& the_name, bool call_build_):
    inherited(the_name.empty() && call_build_ ? classname() : the_name,
              call_build_)
{
    if (call_build_)
        build_();
}

////////////////////
// declareOptions //
////////////////////
void MatrixModule::declareOptions(OptionList& ol)
{
    // ### Declare all of this object's options here.
    // ### For the "flags" of each option, you should typically specify
    // ### one of OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. If you don't provide one of these three,
    // ### this option will be ignored when loading values from a script.
    // ### You can also combine flags, for example with OptionBase::nosave:
    // ### (OptionBase::buildoption | OptionBase::nosave)

    declareOption(ol, "data", &MatrixModule::data,
                  OptionBase::buildoption,
        "The matrix seen by this module.");

    declareOption(ol, "data_gradient", &MatrixModule::data_gradient,
                  OptionBase::buildoption,
        "The gradient w.r.t. 'data'. If not provided, it is assumed to be 0.");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

////////////
// build_ //
////////////
void MatrixModule::build_()
{}

///////////
// build //
///////////
void MatrixModule::build()
{
    inherited::build();
    build_();
}

////////////////////
// bpropAccUpdate //
////////////////////
void MatrixModule::bpropAccUpdate(const TVec<Mat*>& ports_value,
                                  const TVec<Mat*>& ports_gradient)
{
    PLASSERT( ports_gradient.length() == 1 );
    Mat* grad = ports_gradient[0];
    if (!grad)
        return;
    if (grad->isEmpty()) {
        // Accumulate 'data_gradient' into gradient (if there actually is a
        // gradient).
        grad->resize(data.length(), data.width());
        if (!data_gradient.isEmpty()) {
            PLASSERT( data.length() == data_gradient.length() &&
                      data.width() == data_gradient.width() );
            *grad += data_gradient;
        }
    } else {
        data_gradient.resize(grad->length(), grad->width());
        data_gradient << *grad;
        PLERROR("In MatrixModule::bpropAccUpdate - Update of the underlying "
                "data matrix is not yet implemented");
    }
}


//////////////////////////////////
// makeDeepCopyFromShallowCopy //
//////////////////////////////////
void MatrixModule::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(data, copies);
    deepCopyField(data_gradient, copies);
}

///////////
// fprop //
///////////
void MatrixModule::fprop(const Vec& input, Vec& output) const
{
    PLERROR("In MatrixModule::fprop - Not implemented");
}

void MatrixModule::fprop(const TVec<Mat*>& ports_value)
{
    PLASSERT( ports_value.length() == 1 );
    Mat* mat = ports_value[0];
    if (!mat)
        return;
    if (mat->isEmpty()) {
        // We want to query the value of the matrix.
        mat->resize(data.length(), data.width());
        *mat << data;
    } else {
        // We want to store the value of the matrix.
        data.resize(mat->length(), mat->width());
        data << *mat;
    }
}

/////////////////
// bpropUpdate //
/////////////////
/* THIS METHOD IS OPTIONAL
void MatrixModule::bpropUpdate(const Vec& input, const Vec& output,
                               Vec& input_gradient,
                               const Vec& output_gradient,
                               bool accumulate)
{
}
*/

/* THIS METHOD IS OPTIONAL
void MatrixModule::bpropUpdate(const Vec& input, const Vec& output,
                               const Vec& output_gradient)
{
}
*/

//////////////////
// bbpropUpdate //
//////////////////
/* THIS METHOD IS OPTIONAL
void MatrixModule::bbpropUpdate(const Vec& input, const Vec& output,
                                Vec& input_gradient,
                                const Vec& output_gradient,
                                Vec& input_diag_hessian,
                                const Vec& output_diag_hessian,
                                bool accumulate)
{
}
*/

/* THIS METHOD IS OPTIONAL
void MatrixModule::bbpropUpdate(const Vec& input, const Vec& output,
                                const Vec& output_gradient,
                                const Vec& output_diag_hessian)
{
}
*/

////////////
// forget //
////////////
void MatrixModule::forget()
{
    // Nothing to forget.
}
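// Usage illustration (a minimal sketch added for clarity, not part of the
// original file): the ports-based fprop above follows the convention that an
// empty matrix passed through a port is a query to be filled in, while a
// non-empty matrix is a value to be stored.
/*
    MatrixModule m("m", true);

    Mat values(2, 3);
    values.fill(1.0);
    TVec<Mat*> ports(1);
    ports[0] = &values;
    m.fprop(ports);          // non-empty matrix: 'data' now stores 'values'

    Mat out;                 // empty matrix: query mode
    ports[0] = &out;
    m.fprop(ports);          // 'out' is resized to 2x3 and filled from 'data'
*/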
//////////////
// getPorts //
//////////////
const TVec<string>& MatrixModule::getPorts()
{
    static TVec<string> ports;
    if (ports.isEmpty())
        ports.append("data");
    return ports;
}

//////////////////
// getPortSizes //
//////////////////
const TMat<int>& MatrixModule::getPortSizes() {
    port_sizes.resize(1, 2);
    port_sizes(0, 0) = data.length();
    port_sizes(0, 1) = data.width();
    return port_sizes;
}

//////////////
// finalize //
//////////////
/* THIS METHOD IS OPTIONAL
void MatrixModule::finalize()
{
}
*/

/////////////
// getData //
/////////////
Mat& MatrixModule::getData()
{
    return this->data;
}

//////////////////////
// bpropDoesNothing //
//////////////////////
/* THIS METHOD IS OPTIONAL
bool MatrixModule::bpropDoesNothing()
{
}
*/

///////////////////
// setGradientTo //
///////////////////
void MatrixModule::setGradientTo(real g)
{
    data_gradient.resize(data.length(), data.width());
    data_gradient.fill(g);
}

/////////////////////
// setLearningRate //
/////////////////////
/* OPTIONAL
void MatrixModule::setLearningRate(real dynamic_learning_rate)
{
}
*/

/////////////
// setData //
/////////////
void MatrixModule::setData(const Mat& the_data)
{
    this->data = the_data;
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
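// Gradient-side illustration (again a sketch, not part of the original file,
// using the same empty-vs-full port convention as the fprop example above):
// setGradientTo() fills 'data_gradient' to match the shape of 'data', and
// bpropAccUpdate() accumulates it into an empty gradient matrix passed
// through the single "data" port.
/*
    MatrixModule m("m", true);
    m.getData().resize(2, 2);
    m.getData().fill(0.5);
    m.setGradientTo(1.0);    // 'data_gradient' becomes a 2x2 matrix of ones

    Mat grad;                // empty: ask the module for its gradient
    TVec<Mat*> grads(1);
    grads[0] = &grad;
    m.bpropAccUpdate(TVec<Mat*>(1), grads);
    // 'grad' has been resized to 2x2 and 'data_gradient' accumulated into it.
*/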