// -*- C++ -*-

// AddLayersNNet.cc
//
// Copyright (C) 2004 Olivier Delalleau
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: AddLayersNNet.cc 8563 2008-02-22 21:14:48Z tihocan $
 ******************************************************* */

// Authors: Olivier Delalleau

#include "AddLayersNNet.h"
#include <plearn/math/random.h>
#include <plearn/var/AffineTransformWeightPenalty.h>
#include <plearn/var/ConcatRowsVariable.h>
#include <plearn/var/SubMatVariable.h>

namespace PLearn {
using namespace std;

///////////////////
// AddLayersNNet //
///////////////////
AddLayersNNet::AddLayersNNet()
    : added_hidden_transfer_func("tanh")
{}

PLEARN_IMPLEMENT_OBJECT(AddLayersNNet,
    "This subclass of NNet allows one to add a hidden layer, possibly only for parts of the input.",
    "The hidden layer is added before the first hidden layer of NNet. You cannot add\n"
    "two successive hidden layers, but you can add a hidden layer for each part of the\n"
    "input. The input is divided into parts by the 'parts_size' option, and for each\n"
    "part you can specify how many hidden units to add, with the 'add_hidden' option.\n"
    "If no hidden layer is added for a part, this part is directly connected to the\n"
    "first hidden layer of the classical NNet. For each part, a different hidden layer\n"
    "is created, so that if you want two parts to use the same hidden layer, you\n"
    "should concatenate those parts into a single one.\n"
    "In the simple case where you just want to add a single hidden layer, you should set:\n"
    " - parts_size = [ -1 ]\n"
    " - add_hidden = [ number_of_hidden_units_added ]\n"
);
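
// A minimal configuration sketch (hypothetical option values, for
// illustration only): to add a 10-unit hidden layer on top of the first 5
// inputs while connecting the remaining inputs directly to the regular NNet
// layers, a PLearn script could contain something like:
//
//     AddLayersNNet(
//         parts_size = [ 5, -1 ];  # 5 inputs in part 0, the rest in part 1
//         add_hidden = [ 10, 0 ];  # 10 added units for part 0, none for part 1
//         added_hidden_transfer_func = "tanh";
//         # ... plus the usual NNet options (nhidden, noutputs, etc.)
//     )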

////////////////////
// declareOptions //
////////////////////
void AddLayersNNet::declareOptions(OptionList& ol)
{
    // ### For the "flags" of each option, you should typically specify one of
    // ### OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. Another possible flag that can be
    // ### combined with the above is OptionBase::nosave.

    // Build options.

    declareOption(ol, "parts_size", &AddLayersNNet::parts_size, OptionBase::buildoption,
        "The size of each part. '-1' can be used to specify that this part's size should\n"
        "be such that all inputs are considered ('-1' can thus only appear once).");

    declareOption(ol, "add_hidden", &AddLayersNNet::add_hidden, OptionBase::buildoption,
        "Specify for each part how many hidden units we want to add.");

    declareOption(ol, "added_hidden_transfer_func", &AddLayersNNet::added_hidden_transfer_func, OptionBase::buildoption,
        "The transfer function for the added hidden layers.");

    // Learnt options.

    // declareOption(ol, "myoption", &AddLayersNNet::myoption, OptionBase::learntoption,
    //               "Help text describing this option");

    // Now call the parent class' declareOptions.
    inherited::declareOptions(ol);
}

///////////
// build //
///////////
void AddLayersNNet::build()
{
    // We ensure that weights are not filled with random numbers, in order to
    // be able to compare with a classical NNet using the same seed.
    string initialization_method_backup = initialization_method;
    bool do_not_change_params_backup = do_not_change_params;
    initialization_method = "zero";
    do_not_change_params = true;
    inherited::build();
    initialization_method = initialization_method_backup;
    do_not_change_params = do_not_change_params_backup;
    build_();
}

////////////
// build_ //
////////////
void AddLayersNNet::build_()
{
    // ### This method should do the real building of the object, according to
    // ### the set options, in *any* situation. Typical situations include:
    // ###  - initial building of an object from a few user-specified options;
    // ###  - building of a "reloaded" object, i.e. from the complete set of
    // ###    all serialised options;
    // ###  - updating or "re-building" of an object after a few "tuning"
    // ###    options have been modified.
    // ### You should assume that the parent class' build_() has already been
    // ### called.

    // Don't do anything if we do not have an inputsize.
    if (inputsize_ < 0)
        return;
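
    // A worked example of the size bookkeeping below (hypothetical values):
    // with inputsize_ == 10 and parts_size == [ 3, -1, 2 ], the '-1' entry is
    // resolved to 10 - (3 + 2) = 5, so real_parts_size becomes [ 3, 5, 2 ].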
    if (parts_size.isEmpty() || add_hidden.isEmpty())
        PLERROR("In AddLayersNNet::build_ - You must fill both 'parts_size' and 'add_hidden'");
    if (parts_size.length() != add_hidden.length())
        PLERROR("In AddLayersNNet::build_ - 'parts_size' and 'add_hidden' must have the same length");
    int n_parts = parts_size.length();
    int count_parts_size = 0;
    bool found_minus_one = false;
    int minus_one_index = -1;
    for (int i = 0; i < n_parts; i++) {
        if (parts_size[i] >= 0) {
            count_parts_size += parts_size[i];
        } else if (parts_size[i] == -1) {
            if (found_minus_one) {
                PLERROR("In AddLayersNNet::build_ - There can be only one '-1' in 'parts_size'");
            } else {
                // There is a '-1'.
                found_minus_one = true;
                minus_one_index = i;
            }
        } else {
            // There is a negative value that is not -1: this should not happen.
            PLERROR("In AddLayersNNet::build_ - Wrong value for parts_size[%d]: %d", i, parts_size[i]);
        }
    }
    if (count_parts_size > inputsize_)
        PLERROR("In AddLayersNNet::build_ - The sum of all parts' sizes (%d) exceeds the inputsize (%d)", count_parts_size, inputsize_);
    if (found_minus_one) {
        real_parts_size.resize(parts_size.length());
        real_parts_size << parts_size;
        real_parts_size[minus_one_index] = inputsize_ - count_parts_size;
    } else {
        real_parts_size = parts_size;
        if (count_parts_size != inputsize_)
            PLERROR("In AddLayersNNet::build_ - The sum of all parts' sizes (%d) is less than the inputsize (%d)", count_parts_size, inputsize_);
    }

    // Now we redo the graph of variables, even if there is no added layer
    // (because the weights are not initialized in the parent class, since
    // 'initialization_method' is forced to 'zero' at build time).

    params.resize(0);

    // Create a Var for each part.
    VarArray input_parts(n_parts);
    int index = 0;
    for (int i = 0; i < n_parts; i++) {
        input_parts[i] = subMat(input, index, 0, real_parts_size[i], 1);
        input_parts[i]->setName("input_part_" + tostring(i));
        index += real_parts_size[i];
    }

    // Add the required hidden layers.
    hidden_layers.resize(n_parts);
    hidden_weights.resize(n_parts);
    for (int i = 0; i < n_parts; i++) {
        if (add_hidden[i] > 0) {
            Var weights = Var(1 + real_parts_size[i], add_hidden[i], ("w_added_" + tostring(i)).c_str());
            hidden_layers[i] = hiddenLayer(input_parts[i], weights, added_hidden_transfer_func);
            hidden_weights[i] = weights;
            params.append(hidden_weights[i]);
        } else {
            hidden_layers[i] = input_parts[i];
        }
    }

    // Create the concatenated "input" to the regular NNet.
    Var concat_input = vconcat(hidden_layers);

    Var hidden_layer;
    Var before_transfer_func;

    // Build main network graph.
    buildOutputFromInput(concat_input, hidden_layer, before_transfer_func);

    // Build target and weight variables.
    buildTargetAndWeight();

    // Build costs.
    buildCosts(output, target, hidden_layer, before_transfer_func);

    // Shared values hack...
    if (!do_not_change_params) {
        if (paramsvalues.length() == params.nelems())
            params << paramsvalues;
        else {
            paramsvalues.resize(params.nelems());
            initializeParams();
            if (optimizer)
                optimizer->reset();
        }
        params.makeSharedValue(paramsvalues);
    }

    // Build functions.
    buildFuncs(input, output, target, sampleweight, NULL);
}
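
// Note on the graph built in build_() (illustrative sizes, continuing the
// example above): with real_parts_size == [ 3, 5, 2 ] and add_hidden ==
// [ 10, 0, 4 ], each part contributes add_hidden[i] units if a layer was
// added and its raw real_parts_size[i] inputs otherwise, so the concatenated
// input fed to the regular NNet pipeline has size 10 + 5 + 4 = 19.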

////////////////////
// buildPenalties //
////////////////////
void AddLayersNNet::buildPenalties(const Var& hidden_layer) {
    inherited::buildPenalties(hidden_layer);
    if (hidden_weights.length() != parts_size.length())
        // The hidden weights have not yet been correctly initialized.
        return;
    for (int i = 0; i < parts_size.length(); i++) {
        if (add_hidden[i] > 0 && (weight_decay > 0 || bias_decay > 0)) {
            penalties.append(affine_transform_weight_penalty(hidden_weights[i], weight_decay, bias_decay, penalty_type));
        }
    }
}

//////////////////////////////
// getHiddenUnitsActivation //
//////////////////////////////
Vec AddLayersNNet::getHiddenUnitsActivation(int layer) {
    return hidden_layers[layer]->value;
}

//////////////////////
// getHiddenWeights //
//////////////////////
Mat AddLayersNNet::getHiddenWeights(int layer) {
    return hidden_weights[layer]->matValue;
}

////////////////////////////
// getOutputHiddenWeights //
////////////////////////////
Mat AddLayersNNet::getOutputHiddenWeights(int layer) {
    int count = 0;
    for (int i = 0; i < layer; i++)
        count += real_parts_size[i];
    return w1->matValue.subMatRows(count, add_hidden[layer]);
}

//////////////////////
// initializeParams //
//////////////////////
void AddLayersNNet::initializeParams(bool set_seed) {
    // TODO Remove later...
    if (set_seed) {
        if (seed_ >= 0)
            manual_seed(seed_);
        else
            PLearn::seed();
    }
    for (int i = 0; i < add_hidden.size(); i++)
        if (add_hidden[i] > 0)
            fillWeights(hidden_weights[i], true);
    inherited::initializeParams(false); // TODO Put this first later.
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void AddLayersNNet::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(real_parts_size, copies);
    deepCopyField(hidden_layers, copies);
    deepCopyField(hidden_weights, copies);
    deepCopyField(add_hidden, copies);
    deepCopyField(parts_size, copies);
}

} // end of namespace PLearn

/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :