PLearn 0.1
// -*- C++ -*-

// RBMMixedLayer.cc
//
// Copyright (C) 2006 Pascal Lamblin & Dan Popovici
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin & Dan Popovici

#include "RBMMixedLayer.h"
#include <plearn/math/TMat_maths.h>
#include "RBMParameters.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    RBMMixedLayer,
    "Layer in an RBM, concatenating other sub-layers",
    "");

RBMMixedLayer::RBMMixedLayer()
{
}

RBMMixedLayer::RBMMixedLayer( TVec< PP<RBMLayer> > the_sub_layers ) :
    sub_layers( the_sub_layers )
{
    build();
}

//! Uses "rbmp" to compute the activation of unit "i" of this layer,
//! dispatching the call to the sub-layer that contains this unit.
void RBMMixedLayer::getUnitActivations( int i, PP<RBMParameters> rbmp,
                                        int offset )
{
    int j = layer_of_unit[i];
    PP<RBMLayer> layer = sub_layers[j]; // j (not i) indexes the sub-layer
    int sub_index = i - init_positions[j];
    int total_offset = offset + init_positions[j];

    layer->getUnitActivations( sub_index, rbmp, total_offset );
    expectation_is_up_to_date = false;
}

//! Uses "rbmp" to compute the activations of all units, one sub-layer at a
//! time, shifting the offset by each sub-layer's initial position.
void RBMMixedLayer::getAllActivations( PP<RBMParameters> rbmp, int offset )
{
    for( int i=0 ; i<n_layers ; i++ )
    {
        int total_offset = offset + init_positions[i];
        sub_layers[i]->getAllActivations( rbmp, total_offset );
    }
    expectation_is_up_to_date = false;
}

void RBMMixedLayer::generateSample()
{
    for( int i=0 ; i<n_layers ; i++ )
        sub_layers[i]->generateSample();
}

void RBMMixedLayer::computeExpectation()
{
    if( expectation_is_up_to_date )
        return;

    for( int i=0 ; i<n_layers ; i++ )
        sub_layers[i]->computeExpectation();

    expectation_is_up_to_date = true;
}

void RBMMixedLayer::bpropUpdate(const Vec& input, const Vec& output,
                                Vec& input_gradient,
                                const Vec& output_gradient)
{
    PLERROR( "RBMMixedLayer::bpropUpdate not implemented yet." );
}

void RBMMixedLayer::declareOptions(OptionList& ol)
{
    declareOption(ol, "sub_layers", &RBMMixedLayer::sub_layers,
                  OptionBase::buildoption,
                  "The concatenated RBMLayers composing this layer.");

    declareOption(ol, "init_positions", &RBMMixedLayer::init_positions,
                  OptionBase::learntoption,
                  "Index of the first unit of each sub_layer in the"
                  " concatenated layer.");

    declareOption(ol, "layer_of_unit", &RBMMixedLayer::layer_of_unit,
                  OptionBase::learntoption,
                  "layer_of_unit[i] is the index of the sub_layer containing"
                  " unit i.");

    declareOption(ol, "n_layers", &RBMMixedLayer::n_layers,
                  OptionBase::learntoption,
                  "Number of sub-layers.");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

void RBMMixedLayer::build_()
{
    units_types = "";
    size = 0;
    activations.resize( 0 );
    sample.resize( 0 );
    expectation.resize( 0 );
    expectation_is_up_to_date = false;
    layer_of_unit.resize( 0 );

    n_layers = sub_layers.size();
    init_positions.resize( n_layers );

    for( int i=0 ; i<n_layers ; i++ )
    {
        init_positions[i] = size;

        PP<RBMLayer> cur_layer = sub_layers[i];
        units_types += cur_layer->units_types;
        size += cur_layer->size;

        activations = merge( activations, cur_layer->activations );
        sample = merge( sample, cur_layer->sample );
        expectation = merge( expectation, cur_layer->expectation );
        layer_of_unit.append( TVec<int>( cur_layer->size, i ) );
    }
}

void RBMMixedLayer::build()
{
    inherited::build();
    build_();
}


void RBMMixedLayer::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    deepCopyField(sub_layers, copies);
    deepCopyField(init_positions, copies);
    deepCopyField(layer_of_unit, copies);
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
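Below is a minimal standalone sketch of the index bookkeeping that build_() sets up and getUnitActivations() relies on: init_positions holds the first global unit index of each sub-layer, and layer_of_unit maps every global unit back to the sub-layer that owns it. The sub-layer sizes (3 and 5) are made-up example values, and the sketch uses plain std::vector instead of PLearn's TVec so it compiles without the library; it is an illustration of the mapping, not part of the class.

// Standalone sketch (assumed example sizes, no PLearn dependency).
#include <cassert>
#include <vector>

int main()
{
    // Hypothetical sub-layer sizes, standing in for sub_layers[k]->size.
    std::vector<int> sizes = { 3, 5 };

    std::vector<int> init_positions;  // first global index of each sub-layer
    std::vector<int> layer_of_unit;   // owning sub-layer of each global unit
    int size = 0;                     // total number of concatenated units

    for( size_t k = 0; k < sizes.size(); k++ )
    {
        init_positions.push_back( size );
        for( int u = 0; u < sizes[k]; u++ )
            layer_of_unit.push_back( int(k) );
        size += sizes[k];
    }

    // Global unit 4 belongs to sub-layer 1 and has local index 4 - 3 = 1,
    // the same "sub_index" that getUnitActivations() computes.
    int i = 4;
    int j = layer_of_unit[i];
    assert( j == 1 );
    assert( i - init_positions[j] == 1 );
    assert( size == 8 );

    return 0;
}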