// -*- C++ -*-

// SurfaceTemplateLearner.cc
//
// Copyright (C) 2006 Pascal Lamblin and Olivier Delalleau
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Pascal Lamblin and Olivier Delalleau

#include "SurfaceTemplateLearner.h"
#include "ScoreLayerVariable.h"

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    SurfaceTemplateLearner,
    "Neural-network to learn from molecular alignment.",
    ""
);

////////////////////////////
// SurfaceTemplateLearner //
////////////////////////////
SurfaceTemplateLearner::SurfaceTemplateLearner():
    min_feature_dev(1e-3),
    min_geom_dev(1e-3),
    simple_mixture(false)
{
    nhidden2 = 10;
    // Set some NNet options whose value is fixed in this learner.
    nhidden = 0;
    noutputs = 1;
    output_transfer_func = "sigmoid";
    cost_funcs = TVec<string>(1, "stable_cross_entropy");
    transpose_first_hidden_layer = false;
    n_non_params_in_first_hidden_layer = 1;
}

////////////////////
// declareOptions //
////////////////////
void SurfaceTemplateLearner::declareOptions(OptionList& ol)
{
    declareOption(ol, "min_feature_dev",
                  &SurfaceTemplateLearner::min_feature_dev,
                  OptionBase::buildoption,
        "Minimum feature standard deviations allowed.");

    declareOption(ol, "min_geom_dev",
                  &SurfaceTemplateLearner::min_geom_dev,
                  OptionBase::buildoption,
        "Minimum geometric standard deviations allowed.");

    // We rename 'first_hidden_layer' to 'score_layer' to avoid potential
    // confusion.
    declareOption(ol, "score_layer",
                  &SurfaceTemplateLearner::first_hidden_layer,
                  OptionBase::buildoption,
        "The layer of scores (should be a ScoreLayerVariable).");

    declareOption(ol, "simple_mixture",
                  &SurfaceTemplateLearner::simple_mixture,
                  OptionBase::buildoption,
        "If true, then instead of building another hidden layer on top of\n"
        "alignment scores, we use them directly in a mixture of Gaussians\n"
        "fashion to estimate the probability of being active.");

    declareOption(ol, "templates_source",
                  &SurfaceTemplateLearner::templates_source,
                  OptionBase::buildoption,
        "The dataset where templates are taken from. If not provided, the\n"
        "training set will be used instead.");

    // Now call the parent class' declareOptions.
    inherited::declareOptions(ol);

    // Redeclare some of the parent's options to make this learner more
    // user-friendly.

    // 'nhidden' now modifies the 'nhidden2' parameter in NNet, since a
    // SurfaceTemplateLearner always has a first hidden layer that is a
    // ScoreLayerVariable.
    redeclareOption(ol, "nhidden", &SurfaceTemplateLearner::nhidden2,
                    OptionBase::buildoption,
        "Number of hidden units.");

    redeclareOption(ol, "nhidden2", &SurfaceTemplateLearner::nhidden2,
                    OptionBase::nosave,
        "Not used (see nhidden).");

    redeclareOption(ol, "noutputs", &SurfaceTemplateLearner::noutputs,
                    OptionBase::nosave,
        "Not used (= 1).");

    redeclareOption(ol, "bias_decay", &SurfaceTemplateLearner::bias_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "layer1_weight_decay",
                    &SurfaceTemplateLearner::layer1_weight_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "layer1_bias_decay",
                    &SurfaceTemplateLearner::layer1_bias_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "layer2_weight_decay",
                    &SurfaceTemplateLearner::layer2_weight_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "layer2_bias_decay",
                    &SurfaceTemplateLearner::layer2_bias_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "output_layer_weight_decay",
                    &SurfaceTemplateLearner::output_layer_weight_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "output_layer_bias_decay",
                    &SurfaceTemplateLearner::output_layer_bias_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "direct_in_to_out_weight_decay",
                    &SurfaceTemplateLearner::direct_in_to_out_weight_decay,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "L1_penalty", &SurfaceTemplateLearner::L1_penalty,
                    OptionBase::nosave,
        "Not used (deprecated).");

    redeclareOption(ol, "fixed_output_weights",
                    &SurfaceTemplateLearner::fixed_output_weights,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "input_reconstruction_penalty",
                    &SurfaceTemplateLearner::input_reconstruction_penalty,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "direct_in_to_out",
                    &SurfaceTemplateLearner::direct_in_to_out,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "rbf_layer_size",
                    &SurfaceTemplateLearner::rbf_layer_size,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "first_class_is_junk",
                    &SurfaceTemplateLearner::first_class_is_junk,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "output_transfer_func",
                    &SurfaceTemplateLearner::output_transfer_func,
                    OptionBase::nosave,
        "Not used (= sigmoid or none, depending on 'simple_mixture').");

    redeclareOption(ol, "hidden_transfer_func",
                    &SurfaceTemplateLearner::hidden_transfer_func,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "first_hidden_layer",
                    &SurfaceTemplateLearner::first_hidden_layer,
                    OptionBase::nosave,
        "Not used (renamed to 'score_layer').");

    redeclareOption(ol, "transpose_first_hidden_layer",
                    &SurfaceTemplateLearner::transpose_first_hidden_layer,
                    OptionBase::nosave,
        "Not used (= false).");

    redeclareOption(ol, "n_non_params_in_first_hidden_layer",
                    &SurfaceTemplateLearner::n_non_params_in_first_hidden_layer,
                    OptionBase::nosave,
        "Not used (= 1 because of the 'final_output' variable in the\n"
        "ScoreLayerVariable).");

    redeclareOption(ol, "margin", &SurfaceTemplateLearner::margin,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "do_not_change_params",
                    &SurfaceTemplateLearner::do_not_change_params,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "initialization_method",
                    &SurfaceTemplateLearner::initialization_method,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "forget_when_training_set_changes",
                    &SurfaceTemplateLearner::forget_when_training_set_changes,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "nservers", &SurfaceTemplateLearner::nservers,
                    OptionBase::nosave,
        "Not used (simplification).");

    redeclareOption(ol, "save_trainingset_prefix",
                    &SurfaceTemplateLearner::save_trainingset_prefix,
                    OptionBase::nosave,
        "Not used (simplification).");

}

////////////
// build_ //
////////////
void SurfaceTemplateLearner::build_()
{
    // Ensure the first hidden layer is a subclass of ScoreLayerVariable.
    if (first_hidden_layer) {
        PP<ScoreLayerVariable> score_layer =
            (ScoreLayerVariable*) ((Variable*) first_hidden_layer);
        if (!score_layer)
            PLERROR("In SurfaceTemplateLearner::build_ - The first hidden "
                    "layer, as given by the 'score_layer' option, must be a "
                    "subclass of ScoreLayerVariable");
        // Set the minimum value for template standard deviations.
        if (score_layer->run_icp_var) {
            TVec< PP<ChemicalICP> > icp_aligners =
                score_layer->run_icp_var->icp_aligners;
            for (int i = 0; i < icp_aligners.length(); i++) {
                icp_aligners[i]->all_template_feat_dev->
                    setMinValue(min_feature_dev);
                icp_aligners[i]->template_geom_dev->setMinValue(min_geom_dev);
            }
        }
        // Set value of 'simple_mixture' option.
        score_layer->simple_mixture = this->simple_mixture;
    }
}

///////////
// build //
///////////
void SurfaceTemplateLearner::build()
{
    // Very ugly hack: because NNet::build_() will perform a fprop(), we need
    // a sensible input value for this fprop, which means we need a training
    // set. A simple way to fix this would be to remove the
    // output_and_target_to_cost->recomputeParents() in the NNet build, but
    // one should make sure it does not break anything first.
    if (!train_set && templates_source)
        this->train_set = templates_source;

    // Because the overall network is built in the NNet build, the simple
    // mixture case must be handled before calling it.
    first_hidden_layer_is_output = simple_mixture;

    // Since we are already hacking this method, we may continue doing so:
    // depending on the value of 'simple_mixture' (true or false), the output
    // transfer function should be either nothing or a sigmoid.
    output_transfer_func = simple_mixture ? "none" : "sigmoid";

    inherited::build();
    build_();
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void SurfaceTemplateLearner::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // ### Call deepCopyField on all "pointer-like" fields
    // ### that you wish to be deepCopied rather than
    // ### shallow-copied.
    // ### ex:
    // deepCopyField(trainvec, copies);
    deepCopyField(templates_source, copies);

    // ### Remove this line when you have fully implemented this method.
    // PLERROR("SurfaceTemplateLearner::makeDeepCopyFromShallowCopy not fully (correctly) implemented yet!");
}

////////////////////
// setTrainingSet //
////////////////////
void SurfaceTemplateLearner::setTrainingSet(VMat training_set,
                                            bool call_forget)
{
    // Rebuild the internal score layer.
    PP<ScoreLayerVariable> score_layer =
        (ScoreLayerVariable*) ((Variable*) first_hidden_layer);
    score_layer->templates_source =
        this->templates_source ? this->templates_source : training_set;
    score_layer->setMappingsSource(training_set);
    score_layer->build();

    inherited::setTrainingSet(training_set, call_forget);
}

//////////
// test //
//////////
void SurfaceTemplateLearner::test(VMat testset,
                                  PP<VecStatsCollector> test_stats,
                                  VMat testoutputs, VMat testcosts) const
{
    PP<ScoreLayerVariable> score_layer =
        (ScoreLayerVariable*) ((Variable*) first_hidden_layer);
    score_layer->setMappingsSource(testset);
    inherited::test(testset, test_stats, testoutputs, testcosts);
}

/*
void SurfaceTemplateLearner::forget()
{
    inherited::forget();
}
*/

///////////
// train //
///////////
void SurfaceTemplateLearner::train()
{
    PP<ScoreLayerVariable> score_layer =
        (ScoreLayerVariable*) ((Variable*) first_hidden_layer);
    score_layer->setMappingsSource(train_set);
    if (stage == 0) {
        // Make sure all ICP aligners forget any previously computed
        // alignment. This can be important when they use some memory scheme,
        // since at build time a first alignment might be performed, and
        // should probably be forgotten.
        TVec< PP<ChemicalICP> > icps = score_layer->run_icp_var->icp_aligners;
        for (int i = 0; i < icps.length(); i++)
            icps[i]->forgetMemorizedAlignments();
    }
    inherited::train();
}

/*
void SurfaceTemplateLearner::computeOutput(const Vec& input, Vec& output) const
{
    // Compute the output from the input.
    // int nout = outputsize();
    // output.resize(nout);
    // ...
}

void SurfaceTemplateLearner::computeCostsFromOutputs(const Vec& input, const Vec& output,
                                                     const Vec& target, Vec& costs) const
{
    // Compute the costs from *already* computed output.
    // ...
}
*/

} // end of namespace PLearn


/*
  Local Variables:
    mode:c++
    c-basic-offset:4
    c-file-style:"stroustrup"
    c-file-offsets:((innamespace . 0)(inline-open . 0))
    indent-tabs-mode:nil
    fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
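
// A minimal usage sketch: how this learner might be driven directly from C++,
// using only the options declared above ('score_layer', 'templates_source',
// 'min_feature_dev', 'min_geom_dev', 'simple_mixture') and the methods
// defined in this file. The function name and the 'train_vmat',
// 'templates_vmat' and 'score_layer' arguments are hypothetical, and the
// sketch assumes the build options are public members, as is the usual PLearn
// convention. Kept commented out, like the other examples in this file.
/*
#include "SurfaceTemplateLearner.h"

using namespace PLearn;

void train_surface_template_learner(VMat train_vmat, VMat templates_vmat,
                                     Var score_layer)
{
    PP<SurfaceTemplateLearner> learner = new SurfaceTemplateLearner();
    // 'score_layer' must point to a ScoreLayerVariable, as checked in build_().
    learner->first_hidden_layer = score_layer;    // the 'score_layer' option
    learner->templates_source   = templates_vmat; // optional: defaults to the training set
    learner->min_feature_dev    = 1e-3;           // minimum feature standard deviations
    learner->min_geom_dev       = 1e-3;           // minimum geometric standard deviations
    learner->simple_mixture     = false;          // keep the hidden layer on top of the scores
    learner->build();
    learner->setTrainingSet(train_vmat, true);    // also rebuilds the internal score layer
    learner->train();
}
*/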