// -*- C++ -*-

// NeuralNet.h
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
// 
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
// 
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
// 
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// 
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* *******************************************************
 * $Id: NeuralNet.h 8321 2007-11-28 21:37:09Z nouiz $
 ******************************************************* */


#ifndef NeuralNet_INC
#define NeuralNet_INC

#include "Learner.h"
#include <plearn/opt/Optimizer.h>
//#include "Var_all.h"

namespace PLearn {
using namespace std;

class NeuralNet: public Learner
{
protected:
    Var input;  // Var(inputsize())
    Var target; // Var(targetsize()-weightsize())
    Var costweights; // Var(weightsize())
    Var target_and_weights; // hconcat(target & costweights)
    Var w1; // bias and weights of first hidden layer
    Var w2; // bias and weights of second hidden layer
    Var wout; // bias and weights of output layer
    Var wdirect; // bias and weights for direct in-to-out connection
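    // Rough sketch of the function these parameters define (an assumption
    // inferred from the option names below, not from the .cc file; the
    // actual hidden-layer nonlinearity may differ):
    //   h1     = tanh(w1 * (1, input))
    //   h2     = tanh(w2 * (1, h1))          // only when nhidden2 > 0
    //   output = transfer(wout * (1, h2) + wdirect * (1, input))
    // where "transfer" is output_transfer_func (identity if "") and the
    // wdirect term is present only when direct_in_to_out is true.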

    Var output;
    VarArray costs; // all costs of interest
    Var cost; // cost for one (input,target)

    VarArray params;  // all parameter input vars

    Vec paramsvalues; // values of all parameters
    Vec initial_paramsvalues;

public:
    Func f; // input -> output
    Func costf; // input & target -> output & cost
    Func output_and_target_to_cost; // output & target -> cost

public:

    typedef Learner inherited;

    // Build options inherited from Learner:
    // inputsize, outputsize, targetsize, experiment_name, save_at_every_epoch

    // Build options:
    int nhidden;    // number of hidden units in first hidden layer (default: 0)
    int nhidden2;   // number of hidden units in second hidden layer (default: 0)

    real weight_decay; // default: 0
    real bias_decay;   // default: 0
    real layer1_weight_decay; // default: MISSING_VALUE
    real layer1_bias_decay;   // default: MISSING_VALUE
    real layer2_weight_decay; // default: MISSING_VALUE
    real layer2_bias_decay;   // default: MISSING_VALUE
    real output_layer_weight_decay; // default: MISSING_VALUE
    real output_layer_bias_decay;   // default: MISSING_VALUE
    real direct_in_to_out_weight_decay; // default: MISSING_VALUE
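    // (Presumably, a per-layer decay left at MISSING_VALUE means the global
    // weight_decay/bias_decay above applies to that layer; check build_()
    // in NeuralNet.cc to confirm.)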

    bool global_weight_decay; // default: false
    bool direct_in_to_out; // should we include direct input-to-output connections? (default: false)
    string output_transfer_func; // tanh, sigmoid, softplus, softmax (default: "" means no transfer function)
    int iseed; // seed for the random number generator, -1 = use time of day

    // Each cost function can be one of mse, mse_onehot, NLL, class_error or
    // multiclass_error (no default), or semisupervised_prob_class (in which
    // case the semisupervised_flatten_factor option must be set).
    Array<string> cost_funcs;
    real semisupervised_flatten_factor; // see SemiSupervisedProbClassCostVariable for explanations
    Vec semisupervised_prior; // see SemiSupervisedProbClassCostVariable for explanations
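    // For example, a hypothetical configuration (not from the distribution):
    //   cost_funcs = [ "NLL", "class_error" ]
    // would train on the first cost and report both, assuming this learner
    // follows the same first-cost-is-optimized convention as PLearn's later
    // NNet learner.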

    // Build options related to the optimization:
    PP<Optimizer> optimizer; // the optimizer to use (no default)

    int batch_size; // how many samples to use to estimate gradient before an update
                    // 0 means the whole training set (default: 1)

    int nepochs;   // how many times the optimizer gets to see the training set (default: 10000)
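    // For instance, with a 1000-example training set, batch_size=20 would
    // presumably yield 1000/20 = 50 parameter updates per pass over the
    // data, and nepochs such passes in total.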

    string saveparams; // where to optionally save params after training

    Array<Vec> normalization;
private:
    void build_();

public:

    NeuralNet();
    virtual ~NeuralNet();
    PLEARN_DECLARE_OBJECT(NeuralNet);

    // Learner methods

    virtual void build();
    virtual void forget(); // simply calls initializeParams()
    void initializeParams();

    virtual void train(VMat training_set);
    virtual void use(const Vec& inputvec, Vec& prediction);

    virtual int costsize() const;
    virtual Array<string> costNames() const;
    virtual Array<string> testResultsNames() { return costNames(); }
    virtual void useAndCost(const Vec& inputvec, const Vec& targetvec,
                            Vec outputvec, Vec costvec);
    void computeCost(const Vec& inputvec, const Vec& targetvec,
                     const Vec& outputvec, const Vec& costvec);

    virtual void makeDeepCopyFromShallowCopy(CopiesMap &copies);
protected:
    static void declareOptions(OptionList& ol);

};

DECLARE_OBJECT_PTR(NeuralNet);
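
/* Example usage: a minimal sketch, not part of the PLearn distribution.
   It assumes a training VMat "trainset" built elsewhere with the proper
   input/target sizes, and that GradientOptimizer and the string-based
   setOption() mechanism behave as in other PLearn learners of this era:

       PP<NeuralNet> net = new NeuralNet();
       net->setOption("nhidden", "10");
       net->setOption("cost_funcs", "[ \"mse\" ]");
       net->setOption("optimizer",
                      "GradientOptimizer(start_learning_rate=0.01)");
       net->build();
       net->train(trainset);

       Vec input(net->inputsize());
       Vec prediction(net->outputsize());
       net->use(input, prediction); // fills prediction with the net's output
*/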

} // end of namespace PLearn

#endif


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :