// -*- C++ -*-

// Distribution.cc
//
// Copyright (C) *YEAR* *AUTHOR(S)*
// ...
// Copyright (C) *YEAR* *AUTHOR(S)*
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: Distribution.cc 9418 2008-09-02 15:33:46Z nouiz $
 ******************************************************* */

#include "Distribution.h"
#include <plearn/ker/NegOutputCostFunction.h>

namespace PLearn {
using namespace std;

Distribution::Distribution()
    :Learner(0,1,1), use_returns_what("l")
{
    // cost function is -log_density
    setTestCostFunctions(neg_output_costfunc());
}

PLEARN_IMPLEMENT_OBJECT(Distribution,
                        "This class is deprecated, use PDistribution instead.",
                        "NO HELP");

void Distribution::declareOptions(OptionList& ol)
{
    // ### Declare all of this object's options here
    // ### For the "flags" of each option, you should typically specify
    // ### one of OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. Another possible flag to be combined with
    // ### is OptionBase::nosave

    declareOption(ol, "use_returns_what", &Distribution::use_returns_what, OptionBase::buildoption,
                  "A string where the characters have the following meaning: \n"
                  "'l'-> log_density, 'd' -> density, 'c' -> cdf, 's' -> survival_fn, 'e' -> expectation, 'v' -> variance");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}
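
// For example (see the switch in use() below): with use_returns_what = "ld",
// use() fills a 2-element output with output[0] = log_density(input) and
// output[1] = density(input); the default "l" returns only the log-density.
// Note that 'e' and 'v' are advertised in the help string above but are not
// handled by use().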

void Distribution::build_()
{
    // ### This method should do the real building of the object,
    // ### according to set 'options', in *any* situation.
    // ### Typical situations include:
    // ###  - Initial building of an object from a few user-specified options
    // ###  - Building of a "reloaded" object: i.e. from the complete set of all serialised options.
    // ###  - Updating or "re-building" of an object after a few "tuning" options have been modified.
    // ### You should assume that the parent class' build_() has already been called.

    outputsize_ = use_returns_what.length();
}

// ### Nothing to add here, simply calls build_
void Distribution::build()
{
    inherited::build();
    build_();
}


void Distribution::train(VMat training_set)
{
    if(training_set->width() != inputsize()+targetsize())
        PLERROR("In Distribution::train(VMat training_set) training_set->width() != inputsize()+targetsize()");

    setTrainingSet(training_set);

    // ### Please implement the actual training of the model.
    // ### For models with incremental training, to benefit
    // ### from the "testing during training" and early-stopping
    // ### mechanisms, you should make sure to call measure at
    // ### every "epoch" (whatever epoch means for your algorithm).
    // ### ex:
    // if(measure(epoch,costvec))
    //     break; // exit training loop because early-stopping conditions were met
}
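
// A sketch of the incremental training loop a subclass might implement in
// train(), based on the measure() call suggested above (nepochs and costvec
// are hypothetical and would be provided by the subclass):
//
//   for (int epoch = 1; epoch <= nepochs; epoch++)
//   {
//       // ... update the model's parameters on training_set ...
//       if (measure(epoch, costvec))
//           break;  // early-stopping conditions were met
//   }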

void Distribution::use(const Vec& input, Vec& output)
{
    int l = (int)use_returns_what.length();
    for(int i=0; i<l; i++)
    {
        switch(use_returns_what[i])
        {
        case 'l':
            output[i] = (real) log_density(input);
            break;
        case 'd':
            output[i] = (real) density(input);
            break;
        case 'c':
            output[i] = (real) cdf(input);
            break;
        case 's':
            output[i] = (real) survival_fn(input);
            break;
        default:
            PLERROR("In Distribution::use unknown use_returns_what character");
        }
    }
}
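
// Minimal usage sketch (MyDistribution is a hypothetical subclass that
// implements log_density; everything else comes from Learner/Distribution):
//
//   PP<Distribution> dist = new MyDistribution();
//   dist->use_returns_what = "ld";     // request log-density and density
//   dist->build();                     // outputsize_ becomes 2
//   Vec input(n);                      // a point of dimension n (hypothetical)
//   Vec output(2);
//   dist->use(input, output);          // output = (log_density, density)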

void Distribution::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    Learner::makeDeepCopyFromShallowCopy(copies);
}

double Distribution::log_density(const Vec& x) const
{ PLERROR("log_density not implemented for this Distribution"); return 0; }
00147 
00148 double Distribution::density(const Vec& x) const
00149 { return exp(log_density(x)); }
00150   
00151 double Distribution::survival_fn(const Vec& x) const
00152 { PLERROR("survival_fn not implemented for this Distribution"); return 0; }
00153 
00154 double Distribution::cdf(const Vec& x) const
00155 { PLERROR("cdf not implemented for this Distribution"); return 0; }
00156 
00157 Vec Distribution::expectation() const
00158 { PLERROR("expectation not implemented for this Distribution"); return Vec(); }
00159 
00160 Mat Distribution::variance() const
00161 { PLERROR("variance not implemented for this Distribution"); return Mat(); }
00162 
00163 void Distribution::generate(Vec& x) const
00164 { PLERROR("generate not implemented for this Distribution"); }
00165 
00166 
00167 } // end of namespace PLearn
00168 
00169 
00170 /*
00171   Local Variables:
00172   mode:c++
00173   c-basic-offset:4
00174   c-file-style:"stroustrup"
00175   c-file-offsets:((innamespace . 0)(inline-open . 0))
00176   indent-tabs-mode:nil
00177   fill-column:79
00178   End:
00179 */
00180 // vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :