// -*- C++ -*-

// LocallyWeightedDistribution.cc
//
// Copyright (C) 2002 Pascal Vincent
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: LocallyWeightedDistribution.cc 9418 2008-09-02 15:33:46Z nouiz $
 ******************************************************* */

#include "LocallyWeightedDistribution.h"
#include <plearn/vmat/ConcatColumnsVMatrix.h>

namespace PLearn {
using namespace std;

LocallyWeightedDistribution::LocallyWeightedDistribution()
{}


PLEARN_IMPLEMENT_OBJECT(LocallyWeightedDistribution,
                        "Density estimator that retrains 'localdistr' on the training samples, weighted by 'weighting_kernel' centered at each test point",
                        "NO HELP");

void LocallyWeightedDistribution::declareOptions(OptionList& ol)
{
    declareOption(ol, "weighting_kernel", &LocallyWeightedDistribution::weighting_kernel, OptionBase::buildoption,
                  "The kernel that will be used to locally weigh the samples");

    declareOption(ol, "localdistr", &LocallyWeightedDistribution::localdistr, OptionBase::buildoption,
                  "The distribution that will be trained with local weights");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

void LocallyWeightedDistribution::build_()
{
    // ### This method should do the real building of the object,
    // ### according to the set 'options', in *any* situation.
    // ### Typical situations include:
    // ###  - Initial building of an object from a few user-specified options
    // ###  - Building of a "reloaded" object: i.e. from the complete set of all serialised options.
    // ###  - Updating or "re-building" of an object after a few "tuning" options have been modified.
    // ### You should assume that the parent class' build_() has already been called.

    if(weightsize()!=0 && weightsize()!=1)
        PLERROR("In LocallyWeightedDistribution::build_, weightsize must be 0 or 1");

    // The local distribution sees the same inputs, plus exactly one weight column.
    localdistr->inputsize_ = inputsize_;
    localdistr->weightsize_ = 1;
    localdistr->build();
}

// ### Nothing to add here, simply calls build_
void LocallyWeightedDistribution::build()
{
    inherited::build();
    build_();
}


void LocallyWeightedDistribution::train(VMat training_set)
{
    if(training_set.width() != inputsize()+weightsize())
        PLERROR("In LocallyWeightedDistribution::train, width of training set differs from inputsize()+weightsize()");
    // Training is lazy: we only remember the training set here; the local
    // distribution is retrained for every call to log_density().
    setTrainingSet(training_set);
}


void LocallyWeightedDistribution::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    Distribution::makeDeepCopyFromShallowCopy(copies);

    // ### Call deepCopyField on all "pointer-like" fields
    // ### that you wish to be deepCopied rather than
    // ### shallow-copied.
    // ### ex:
    // deepCopyField(trainvec, copies);

    // ### Remove this line when you have fully implemented this method.
    PLERROR("LocallyWeightedDistribution::makeDeepCopyFromShallowCopy not fully (correctly) implemented yet!");
}


double LocallyWeightedDistribution::log_density(const Vec& x) const
{
    int l = train_set.length();
    int w = inputsize();
    weights.resize(l);
    // 'weights' will contain the "localization" weights for the current test point.
    trainsample.resize(w+weightsize());
    Vec input = trainsample.subVec(0,w);

    // Weight every training sample by its kernel similarity to x,
    // multiplied by the sample's own weight if the training set carries one.
    for(int i=0; i<l; i++)
    {
        train_set->getRow(i,trainsample);
        real weight = weighting_kernel(x,input);
        if(weightsize()==1)
            weight *= trainsample[w];
        weights[i] = weight;
    }

    VMat weight_column(columnmatrix(weights));

    VMat weighted_trainset;
    if(weightsize()==0) // append weight column
        weighted_trainset = hconcat(train_set, weight_column);
    else // replace last column by weight column
        weighted_trainset = hconcat(train_set.subMatColumns(0,inputsize()), weight_column);

    // Retrain the local distribution from scratch on the locally weighted
    // samples, then evaluate its density at the test point.
    localdistr->forget();
    localdistr->train(weighted_trainset);
    return localdistr->log_density(x);
}


} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :
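
// A possible completion of makeDeepCopyFromShallowCopy(), which is left above
// as a PLERROR stub. This is only a sketch following the suggestion in the
// template comments (call deepCopyField on the pointer-like members used in
// this file); it is an assumption, not the library's actual implementation,
// and it is kept under #if 0 so it cannot clash with the stub definition
// above. As written, it would belong inside namespace PLearn.
#if 0
void LocallyWeightedDistribution::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    Distribution::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(weighting_kernel, copies);  // localization kernel
    deepCopyField(localdistr, copies);        // locally retrained sub-distribution
    deepCopyField(weights, copies);           // mutable work buffer
    deepCopyField(trainsample, copies);       // mutable work buffer
}
#endif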