MemoryCachedKernel.h
// -*- C++ -*-

// MemoryCachedKernel.h
//
// Copyright (C) 2007 Nicolas Chapados
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Nicolas Chapados

#ifndef MemoryCachedKernel_INC
#define MemoryCachedKernel_INC

#include <plearn/ker/Kernel.h>

namespace PLearn {

class MemoryCachedKernel : public Kernel
{
    typedef Kernel inherited;

public:
    //#####  Public Build Options  ############################################

    int m_cache_threshold;

public:
    //#####  Public Member Functions  #########################################

    MemoryCachedKernel();


    //#####  Kernel Member Functions  #########################################

    virtual void setDataForKernelMatrix(VMat the_data);

    virtual void addDataForKernelMatrix(const Vec& newRow);

    bool dataCached() const { return m_data_cache.size() > 0; }


    //#####  PLearn::Object Protocol  #########################################

    // Declares other standard object methods.
    PLEARN_DECLARE_ABSTRACT_OBJECT(MemoryCachedKernel);

    // Simply calls inherited::build() then build_()
    virtual void build();

    virtual void makeDeepCopyFromShallowCopy(CopiesMap& copies);

protected:
    //#####  Protected Member Functions  ######################################

    static void declareOptions(OptionList& ol);

    inline void dataRow(int i, Vec& row) const;

    inline Vec* dataRow(int i) const;

    template <class DerivedClass>
    void computeGramMatrixNV(Mat K, const DerivedClass* This) const;

    template <class DerivedClass,
              real (DerivedClass::*derivativeFunc)(int, int, int, real) const>
    void computeGramMatrixDerivNV(Mat& KD, const DerivedClass* This, int arg,
                                  bool derivative_func_requires_K = true) const;

    template <class DerivedClass>
    void evaluateAllIXNV(const Vec& x, const Vec& k_xi_x, int istart) const;


private:
    void build_();

protected:
    Mat m_data_cache;

    TVec<Vec> m_row_cache;
};

// Declares a few other classes and functions related to this class
DECLARE_OBJECT_PTR(MemoryCachedKernel);


//#####  dataRow  #############################################################

inline void MemoryCachedKernel::dataRow(int i, Vec& row) const
{
    if (m_data_cache.isNotNull()) {
        row = m_data_cache(i);
        row.subVecSelf(0, dataInputsize());
    }
    else {
        row.resize(dataInputsize());
        data->getSubRow(i, 0, row);
    }
}

inline Vec* MemoryCachedKernel::dataRow(int i) const
{
    // Note: assumes that the row cache exists; bound-checking in dbg/safeopt
    // builds will catch the case where it does not.
    return &m_row_cache[i];
}
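
// Usage note (illustrative, not part of the original file): the two dataRow()
// overloads above give a derived kernel fast access to the input part of a
// data row.  The Vec& form yields a view into m_data_cache when the cache is
// present, and falls back to copying from the underlying VMat otherwise; the
// pointer form returns an element of m_row_cache directly and assumes the
// cache exists.  A hypothetical inner loop might read:
//
//     Vec row_i, row_j;
//     dataRow(i, row_i);                  // view (cached) or copy (uncached)
//     dataRow(j, row_j);
//     real k_ij = evaluate(row_i, row_j);
//
//     const Vec& cached_i = *dataRow(i);  // pointer form; cache must exist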


//#####  computeGramMatrixNV  #################################################

template <class DerivedClass>
void MemoryCachedKernel::computeGramMatrixNV(Mat K, const DerivedClass* This) const
{
    if (!data)
        PLERROR("Kernel::computeGramMatrix: setDataForKernelMatrix not yet called");
    if (!is_symmetric)
        PLERROR("Kernel::computeGramMatrix: not supported for non-symmetric kernels");
    if (K.length() != data.length() || K.width() != data.length())
        PLERROR("Kernel::computeGramMatrix: the argument matrix K should be\n"
                "of size %d x %d (currently of size %d x %d)",
                data.length(), data.length(), K.length(), K.width());
    if (cache_gram_matrix && gram_matrix_is_cached) {
        K << gram_matrix;
        return;
    }

    int l=data->length();
    int m=K.mod();
    PP<ProgressBar> pb;
    int count = 0;
    if (report_progress)
        pb = new ProgressBar("Computing Gram matrix for " + classname(),
                             (l * (l + 1)) / 2);

    Vec row_i, row_j;
    real Kij;
    real* Ki;
    real* Kji;
    for (int i=0 ; i<l ; ++i) {
        Ki = K[i];
        Kji = &K[0][i];
        dataRow(i, row_i);
        for (int j=0; j<=i; ++j, Kji += m) {
            dataRow(j, row_j);
            Kij = This->DerivedClass::evaluate(row_i, row_j);
            *Ki++ = Kij;
            if (j<i)
                *Kji = Kij;
        }
        if (report_progress) {
            count += i + 1;
            PLASSERT( pb );
            pb->update(count);
        }
    }
    if (cache_gram_matrix) {
        gram_matrix.resize(l,l);
        gram_matrix << K;
        gram_matrix_is_cached = true;
    }
}
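
// Illustrative sketch (hypothetical class, not part of PLearn): a concrete
// kernel derived from MemoryCachedKernel typically implements evaluate() and
// forwards the virtual computeGramMatrix() to the cache-aware helper above.
// Passing the derived type as the template argument lets the
// This->DerivedClass::evaluate(...) call be bound statically, so the compiler
// can inline the kernel evaluation inside the O(n^2) loop.
//
//     class SketchGaussianKernel : public MemoryCachedKernel
//     {
//         typedef MemoryCachedKernel inherited;
//
//     public:
//         real m_sigma;                  // isotropic kernel width (build option)
//
//         //! K(x1,x2) = exp(-||x1 - x2||^2 / (2*sigma^2))
//         virtual real evaluate(const Vec& x1, const Vec& x2) const
//         {
//             return exp(-powdistance(x1, x2, 2.0) / (2 * m_sigma * m_sigma));
//         }
//
//         virtual void computeGramMatrix(Mat K) const
//         {
//             computeGramMatrixNV<SketchGaussianKernel>(K, this);
//         }
//     };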


//#####  computeGramMatrixDerivNV  ############################################

template <class DerivedClass,
          real (DerivedClass::*derivativeFunc)(int, int, int, real) const>
void MemoryCachedKernel::computeGramMatrixDerivNV(Mat& KD, const DerivedClass* This,
                                                  int arg, bool require_K) const
{
    if (!data)
        PLERROR("Kernel::computeGramMatrixDerivative: "
                "setDataForKernelMatrix not yet called");
    if (!is_symmetric)
        PLERROR("Kernel::computeGramMatrixDerivative: "
                "not supported for non-symmetric kernels");

    int W = nExamples();
    KD.resize(W,W);

    real KDij;
    real* KDi;
    real  K  = MISSING_VALUE;
    real* Ki = 0;                       // Current row of kernel matrix, if cached

    for (int i=0 ; i<W ; ++i) {
        KDi  = KD[i];
        if (gram_matrix_is_cached)
            Ki = gram_matrix[i];

        for (int j=0 ; j <= i ; ++j) {
            // Access the current kernel value depending on whether it's cached
            if (Ki)
                K = *Ki++;
            else if (require_K) {
                Vec& row_i = *dataRow(i);
                Vec& row_j = *dataRow(j);
                K = This->DerivedClass::evaluate(row_i, row_j);
            }

            // Compute and store the derivative
            KDij   = (This->*derivativeFunc)(i, j, arg, K);
            *KDi++ = KDij;
        }
    }
}
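
// Illustrative sketch (hypothetical names, not part of PLearn): the
// derivativeFunc template argument is a pointer to a const member function of
// the derived class with signature (int i, int j, int arg, real K), returning
// the derivative of K(i,j) with respect to the hyperparameter designated by
// 'arg'; the kernel value K is taken from the cached Gram matrix when one is
// available.  Assuming the SketchGaussianKernel sketched above and a
// Kernel::computeGramMatrixDerivative(Mat&, const string&, real) interface,
// a derived class might dispatch as follows:
//
//     real SketchGaussianKernel::derivSigma(int i, int j, int arg, real K) const
//     {
//         // dK/dsigma = K * ||x_i - x_j||^2 / sigma^3, reusing the kernel value K
//         real d2 = powdistance(*dataRow(i), *dataRow(j), 2.0);
//         return K * d2 / (m_sigma * m_sigma * m_sigma);
//     }
//
//     void SketchGaussianKernel::computeGramMatrixDerivative(
//         Mat& KD, const string& kernel_param, real epsilon) const
//     {
//         if (kernel_param == "sigma")
//             computeGramMatrixDerivNV<SketchGaussianKernel,
//                                      &SketchGaussianKernel::derivSigma>(KD, this, 0);
//         else
//             inherited::computeGramMatrixDerivative(KD, kernel_param, epsilon);
//     }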


//#####  evaluateAllIXNV  #####################################################

template <class DerivedClass>
void MemoryCachedKernel::evaluateAllIXNV(const Vec& x, const Vec& k_xi_x, int istart) const
{
    if (!data)
        PLERROR("MemoryCachedKernel::evaluateAllIXNV: setDataForKernelMatrix not yet called");

    const DerivedClass* This = static_cast<const DerivedClass*>(this);
    int l = min(data->length(), k_xi_x.size());
    Vec row_i;
    real* k_xi = &k_xi_x[0];

    for (int i=istart ; i<l ; ++i) {
        dataRow(i, row_i);
        *k_xi++ = This->DerivedClass::evaluate(row_i, x);
    }
}
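
// Illustrative sketch (hypothetical, not part of PLearn): this helper fills
// k_xi_x with K(x_i, x) for the cached data rows starting at istart, again
// binding DerivedClass::evaluate statically.  Assuming a
// Kernel::evaluate_all_i_x(const Vec&, const Vec&, real, int) interface, a
// derived class would typically forward to it:
//
//     void SketchGaussianKernel::evaluate_all_i_x(const Vec& x, const Vec& k_xi_x,
//                                                 real squared_norm_of_x, int istart) const
//     {
//         evaluateAllIXNV<SketchGaussianKernel>(x, k_xi_x, istart);
//     }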


} // end of namespace PLearn

#endif


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :