KNNVMatrix.cc
// -*- C++ -*-

// KNNVMatrix.cc
//
// Copyright (C) 2004 Olivier Delalleau
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: KNNVMatrix.cc 8617 2008-03-03 17:45:54Z nouiz $
 ******************************************************* */

// Authors: Olivier Delalleau

#include <plearn/ker/DistanceKernel.h>
#include <plearn/base/tostring.h>
#include "KNNVMatrix.h"
#include "SelectRowsVMatrix.h"
#include "SubVMatrix.h"

namespace PLearn {
using namespace std;

// KNNVMatrix //
KNNVMatrix::KNNVMatrix()
    : knn(6),
      report_progress(1)
{}

PLEARN_IMPLEMENT_OBJECT(KNNVMatrix,
                        "A VMatrix that sees the nearest neighbours of each sample in the source VMat.",
                        "Each sample is followed by its (knn-1) nearest neighbours.\n"
                        "To each row is appended an additional target, which is:\n"
                        " - 1 if it is the first of a bag of neighbours,\n"
                        " - 2 if it is the last of a bag,\n"
                        " - 0 if it is none of these,\n"
                        " - 3 if it is both (only for knn == 1).\n"
                        "In addition, if a kernel_pij kernel is provided, p_ij is appended\n"
                        "to the input part of the VMatrix, where\n"
                        "  p_ij = K(x_i,x_j) / \\sum_{k \\in knn(i), k != i} K(x_i,x_k)\n"
                        "with K = kernel_pij and j != i (for j == i, p_ij is set to -1).");
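
// A minimal usage sketch (illustration only, not part of this file):
// 'train_set' stands for a hypothetical VMat, and 'source', 'knn' and
// 'kernel_pij' are the build options declared below ('source' being assumed
// to come from the parent class):
//
//     PP<KNNVMatrix> knn_vm = new KNNVMatrix();
//     knn_vm->source = train_set;   // the VMat whose neighbours we want
//     knn_vm->knn = 4;              // each sample plus its 3 nearest neighbours
//     knn_vm->build();              // computes (or reuses) the neighbours
//
// With knn == 4, row 4*i + j of this VMatrix (0 <= j < 4) then corresponds to
// the j-th nearest neighbour of sample i in the source, row 4*i being sample i
// itself.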

// declareOptions //
void KNNVMatrix::declareOptions(OptionList& ol)
{
    declareOption(ol, "k_nn_mat", &KNNVMatrix::k_nn_mat, OptionBase::buildoption,
                  "An optional VMat of precomputed nearest neighbours (one row per sample,\n"
                  "neighbour indices sorted by increasing distance); if empty, it is filled\n"
                  "with the neighbours computed in build().");

    declareOption(ol, "knn", &KNNVMatrix::knn, OptionBase::buildoption,
                  "The number of nearest neighbours to consider (including the point itself).");

    declareOption(ol, "kernel_pij", &KNNVMatrix::kernel_pij, OptionBase::buildoption,
                  "An optional kernel used to compute the pij weights (see help).");

    declareOption(ol, "report_progress", &KNNVMatrix::report_progress, OptionBase::buildoption,
                  "Whether to display a progress bar while computing the nearest neighbours.");

// Kinda useless to declare it as an option if we recompute it in build().
// TODO See how to be more efficient.
//  declareOption(ol, "nn", &KNNVMatrix::nn, OptionBase::learntoption,
//      "The matrix containing the index of the knn nearest neighbours of\n"
//      "each data point.");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

// build //
void KNNVMatrix::build()
{
    inherited::build();
    build_();
}

// build_ //
void KNNVMatrix::build_() {
    updateMtime(source);
    updateMtime(k_nn_mat);
    if (source) {
        int n = source->length();
        bool recompute_nn = true;
        if (k_nn_mat) {
            if (k_nn_mat->length() > 0) {
                // We are given precomputed k nearest neighbours: good news.
                if (k_nn_mat->length() == source->length()) {
                    if (k_nn_mat->width() < knn) {
                        PLWARNING("In KNNVMatrix::build_ - Not enough neighbours in the given k_nn_mat, will recompute nearest neighbours");
                    } else {
                        // Looks like this is the right thing.
                        recompute_nn = false;
                        nn.resize(n, knn);
                        for (int i = 0; i < n; i++) {
                            k_nn_mat->getSubRow(i, 0, nn(i));
                        }
                    }
                } else {
                    // Lengths differ: maybe the source VMat is a subset of the matrix
                    // whose nearest neighbours have been computed.
                    // Let's try a SelectRowsVMatrix.
                    PP<SelectRowsVMatrix> smat = dynamic_cast<SelectRowsVMatrix*>((VMatrix*) source);
                    if (!smat.isNull() && smat->source->length() == k_nn_mat->length()) {
                        // Bingo !
                        // Safety warning just in case it is not what we want.
                        PLWARNING("In KNNVMatrix::build_ - Will assume the given k_nn_mat was computed on the source's underlying VMat");
                        recompute_nn = false;
                        // Now we need to retrieve the nearest neighbours within the SelectRowsVMatrix.
                        nn.resize(n, knn);
                        Vec store_nn(k_nn_mat->width());
                        for (int i = 0; i < n; i++) {
                            nn(i,0) = i;  // The nearest neighbour is always itself.
                            k_nn_mat->getRow(smat->indices[i], store_nn);
                            int k = 1;
                            for (int j = 1; j < knn; j++) {
                                bool ok = false;
                                while (!ok && k < store_nn.length()) {
                                    int q = smat->indices.find(int(store_nn[k]));
                                    if (q >= 0) {
                                        // The k-th nearest neighbour in smat->distr is in smat.
                                        ok = true;
                                        nn(i,j) = q;
                                    }
                                    k++;
                                }
                                if (!ok) {
                                    // We didn't find the j-th nearest neighbour.
                                    PLERROR("In KNNVMatrix::build_ - Not enough neighbours in the SelectRowsVMatrix");
                                }
                            }
                        }
                    } else {
                        // Maybe it's a SubVMatrix of the matrix whose nearest neighbours have been computed.
                        PP<SubVMatrix> smat_sub = dynamic_cast<SubVMatrix*>((VMatrix*) source);
                        if (    !smat_sub.isNull()
                                &&  smat_sub->source->length() == k_nn_mat->length()
                                &&  smat_sub->width() == smat_sub->source->width()) {
                            // Bingo !
                            // Safety warning just in case it is not what we want.
                            PLWARNING("In KNNVMatrix::build_ - Will assume the given k_nn_mat was computed on the source's parent VMat");
                            recompute_nn = false;
                            nn.resize(n, knn);
                            Vec store_nn(k_nn_mat->width());
                            for (int i = 0; i < n; i++) {
                                nn(i,0) = i;  // The nearest neighbour is always itself.
                                k_nn_mat->getRow(i + smat_sub->istart, store_nn);
                                int k = 1;
                                for (int j = 1; j < knn; j++) {
                                    bool ok = false;
                                    while (!ok && k < store_nn.length()) {
                                        int q = int(store_nn[k]) - smat_sub->istart;
                                        if (q >= 0 && q < smat_sub->length()) {
                                            // The k-th nearest neighbour in
                                            // smat_sub->source is in smat_sub.
                                            ok = true;
                                            nn(i,j) = q;  // q is already an index within smat_sub.
                                        }
                                        k++;
                                    }
                                    if (!ok) {
                                        // We didn't find the j-th nearest neighbour.
                                        PLERROR("In KNNVMatrix::build_ - Not enough neighbours in the SubVMatrix");
                                    }

                                }
                            }
                        } else {
                            // What the hell is this ?
                            PLWARNING("In KNNVMatrix::build_ - Don't know what to do with k_nn_mat, will recompute the nearest neighbours");
                        }
                    }
                }
            }
        }

        if (recompute_nn) {
            // First make sure we can store the result if needed.
            if (k_nn_mat) {
                if (k_nn_mat->length() > 0) {
                    PLERROR("In KNNVMatrix::build_ - The given k_nn_mat already has data, free it first");
                }
            }
            // Compute the pairwise distances.
            DistanceKernel dk(2);
            if (report_progress) {
                dk.report_progress = true;
                dk.build();
            }
            dk.setDataForKernelMatrix(source);
            Mat distances(n,n);
            dk.computeGramMatrix(distances);
            // Deduce the nearest neighbours.
            nn = dk.computeNeighbourMatrixFromDistanceMatrix(distances);
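            // (As far as one can tell from how nn is used below and elsewhere
            // in this file, each row i of the returned matrix lists sample
            // indices sorted by increasing distance to sample i, so that
            // column 0 is i itself.)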
            // Only keep the (knn) nearest ones.
            // TODO Free the memory used by the other neighbours.
            // TODO Make the matrix be a TMat<int> instead of a Mat.
            nn.resize(n, knn);
            // Store the result.
            if (k_nn_mat) {
                for (int i = 0; i < n; i++) {
                    k_nn_mat->appendRow(nn(i));
                }
            }
        }

        // Initialize correctly the various fields.
        targetsize_ = source->targetsize() + 1;
        length_ = n * knn;
        width_ = source->width() + 1;
        setMetaInfoFromSource();

        // Compute the p_ij if needed.
        if (kernel_pij) {
            // TODO REPORT PROGRESS IF NEEDED.
            inputsize_++;
            width_++;
            kernel_pij->setDataForKernelMatrix(source);
            int l = source->length();
            pij.resize(l, knn-1);
            for (int i = 0; i < l; i++) {
                real sum = 0;
                real k_ij;
                for (int j = 1; j < knn; j++) {
                    // We omit the first nearest neighbour, which is the point itself.
                    k_ij = kernel_pij->evaluate_i_j(i, int(nn(i,j)));
                    pij(i,j-1) = k_ij;
                    sum += k_ij;
                }
                pij.row(i) /= sum;
            }
        }
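
        // Worked example of the normalization above (illustration only): with
        // knn == 3 and kernel values K(x_i, x_n1) = 3 and K(x_i, x_n2) = 1 for
        // the two true neighbours of sample i, sum == 4, so the stored weights
        // are pij(i,0) == 0.75 and pij(i,1) == 0.25.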
    }
}

// makeDeepCopyFromShallowCopy //
void KNNVMatrix::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);

    // ### Call deepCopyField on all "pointer-like" fields
    // ### that you wish to be deepCopied rather than
    // ### shallow-copied.
    // ### ex:
    // deepCopyField(trainvec, copies);

    deepCopyField(source_row, copies);
    deepCopyField(nn, copies);
    deepCopyField(pij, copies);
    // Currently commented out because some of the VMats used for k_nn_mat
    // may not implement deep copy correctly.
    // TODO Put back when other VMats are fine.
//  deepCopyField(k_nn_mat, copies);
    deepCopyField(kernel_pij, copies);

    PLWARNING("In KNNVMatrix::makeDeepCopyFromShallowCopy - k_nn_mat will not be deep copied");
    //  PLERROR("KNNVMatrix::makeDeepCopyFromShallowCopy not fully (correctly) implemented yet!");
}

// getSourceIndexOf //
int KNNVMatrix::getSourceIndexOf(int i, int& i_ref, int& i_n) const {
    i_ref = i / knn;
    i_n = i % knn;
    int i_neighbour_source = int(nn(i_ref, i_n));
    return i_neighbour_source;
}
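
// Example (illustration only): with knn == 6, a request for row i == 13 gives
// i_ref == 2 and i_n == 1, so the returned source index is nn(2,1), i.e. the
// first true neighbour of sample 2.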

// getNewRow //
void KNNVMatrix::getNewRow(int i, const Vec& v) const {
    source_row.resize(source->width());
    int i_n;
    int i_ref;
    int real_i = getSourceIndexOf(i, i_ref, i_n);
    source->getRow(real_i, source_row);
    if (kernel_pij) {
        v.subVec(0, source->inputsize()) << source_row.subVec(0, source->inputsize());
        if (i_n > 0) {
            v[source->inputsize()] = pij(i_ref, i_n - 1);
        } else {
            v[source->inputsize()] = -1;
        }
    } else {
        v.subVec(0, source->inputsize() + source->targetsize())
            << source_row.subVec(0, source->inputsize() + source->targetsize());
    }
    v.subVec(inputsize(), source->targetsize())
        << source_row.subVec(source->inputsize(), source->targetsize());
    v[inputsize() + source->targetsize()] = getTag(i_n);
    if (weightsize() > 0) {
        v.subVec(inputsize() + targetsize(), weightsize())
            << source_row.subVec(source->inputsize() + source->targetsize(), source->weightsize());
    }
}
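
// Resulting row layout, as read from the code above: the source's input part,
// optionally followed by p_ij (or -1 on the first row of a bag) when a
// kernel_pij is given, then the source's target part, then the bag tag
// returned by getTag(), and finally the source's weight part if any.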

// getTag //
int KNNVMatrix::getTag(int p) const {
    // TODO Better use the constants defined in SumOverBagsVariable.h.
    if (knn == 1) return 3;
    if (p == 0) return 1;
    if (p == knn - 1) return 2;
    return 0;
}
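
// Example (illustration only): with knn == 4, the four rows of a bag receive
// the tags 1, 0, 0, 2; with knn == 1 the single row of a bag receives the
// tag 3 (it is both the first and the last of its bag).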

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :