// -*- C++ -*-

// TestImputations.cc
//
// Copyright (C) 2006 Dan Popovici, Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Dan Popovici

#define PL_LOG_MODULE_NAME "TestImputations"

#include "TestImputations.h"
#include <plearn/io/pl_log.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    TestImputations,
    "Computes imputation errors using various imputation methods.",
    "The name of the discrete variable, the name of the target, and the values to check are options.\n"
);

/////////////////////
// TestImputations //
/////////////////////
TestImputations::TestImputations()
{
}

////////////////////
// declareOptions //
////////////////////
void TestImputations::declareOptions(OptionList& ol)
{

    declareOption(ol, "min_number_of_samples", &TestImputations::min_number_of_samples,
                  OptionBase::buildoption,
                  "The minimum number of samples required to test imputations for a variable.");
    declareOption(ol, "max_number_of_samples", &TestImputations::max_number_of_samples,
                  OptionBase::buildoption,
                  "The maximum number of samples used to test imputations for a variable.");
    declareOption(ol, "mean_median_mode_file_name", &TestImputations::mean_median_mode_file_name,
                  OptionBase::buildoption,
                  "The path of the file containing the mean, median and mode statistics for all the variables.");
    declareOption(ol, "tree_conditional_mean_directory", &TestImputations::tree_conditional_mean_directory,
                  OptionBase::buildoption,
                  "The path of the directory containing the tree conditional means computed for each variable.");
    declareOption(ol, "covariance_preservation_file_name", &TestImputations::covariance_preservation_file_name,
                  OptionBase::buildoption,
                  "The path of the file with the covariances and means empirically observed on the train set.");
    declareOption(ol, "reference_set_with_covpres", &TestImputations::reference_set_with_covpres,
                  OptionBase::buildoption,
                  "The reference set corresponding to the index computed with the ball_tree, with the initial imputations.");
    declareOption(ol, "reference_set_with_missing", &TestImputations::reference_set_with_missing,
                  OptionBase::buildoption,
                  "The reference set corresponding to the index computed with the ball_tree, with missing values.");
    declareOption(ol, "missing_indicators", &TestImputations::missing_indicators,
                  OptionBase::buildoption,
                  "The vector of missing indicator field names to be excluded from the distance computation.");

    inherited::declareOptions(ol);
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void TestImputations::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    deepCopyField(min_number_of_samples, copies);
    deepCopyField(max_number_of_samples, copies);
    deepCopyField(mean_median_mode_file_name, copies);
    deepCopyField(tree_conditional_mean_directory, copies);
    deepCopyField(covariance_preservation_file_name, copies);
    deepCopyField(reference_set_with_covpres, copies);
    deepCopyField(reference_set_with_missing, copies);
    deepCopyField(missing_indicators, copies);
    inherited::makeDeepCopyFromShallowCopy(copies);

}

///////////
// build //
///////////
void TestImputations::build()
{
    // ### Nothing to add here, simply calls build_().
    inherited::build();
    build_();
}

////////////
// build_ //
////////////
void TestImputations::build_()
{
/*
  For each variable with missing values in the train set (which is in this case the test set):
    - randomly choose up to n samples with a present value for the variable
    - build a set with these samples, replacing that value with missing
    - perform the various types of imputation and compute the errors
  Validate meanmedianmode, treeconditionalmean, covariancepreservation and neighborhood.
  Create a Mat whose width is the number of variables with missing values:
  row 0: nb_present
  row 1: mean/mode imputation from preprocessing/final_train_input_preprocessed.pmat.metadata/mean_median_mode_file.pmat
  row 2: median/mode imputation from preprocessing/final_train_input_preprocessed.pmat.metadata/mean_median_mode_file.pmat
  row 3: mode
  row 4: treeconditionalmean imputation from prep/data/targeted_ind_no_imp.vmat.metadata/TreeCondMean/dir/'field_names'/Split0/test1_outputs.pmat
  row 5: covariance preservation imputation from preprocessing/final_train_input_preprocessed.pmat.metadata/covariance_file.pmat
  row 6 to 24: (row - 4) * i neighbors imputation from neighborhood/test_train_imputed_with_covariance_preservation.pmat.metadata/neighborhood_file.pmat
  Then read the train_set.
*/
    int nb_neighbors = 100;
    MODULE_LOG << "build_() called" << endl;
    if (train_set)
    {
        build_ball_tree(nb_neighbors*3);
        output_file_name = train_metadata + "/TestImputation2/output.pmat";
        for (int iteration = 1; iteration <= train_set->width(); iteration++)
        {
            cout << "In TestImputations, Iteration # " << iteration << endl;
            initialize();
            computeMeanMedianModeStats();
            computeTreeCondMeanStats();
            computeCovPresStats();
            computeNeighborhoodStats(nb_neighbors, nb_neighbors*3);
            train();
        }
        endtestimputation("In TestImputations::build_(): we are done here");
    }
}

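// build_ball_tree: prepares the nearest-neighbor machinery used by
// computeNeighborhoodStats().  It builds a WeightedDistance kernel whose
// weights are 1.0 everywhere except on the missing-indicator fields (weight
// 0.0, so they are ignored in the distance), then trains an
// ExhaustiveNearestNeighbors learner on reference_set_with_covpres and keeps
// in-memory copies (ref_cov, ref_mis) of both reference sets.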
void TestImputations::build_ball_tree(int nb_neighbors)
{
    // initialize primary dataset
    cout << "initialize the train set" << endl;
    train_length = train_set->length();
    train_width = train_set->width();
    train_input.resize(train_width);
    train_names.resize(train_width);
    train_names << train_set->fieldNames();
    train_metadata = train_set->getMetaDataDir();
    weights.resize(train_width);
    weights.fill(1.0);
    for (int mi_col = 0; mi_col < missing_indicators.length(); mi_col++)
    {
        int train_col;
        for (train_col = 0; train_col < train_width; train_col++)
        {
            if (missing_indicators[mi_col] != train_names[train_col]) continue;
            weights[train_col] = 0.0;
            break;
        }
        if (train_col >= train_width)
            PLERROR("In TestImputations::build_ball_tree(): no field with this name in the input dataset: %s", (missing_indicators[mi_col]).c_str());
    }
    weighted_distance_kernel = new WeightedDistance(weights);
/*
    if (!reference_set_with_covpres) PLERROR("In TestImputations::build_ball_tree() no reference_set_with_covpres provided.");
    if (!reference_set_with_missing) PLERROR("In TestImputations::build_ball_tree() no reference_set_with_missing provided.");
    ball_tree = new BallTreeNearestNeighbors();
    ball_tree->setOption("rmin", "1");
    ball_tree->setOption("train_method", "anchor");
    ball_tree->setOption("num_neighbors", "100");
    ball_tree->setOption("copy_input", "0");
    ball_tree->setOption("copy_target", "0");
    ball_tree->setOption("copy_weight", "0");
    ball_tree->setOption("copy_index", "1");
    ball_tree->setOption("nstages", "-1");
    ball_tree->setOption("report_progress", "1");
    ball_tree->setTrainingSet(reference_set_with_covpres, true);
    ball_tree->train();
    ref_cov = reference_set_with_covpres->toMat();
    ref_mis = reference_set_with_missing->toMat();
*/
    if (!reference_set_with_covpres) PLERROR("In TestImputations::build_ball_tree() no reference_set_with_covpres provided.");
    if (!reference_set_with_missing) PLERROR("In TestImputations::build_ball_tree() no reference_set_with_missing provided.");
    ball_tree = new ExhaustiveNearestNeighbors();
    ball_tree->setOption("num_neighbors", tostring(nb_neighbors));
    ball_tree->setOption("copy_input", "0");
    ball_tree->setOption("copy_target", "0");
    ball_tree->setOption("copy_weight", "0");
    ball_tree->setOption("copy_index", "1");
    ball_tree->setOption("nstages", "-1");
    ball_tree->setOption("report_progress", "1");
    ball_tree->distance_kernel = weighted_distance_kernel;
    ball_tree->setTrainingSet(reference_set_with_covpres, true);
    ball_tree->train();
    ref_cov = reference_set_with_covpres->toMat();
    ref_mis = reference_set_with_missing->toMat();
/*
ExhaustiveNearestNeighbors(
# bool: Whether the kernel defined by the 'distance_kernel' option should be
# interpreted as a (pseudo-)distance measure (true) or a similarity
# measure (false). Default = true.  Note that this interpretation is
# strictly specific to the class ExhaustiveNearestNeighbors.
kernel_is_pseudo_distance = 1  ;

# Ker: Alternate name for 'distance_kernel'.  (Deprecated; use only so that
# existing scripts can run.)
kernel = *1 ->DistanceKernel(
n = 2 ;
pow_distance = 0 ;
optimized = 0 ;
is_symmetric = 1 ;
report_progress = 0 ;
specify_dataset = *0 ;
cache_gram_matrix = 0 ;
data_inputsize = -1 ;
n_examples = -1  )
 ;

# int: Number of nearest-neighbors to compute.  This is usually called "K".
# The output vector is simply the concatenation of all found neighbors.
# (Default = 1)
num_neighbors = 1  ;

# bool: If true, the output contains a copy of the found input vector(s).
# (Default = false)
copy_input = 0  ;

# bool: If true, the output contains a copy of the found target vector(s).
# (Default = true)
copy_target = 1  ;

# bool: If true, the output contains a copy of the found weight.  If no
# weight is present in the training set, a weight of 1.0 is put.
# (Default = true)
copy_weight = 0  ;

# bool: If true, the output contains the index of the found neighbor
# (as the row number, zero-based, in the training set.)
# (Default = false)
copy_index = 0  ;

# Ker: An optional alternative to the Euclidean distance (DistanceKernel with
# n=2 and pow_distance=1).  It should be a 'distance-like' kernel rather
# than a 'dot-product-like' kernel, i.e. small when the arguments are
# similar, and it should always be non-negative, and 0 only if arguments
# are equal.
distance_kernel = *1 ->DistanceKernel(
n = 2 ;
pow_distance = 0 ;
optimized = 0 ;
is_symmetric = 1 ;
report_progress = 0 ;
specify_dataset = *0 ;
cache_gram_matrix = 0 ;
data_inputsize = -1 ;
n_examples = -1  )
 ;
*/

}
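// endtestimputation: final sanity check before terminating.  It re-reads the
// header file and warns if some variables are still marked 1.0 (not yet
// processed) or if a processed variable (2.0) still has a zero at column 100
// of its output record, then stops by raising the given message through
// PLERROR.  Note: callers pass an already formatted message; the variadic
// arguments are not actually forwarded to PLERROR.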
void TestImputations::endtestimputation(const char* msg, ...)
{
    va_list args;
    va_start(args, msg);
    getHeaderRecord();
    for (int train_col = 0; train_col < train_width; train_col++)
    {
        if (header_record[train_col] == 1.0)
            PLWARNING("Not all variables have been processed!");
        else if (header_record[train_col] == 2.0) {
            getOutputRecord(train_col);
            if (output_record[100] == 0.0)
                PLWARNING("Element %d,%d is at zero in the output file. Maybe this variable was not treated.", train_col, 100);
        }
    }
    PLERROR(msg, args);
}
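// initialize: (re)opens the header file under the train set's metadata
// directory, picks the next variable still flagged 1.0 (to be tested), marks
// it 2.0, and builds test_samples_set with up to max_number_of_samples rows
// of the train set for which that variable is present.  When no variable is
// left, endtestimputation() is called and processing stops.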
void TestImputations::initialize()
{

    // initialize the header file
    cout << "initialize the header file" << endl;
    train_set->lockMetaDataDir();
    header_record.resize(train_width);
    header_file_name = train_metadata + "/TestImputation2/header.pmat";
    cout << "header_file_name: " << header_file_name << endl;
    if (!isfile(header_file_name)) createHeaderFile();
    else getHeaderRecord();

    // choose a variable to test imputations for
    cout << "choose a variable to test imputations for" << endl;
    to_deal_with_total = 0;
    to_deal_with_next = -1;

    for (int train_col = 0; train_col < train_width; train_col++)
    {
        if (header_record[train_col] != 1.0) continue;
        to_deal_with_total += 1;
        if (to_deal_with_next < 0) to_deal_with_next = train_col;
    }
    cout << "total number of variables left to deal with: " << to_deal_with_total << endl;
    if (to_deal_with_next < 0)
    {
        train_set->unlockMetaDataDir();
        // reviewGlobalStats();
        endtestimputation("In TestImputations::initialize() we are done here");
    }
    cout << "next variable to deal with: " << train_names[to_deal_with_next] << " (" << to_deal_with_next << ")" << endl;
    to_deal_with_name = train_names[to_deal_with_next];
    updateHeaderRecord(to_deal_with_next);
    train_set->unlockMetaDataDir();

    // find the available samples with non-missing values for this variable
    train_stats = train_set->getStats(to_deal_with_next);
    train_total = (int)train_stats.n();
    train_missing = (int)train_stats.nmissing();
    train_present = train_total - train_missing;
    indices.resize((int) train_present);
    int ind_next = 0;
    ProgressBar* pb = new ProgressBar( "Building the indices for " + to_deal_with_name, train_length);
    for (int train_row = 0; train_row < train_length; train_row++)
    {
        to_deal_with_value = train_set->get(train_row, to_deal_with_next);
        if (is_missing(to_deal_with_value)) continue;
        if (ind_next >= indices.length())
            PLERROR("In TestImputations::initialize(): there seem to be more present values than indicated by the stats file");
        indices[ind_next] = train_row;
        ind_next += 1;
        pb->update( train_row );
    }
    delete pb;

    // shuffle the indices.
    manual_seed(123456);
    shuffleElements(indices);

    // load the test samples for this variable
    if (indices.length() > max_number_of_samples) test_length = max_number_of_samples;
    else if (indices.length() < min_number_of_samples)
        PLERROR("TestImputations::initialize(): there are fewer examples (%d) for variable %s than min_number_of_samples (%d)",
                indices.length(), to_deal_with_name.c_str(), min_number_of_samples);
    else test_length = indices.length();
    test_width = train_width;
    test_samples_set = new MemoryVMatrix(test_length, test_width);
    pb = new ProgressBar( "Loading the test samples for " + to_deal_with_name, test_length);
    for (int test_row = 0; test_row < test_length; test_row++)
    {
        train_set->getRow(indices[test_row], train_input);
        test_samples_set->putRow(test_row, train_input);
        pb->update( test_row );
    }
    delete pb;
}

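// computeMeanMedianModeStats: reads the mean, median and mode of the current
// variable from mean_median_mode_file_name (3 rows: mean, median, mode) and
// computes the mean squared error of each of these constant imputations over
// the test samples.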
void TestImputations::computeMeanMedianModeStats()
{
    if (!isfile(mean_median_mode_file_name)) PLERROR("In TestImputations::computeMeanMedianModeStats() a valid mean_median_mode_file path must be provided.");
    VMat mmmf_file = new FileVMatrix(mean_median_mode_file_name);
    int mmmf_length = mmmf_file->length();
    int mmmf_width = mmmf_file->width();
    if (mmmf_length != 3) PLERROR("In TestImputations::computeMeanMedianModeStats() there should be exactly 3 records in the mmm file, got %i.", mmmf_length);
    if (mmmf_width != train_width) PLERROR("In TestImputations::computeMeanMedianModeStats() train set and mmm width should be the same, got %i.", mmmf_width);
    real mmmf_mean = mmmf_file->get(0, to_deal_with_next);
    real mmmf_median = mmmf_file->get(1, to_deal_with_next);
    real mmmf_mode = mmmf_file->get(2, to_deal_with_next);
    mmmf_mean_err = 0.0;
    mmmf_median_err = 0.0;
    mmmf_mode_err = 0.0;
    ProgressBar* pb = new ProgressBar( "computing the mean, median and mode imputation errors for " + to_deal_with_name, test_length);
    for (int test_row = 0; test_row < test_length; test_row++)
    {
        to_deal_with_value = test_samples_set->get(test_row, to_deal_with_next);
        mmmf_mean_err += pow(to_deal_with_value - mmmf_mean, 2);
        mmmf_median_err += pow(to_deal_with_value - mmmf_median, 2);
        mmmf_mode_err += pow(to_deal_with_value - mmmf_mode, 2);
        pb->update( test_row );
    }
    delete pb;
    mmmf_mean_err = mmmf_mean_err / (real) test_length;
    mmmf_median_err = mmmf_median_err / (real) test_length;
    mmmf_mode_err = mmmf_mode_err / (real) test_length;
    // TODO: check the formula
    //mmmf_mean_stddev = sqrt(mmmf_mean_err);
    //mmmf_median_stddev = sqrt(mmmf_median_err);
    //mmmf_mode_stddev = sqrt(mmmf_mode_err);

}

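// computeTreeCondMeanStats: reads, for each test sample, the prediction made
// by the per-variable tree conditional mean model
// (<tree_conditional_mean_directory>/<variable>/Split0/test1_outputs.pmat,
// indexed by the original train-set row) and computes its mean squared error.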
void TestImputations::computeTreeCondMeanStats()
{
    tcmf_file_name = tree_conditional_mean_directory + "/" + to_deal_with_name + "/Split0/test1_outputs.pmat";
    if (!isfile(tcmf_file_name))
        PLERROR("In TestImputations::computeTreeCondMeanStats(): The '%s' file was not found in the tcf directory.", tcmf_file_name.c_str());
    tcmf_file = new FileVMatrix(tcmf_file_name);
    int tcmf_length = tcmf_file->length();
    if (tcmf_length < train_length)
        PLERROR("In TestImputations::computeTreeCondMeanStats(): there are only %d records in the tree conditional output file. We need %d.", tcmf_length, train_length);
    tcmf_mean_err = 0.0;
    ProgressBar* pb = new ProgressBar( "computing the tree conditional mean imputation errors for " + to_deal_with_name, test_length);
    for (int test_row = 0; test_row < test_length; test_row++)
    {
        to_deal_with_value = test_samples_set->get(test_row, to_deal_with_next);
        tcmf_mean_err += pow(to_deal_with_value - tcmf_file->get(indices[test_row], 0), 2);
        pb->update( test_row );
    }
    delete pb;
    tcmf_mean_err = tcmf_mean_err / (real) test_length;
    // TODO: check the formula
    //tcmf_mean_stddev = sqrt(tcmf_mean_err);
}

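// computeCovPresStats: loads the empirical covariance matrix (first
// train_width rows) and mean vector (last row) from
// covariance_preservation_file_name, then computes the mean squared error of
// the covariance-preservation imputation (see covariancePreservationValue())
// over the test samples.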
void TestImputations::computeCovPresStats()
{
    if (!isfile(covariance_preservation_file_name)) PLERROR("In TestImputations::computeCovPresStats() a valid covariance_preservation_file path must be provided.");
    VMat cvpf_file = new FileVMatrix(covariance_preservation_file_name);
    int cvpf_length = cvpf_file->length();
    int cvpf_width = cvpf_file->width();
    if (cvpf_length != train_width + 1)
        PLERROR("In TestImputations::computeCovPresStats() there should be %i records in the cvp file, got %i.", train_width + 1, cvpf_length);
    if (cvpf_width != train_width)
        PLERROR("In TestImputations::computeCovPresStats() train set and cvp width should be the same, got %i.", cvpf_width);
    //cvpf_file = new FileVMatrix(covariance_preservation_file_name);
    cvpf_cov.resize(train_width, train_width);
    cvpf_mu.resize(train_width);
    for (int cvpf_row = 0; cvpf_row < train_width; cvpf_row++)
    {
        for (int cvpf_col = 0; cvpf_col < train_width; cvpf_col++)
        {
            cvpf_cov(cvpf_row, cvpf_col) = cvpf_file->get(cvpf_row, cvpf_col);
        }
    }
    for (int cvpf_col = 0; cvpf_col < train_width; cvpf_col++)
    {
        cvpf_mu[cvpf_col] = cvpf_file->get(train_width, cvpf_col);
    }
    cvpf_mean_err = 0.0;
    ProgressBar* pb = new ProgressBar( "computing the covariance preservation imputation errors for " + to_deal_with_name, test_length);
    for (int test_row = 0; test_row < test_length; test_row++)
    {
        test_samples_set->getRow(test_row, train_input);
        // refresh the true value for this test row; it would otherwise keep the
        // value set for the last row processed by the previous pass
        to_deal_with_value = train_input[to_deal_with_next];
        cvpf_mean_err += pow(to_deal_with_value - covariancePreservationValue(to_deal_with_next), 2);
        pb->update( test_row );
    }
    delete pb;
    cvpf_mean_err = cvpf_mean_err / (real) test_length;
    // TODO: check the formula
    //cvpf_mean_stddev = sqrt(cvpf_mean_err);

}

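// covariancePreservationValue: covariance-preservation imputation of column
// 'col' given the other (non-missing) entries of train_input.  It returns
//
//     mu[col] + sum_{l != col, x_l observed} cov(l, col) * (x_l - mu[l])
//               ------------------------------------------------------
//               sum_{l != col, x_l observed} (x_l - mu[l])^2
//
// falling back to mu[col] when the denominator is zero (e.g. when all other
// values are missing).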
real TestImputations::covariancePreservationValue(int col)
{
    real cvpf_sum_cov_xl = 0;
    real cvpf_sum_xl_square = 0;
    for (int cvpf_col = 0; cvpf_col < train_width; cvpf_col++)
    {
        if (cvpf_col == col) continue;
        if (is_missing(train_input[cvpf_col])) continue;
        cvpf_sum_cov_xl += cvpf_cov(cvpf_col, col) * (train_input[cvpf_col] - cvpf_mu[cvpf_col]);
        cvpf_sum_xl_square += (train_input[cvpf_col] - cvpf_mu[cvpf_col]) * (train_input[cvpf_col] - cvpf_mu[cvpf_col]);
    }
    real cvpf_value;
    if (cvpf_sum_xl_square == 0.0) cvpf_value = cvpf_mu[col];
    else cvpf_value = cvpf_mu[col] + cvpf_sum_cov_xl / cvpf_sum_xl_square;
    return cvpf_value;
}

// nb_neighbors: the number of neighbors over which the errors are computed.
// max_miss_neigbors: the number of additional neighbors retrieved, so that
// neighbors whose value is missing can be replaced.
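// computeNeighborhoodStats: for each test sample, builds a query vector in
// which the tested variable and every missing value are replaced by their
// covariance-preservation imputation, asks the nearest-neighbor learner for
// nb_neighbors + max_miss_neigbors neighbors, and accumulates two running
// mean-squared errors as a function of the number k of neighbors used:
//   - knnf_mean_cov_err[k-1]: mean of the k nearest neighbors, taking the
//     value from ref_cov (covariance-preservation imputed) when it is missing
//     in ref_mis;
//   - knnf_mean_miss_err[k-1]: mean of the first k neighbors whose value is
//     present in ref_mis (neighbors with a missing value are skipped).
// knnf_nmiss_value_count counts how many test rows contributed to each k.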
void TestImputations::computeNeighborhoodStats(int nb_neighbors, int max_miss_neigbors)
{
    knnf_input.resize(train_width);
    knnf_neighbors.resize(nb_neighbors+max_miss_neigbors);
    knnf_mean_cov_err.resize(nb_neighbors);
    knnf_mean_miss_err.resize(nb_neighbors);
    knnf_nmiss_value_count.resize(nb_neighbors);
    knnf_mean_cov_err.clear();
    knnf_mean_miss_err.clear();
    knnf_nmiss_value_count.clear();
    ProgressBar* pb = new ProgressBar( "computing the neighborhood imputation errors for " + to_deal_with_name, test_length);
    for (int test_row = 0; test_row < test_length; test_row++)
    {
        test_samples_set->getRow(test_row, train_input);
        // refresh the true value for this test row; it would otherwise keep the
        // value set for the last row processed by the previous pass
        to_deal_with_value = train_input[to_deal_with_next];
        for (int test_col = 0; test_col < train_width; test_col++)
        {
            if (test_col == to_deal_with_next) knnf_input[test_col] = covariancePreservationValue(test_col);
            else if (is_missing(train_input[test_col])) knnf_input[test_col] = covariancePreservationValue(test_col);
            else knnf_input[test_col] = train_input[test_col];
        }
        ball_tree->computeOutput(knnf_input, knnf_neighbors);
        real knnf_sum_cov_value = 0.0;
        real knnf_sum_miss_value = 0.0;
        int  knnv_value_count = 0;
        for (int knnf_row = 0; knnf_row < knnf_neighbors.size() && knnv_value_count < nb_neighbors; knnf_row++)
        {
            real knnf_value = ref_mis((int) knnf_neighbors[knnf_row], to_deal_with_next);
            if (!is_missing(knnf_value))
            {
                knnf_sum_miss_value += knnf_value;
                knnf_nmiss_value_count[knnv_value_count] += 1;
                knnf_mean_miss_err[knnv_value_count] += pow(to_deal_with_value - (knnf_sum_miss_value / (knnv_value_count+1)), 2);
                knnv_value_count += 1;
            }
            if (!is_missing(knnf_value) && knnf_row < nb_neighbors)
            {
                knnf_sum_cov_value += knnf_value;
                knnf_mean_cov_err[knnf_row] += pow(to_deal_with_value - (knnf_sum_cov_value / (knnf_row+1)), 2);
            } else if (knnf_row < nb_neighbors) {
                knnf_value = ref_cov((int) knnf_neighbors[knnf_row], to_deal_with_next);
                if (is_missing(knnf_value))
                    PLERROR("In TestImputations::computeNeighborhoodStats(): missing value found in the reference with covariance preserved at: %i , %i",
                            (int) knnf_neighbors[knnf_row], to_deal_with_next);
                knnf_sum_cov_value += knnf_value;
                knnf_mean_cov_err[knnf_row] += pow(to_deal_with_value - (knnf_sum_cov_value / (knnf_row+1)), 2);
            }
        }
        pb->update( test_row );
    }
    delete pb;
    knnf_mean_cov_err /= test_length;
    knnf_mean_miss_err /= knnf_nmiss_value_count;
}

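// createHeaderFile: builds the header.pmat file that tracks the processing
// state of each variable: 0.0 means no missing values (nothing to do),
// 1.0 means imputations still have to be tested, and 2.0 (set later by
// updateHeaderRecord()) means the variable has been picked for processing.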
void TestImputations::createHeaderFile()
{
    cout << "in createHeaderFile()" << endl;
    for (int train_col = 0; train_col < train_width; train_col++)
    {
        train_stats = train_set->getStats(train_col);
        train_total = (int)train_stats.n();
        train_missing = (int)train_stats.nmissing();
        train_present = train_total - train_missing;
        if (train_missing <= 0.0) header_record[train_col] = 0.0;                       // no missing values, nothing to do.
        else if (train_present < min_number_of_samples){
            header_record[train_col] = -1.0; // should not happen
            PLERROR("In TestImputations::createHeaderFile: train_present (%d) < min_number_of_samples (%d) for variable %d (%s)",
                    train_present, min_number_of_samples, train_col, train_set.fieldName(train_col).c_str());
        }
        else header_record[train_col] = 1.0;                                            // test imputations
    }
    header_file = new FileVMatrix(header_file_name, 1, train_names);
    header_file->putRow(0, header_record);
}

void TestImputations::getHeaderRecord()
{
    header_file = new FileVMatrix(header_file_name, true);
    header_file->getRow(0, header_record);
}

void TestImputations::updateHeaderRecord(int var_col)
{
    header_file->put(0, var_col, 2.0);
    header_file->flush();
}

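// train: writes the error statistics of the current variable into its row of
// the output file.  The record layout is: test_length, mean, median and mode
// MSEs, tree conditional mean MSE, covariance preservation MSE, followed by
// the KNN_COV and KNN_MISS error curves and the KNN_NB_MISS counts.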
void TestImputations::train()
{
    // initialize the output file
    cout << "initialize the output file: " << output_file_name << endl;
    train_set->lockMetaDataDir();
    output_record.resize(6 + knnf_mean_cov_err.size() + knnf_mean_miss_err.size() + knnf_nmiss_value_count.size());
    if (!isfile(output_file_name)) createOutputFile();
    else getOutputRecord(to_deal_with_next);
    output_record.resize(6);
    output_record[0] = test_length;
    output_record[1] = mmmf_mean_err;
    output_record[2] = mmmf_median_err;
    output_record[3] = mmmf_mode_err;
    output_record[4] = tcmf_mean_err;
    output_record[5] = cvpf_mean_err;
    output_record.append(knnf_mean_cov_err);
    output_record.append(knnf_mean_miss_err);
    output_record.append(knnf_nmiss_value_count);
    updateOutputRecord(to_deal_with_next);
    train_set->unlockMetaDataDir();
}

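// createOutputFile: creates output.pmat with one row per train-set column and
// one output column per statistic (see train() above); all rows are
// initialized with the cleared (all-zero) output_record.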
void TestImputations::createOutputFile()
{
    output_names.resize(6, knnf_mean_cov_err.size() + knnf_mean_miss_err.size()
                        + knnf_nmiss_value_count.size());
    output_names[0] = "test_length";
    output_names[1] = "mean";
    output_names[2] = "median";
    output_names[3] = "mode";
    output_names[4] = "tree_cond";
    output_names[5] = "cov_pres";
    for (int knnf_row = 0; knnf_row < knnf_mean_cov_err.size(); knnf_row++)
    {
        output_names.append("KNN_COV_" + tostring(knnf_row+1));
    }
    for (int knnf_row = 0; knnf_row < knnf_mean_miss_err.size(); knnf_row++)
    {
        output_names.append("KNN_MISS_" + tostring(knnf_row+1));
    }
    for (int knnf_row = 0; knnf_row < knnf_nmiss_value_count.size(); knnf_row++)
    {
        output_names.append("KNN_NB_MISS_" + tostring(knnf_row+1));
    }

    output_record.clear();
    output_file = new FileVMatrix(output_file_name, train_width, output_names);
    for (int train_col = 0; train_col < train_width; train_col++)
        output_file->putRow(train_col, output_record);
}

void TestImputations::getOutputRecord(int var_col)
{
    output_file = new FileVMatrix(output_file_name, true);
    output_record.resize(output_file->width());
    output_file->getRow(var_col, output_record);
}

void TestImputations::updateOutputRecord(int var_col)
{
    output_file->putRow(var_col, output_record);
    output_file->flush();
}

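// The PLearner interface below is not used by this tester: all the work is
// driven from build_() and train(), so the learner-related methods are stubs.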
int TestImputations::outputsize() const { return 0; }
void TestImputations::computeOutput(const Vec&, Vec&) const {}
void TestImputations::computeCostsFromOutputs(const Vec&, const Vec&, const Vec&, Vec&) const {}
TVec<string> TestImputations::getTestCostNames() const
{
    TVec<string> result;
    result.append( "MSE" );
    return result;
}
TVec<string> TestImputations::getTrainCostNames() const
{
    TVec<string> result;
    result.append( "MSE" );
    return result;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :