// -*- C++ -*-

// NeighborhoodConditionalMean.cc
//
// Copyright (C) 2006 Dan Popovici, Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Dan Popovici

#define PL_LOG_MODULE_NAME "NeighborhoodConditionalMean"

#include "NeighborhoodConditionalMean.h"
#include <plearn/io/pl_log.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    NeighborhoodConditionalMean,
    "Drives conditional mean imputation experiments based on the k nearest neighbors of each sample.",
    "The test_train input and target sets, the target field names, the various values of K to try\n"
    "and the deletion thresholds to test for each K are options.\n"
);

////////////////////////////////
// NeighborhoodConditionalMean //
////////////////////////////////
NeighborhoodConditionalMean::NeighborhoodConditionalMean()
{
}

////////////////////
// declareOptions //
////////////////////
void NeighborhoodConditionalMean::declareOptions(OptionList& ol)
{
    declareOption(ol, "test_train_input_set", &NeighborhoodConditionalMean::test_train_input_set,
                  OptionBase::buildoption,
                  "The concatenated test and train input vectors with missing values.");
    declareOption(ol, "test_train_target_set", &NeighborhoodConditionalMean::test_train_target_set,
                  OptionBase::buildoption,
                  "The corresponding target vectors.");
    declareOption(ol, "number_of_test_samples", &NeighborhoodConditionalMean::number_of_test_samples,
                  OptionBase::buildoption,
                  "The number of test samples at the beginning of the concatenated test and train sets.");
    declareOption(ol, "number_of_train_samples", &NeighborhoodConditionalMean::number_of_train_samples,
                  OptionBase::buildoption,
                  "The number of train samples in the reference set used to compute the % of missing values.");
    declareOption(ol, "target_field_names", &NeighborhoodConditionalMean::target_field_names,
                  OptionBase::buildoption,
                  "The vector of names of the fields to select from the target_set as targets for the built training files.");
    declareOption(ol, "train_covariance_file_name", &NeighborhoodConditionalMean::train_covariance_file_name,
                  OptionBase::buildoption,
                  "The path to the train set file where missing values are imputed by the covariance preservation algorithm.");
    declareOption(ol, "test_train_covariance_file_name", &NeighborhoodConditionalMean::test_train_covariance_file_name,
                  OptionBase::buildoption,
                  "The path to the test_train set file where missing values are imputed by the covariance preservation algorithm.");
    declareOption(ol, "various_ks", &NeighborhoodConditionalMean::various_ks,
                  OptionBase::buildoption,
                  "The vector of various Ks to experiment with. Values must be between 1 and 100.");
    declareOption(ol, "deletion_thresholds", &NeighborhoodConditionalMean::deletion_thresholds,
                  OptionBase::buildoption,
                  "The vector of thresholds to be tested for each of the various Ks.");
    declareOption(ol, "experiment_name", &NeighborhoodConditionalMean::experiment_name,
                  OptionBase::buildoption,
                  "The name of the group of experiments to conduct.");
    declareOption(ol, "missing_indicator_field_names", &NeighborhoodConditionalMean::missing_indicator_field_names,
                  OptionBase::buildoption,
                  "The field names of the missing indicators to exclude when we experiment without them.");
    declareOption(ol, "experiment_template", &NeighborhoodConditionalMean::experiment_template,
                  OptionBase::buildoption,
                  "The template of the script to conduct the experiment.");

    inherited::declareOptions(ol);
}
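// The options declared above are normally filled in from a PLearn script.
// The commented-out snippet below is only a hypothetical sketch of such a
// script fragment: every path, field name and numeric value is illustrative,
// and the exact serialization syntax should be checked against the PLearn
// script documentation rather than taken from this example.
//
//   NeighborhoodConditionalMean(
//       test_train_input_set = AutoVMatrix(specification = "data/test_train_input.vmat");
//       test_train_target_set = AutoVMatrix(specification = "data/test_train_target.vmat");
//       number_of_test_samples = 10000;
//       number_of_train_samples = 50000;
//       target_field_names = [ "target_a" "target_b" ];
//       train_covariance_file_name = "data/train_imputed_with_covariance_preservation.pmat";
//       test_train_covariance_file_name = "data/test_train_imputed_with_covariance_preservation.pmat";
//       various_ks = [ 5 10 25 50 100 ];
//       deletion_thresholds = [ 0 0.5 0.9 ];
//       experiment_name = "neighborhood_cond_mean";
//       missing_indicator_field_names = [ "input_a_mi" "input_b_mi" ];
//       # experiment_template is omitted here; it holds the template of the
//       # script handed to the Experimentation learner for each K.
//   );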
/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void NeighborhoodConditionalMean::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    deepCopyField(test_train_input_set, copies);
    deepCopyField(test_train_target_set, copies);
    deepCopyField(number_of_test_samples, copies);
    deepCopyField(number_of_train_samples, copies);
    deepCopyField(target_field_names, copies);
    deepCopyField(test_train_covariance_file_name, copies);
    deepCopyField(train_covariance_file_name, copies);
    deepCopyField(various_ks, copies);
    deepCopyField(deletion_thresholds, copies);
    deepCopyField(experiment_name, copies);
    deepCopyField(missing_indicator_field_names, copies);
    inherited::makeDeepCopyFromShallowCopy(copies);
}

///////////
// build //
///////////
void NeighborhoodConditionalMean::build()
{
    // ### Nothing to add here, simply calls build_().
    inherited::build();
    build_();
}

////////////
// build_ //
////////////
void NeighborhoodConditionalMean::build_()
{
    MODULE_LOG << "build_() called" << endl;
    if (train_set)
    {
        for (int iteration = 1; iteration <= 1; iteration++)
        {
            cout << "In NeighborhoodConditionalMean, Iteration # " << iteration << endl;
            computeNeighborhood();
            experimentWithVariousKs();
            train();
        }
        PLERROR("In NeighborhoodConditionalMean: we are done here");
    }
}

void NeighborhoodConditionalMean::computeNeighborhood()
{
    /*
      Prepare correlation-based versions of the dataset: we have to write a VMatrix for that.
      Use the ball tree nearest neighbor search to build a ball tree using train only;
      with unknown values it would take too long.
      Find the 100 nearest neighbors of the samples in train and test, ordered from the
      closest to the furthest.
      Now we can create a neighborhood imputation for k from 1 up to 100, averaging
      the observed values of the k closest input vectors.
      If there is no observed value in the k closest, we have to use something else:
      the mean of the covariance preservation imputation of the k closest input vectors.
    */
    cout << "In NeighborhoodConditionalMean:" << endl;
    cout << endl << "****** STEP 1 ******" << endl;
    cout << "The first thing to do is to impute an initial value to the missing values in order to be able" << endl;
    cout << "to compute distances between samples." << endl;
    cout << "This step uses the CovariancePreservationVMatrix to do that." << endl;
    cout << "The CovariancePreservationVMatrix creates a covariance_file in the metadata of the source file" << endl;
    cout << "if it is not already there." << endl;
    cout << "The file is kept in train_imputed_with_covariance_preservation.pmat." << endl;
    if (train_covariance_file_name == "")
        PLERROR("In NeighborhoodConditionalMean::computeNeighborhood(): train_covariance_file_name must not be empty");
    if (isfile(train_covariance_file_name))
    {
        train_covariance_file = new FileVMatrix(train_covariance_file_name);
        train_covariance_file->defineSizes(train_covariance_file->width(), 0, 0);
        cout << train_covariance_file_name << " already exists, we are skipping this step." << endl;
    }
    else
    {
        train_covariance_vmatrix = new CovariancePreservationImputationVMatrix();
        train_covariance_vmatrix->source = train_set;
        train_covariance_vmatrix->train_set = train_set;
        train_covariance_vmatrix->build();
        train_covariance_vmat = train_covariance_vmatrix;
        train_covariance_file = new FileVMatrix(train_covariance_file_name, train_covariance_vmat->length(), train_covariance_vmat->fieldNames());
        train_covariance_file->defineSizes(train_covariance_vmat->width(), 0, 0);
        pb = new ProgressBar("Saving the train file imputed with the covariance preservation", train_covariance_vmat->length());
        train_covariance_vector.resize(train_covariance_vmat->width());
        for (int train_covariance_row = 0; train_covariance_row < train_covariance_vmat->length(); train_covariance_row++)
        {
            train_covariance_vmat->getRow(train_covariance_row, train_covariance_vector);
            train_covariance_file->putRow(train_covariance_row, train_covariance_vector);
            pb->update( train_covariance_row );
        }
        delete pb;
    }
    cout << endl << "****** STEP 2 ******" << endl;
    cout << "We do the same thing with the test_train dataset" << endl;
    cout << "using the covariance file created at the previous step." << endl;
    cout << "The file is kept in test_train_imputed_with_covariance_preservation.pmat." << endl;
    if (test_train_covariance_file_name == "")
        PLERROR("In NeighborhoodConditionalMean::computeNeighborhood(): test_train_covariance_file_name must not be empty");
    if (isfile(test_train_covariance_file_name))
    {
        test_train_covariance_file = new FileVMatrix(test_train_covariance_file_name);
        test_train_covariance_file->defineSizes(test_train_covariance_file->width(), 0, 0);
        cout << test_train_covariance_file_name << " already exists, we are skipping this step." << endl;
    }
    else
    {
        test_train_covariance_vmatrix = new CovariancePreservationImputationVMatrix();
        test_train_covariance_vmatrix->source = test_train_input_set;
        test_train_covariance_vmatrix->train_set = train_set;
        test_train_covariance_vmatrix->build();
        test_train_covariance_vmat = test_train_covariance_vmatrix;
        test_train_covariance_file = new FileVMatrix(test_train_covariance_file_name, test_train_covariance_vmat->length(), test_train_covariance_vmat->fieldNames());
        test_train_covariance_file->defineSizes(test_train_covariance_vmat->width(), 0, 0);
        pb = new ProgressBar("Saving the test_train file imputed with the covariance preservation", test_train_covariance_vmat->length());
        test_train_covariance_vector.resize(test_train_covariance_vmat->width());
        for (int test_train_covariance_row = 0; test_train_covariance_row < test_train_covariance_vmat->length(); test_train_covariance_row++)
        {
            test_train_covariance_vmat->getRow(test_train_covariance_row, test_train_covariance_vector);
            test_train_covariance_file->putRow(test_train_covariance_row, test_train_covariance_vector);
            pb->update( test_train_covariance_row );
        }
        delete pb;
    }
    cout << endl << "****** STEP 3 ******" << endl;
    cout << "With this initial imputation, we find the 100 nearest neighbors of each sample in the test_train dataset." << endl;
    cout << "Their indexes are kept in the neighborhood_file of the test_train dataset metadata." << endl;
    cout << "The BallTreeNearestNeighbors learner is used to build a tree with the train set" << endl;
    cout << "in order to speed up the identification of the 100 nearest neighbors of the test_train dataset." << endl;
    test_train_neighborhood_file_name = test_train_covariance_file_name + ".metadata/neighborhood_file.pmat";
    if (isfile(test_train_neighborhood_file_name))
    {
        test_train_neighborhood_file = new FileVMatrix(test_train_neighborhood_file_name);
        cout << test_train_neighborhood_file_name << " already exists, we are skipping this step." << endl;
    }
    else
    {
        test_train_neighborhood_learner = new BallTreeNearestNeighbors();
        test_train_neighborhood_learner->setOption("rmin", "1");
        test_train_neighborhood_learner->setOption("train_method", "anchor");
        test_train_neighborhood_learner->setOption("num_neighbors", "100");
        test_train_neighborhood_learner->setOption("copy_input", "0");
        test_train_neighborhood_learner->setOption("copy_target", "0");
        test_train_neighborhood_learner->setOption("copy_weight", "0");
        test_train_neighborhood_learner->setOption("copy_index", "1");
        test_train_neighborhood_learner->setOption("nstages", "-1");
        test_train_neighborhood_learner->setOption("report_progress", "1");
        test_train_neighborhood_learner->setTrainingSet(train_covariance_file, true);
        test_train_neighborhood_learner->train();
        test_train_neighborhood_file = new FileVMatrix(test_train_neighborhood_file_name, test_train_covariance_file->length(), 100);
        test_train_covariance_vector.resize(test_train_covariance_file->width());
        test_train_neighborhood_vector.resize(100);
        pb = new ProgressBar("Saving the test_train file with the index of the 100 nearest neighbors", test_train_covariance_file->length());
        for (int test_train_neighborhood_row = 0; test_train_neighborhood_row < test_train_covariance_file->length(); test_train_neighborhood_row++)
        {
            test_train_covariance_file->getRow(test_train_neighborhood_row, test_train_covariance_vector);
            test_train_neighborhood_learner->computeOutput(test_train_covariance_vector, test_train_neighborhood_vector);
            test_train_neighborhood_file->putRow(test_train_neighborhood_row, test_train_neighborhood_vector);
            pb->update( test_train_neighborhood_row );
        }
        delete pb;
    }
}
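// The neighborhood file saved above only stores, for each test_train sample,
// the indices of its 100 nearest train neighbors; the actual conditional-mean
// imputation is delegated to NeighborhoodImputationVMatrix in step 5 below.
// The commented-out helper is a hypothetical sketch of the imputation rule
// described in the comment at the top of computeNeighborhood(): average the
// observed values of the k closest rows, and fall back to the mean of their
// covariance-preserved imputations when none of them is observed.  It is
// illustrative only and is not the code of NeighborhoodImputationVMatrix.
//
//   real imputeWithNeighbors(int col, const TVec<int>& neighbors, int k,
//                            VMat reference_with_missing,
//                            VMat reference_with_covariance_preserved)
//   {
//       real sum = 0.0;
//       int n_observed = 0;
//       for (int n = 0; n < k && n < neighbors.length(); n++)
//       {
//           real value = reference_with_missing->get(neighbors[n], col);
//           if (!is_missing(value)) { sum += value; n_observed++; }
//       }
//       if (n_observed > 0)
//           return sum / n_observed;                  // mean of the observed values
//       for (int n = 0; n < k && n < neighbors.length(); n++)
//           sum += reference_with_covariance_preserved->get(neighbors[n], col);
//       return sum / min(k, neighbors.length());      // fallback on covariance-preserved values
//   }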
void NeighborhoodConditionalMean::experimentWithVariousKs()
{
    /*
      We control the experiments using a master header file giving the status of each k.
      If the file is not there, we create it.
      An experiment directory is created for each k to experiment with various levels
      of variable deletion.
    */
    cout << endl << "****** STEP 4 ******" << endl;
    cout << "We now prepare experimentation at various values of K, the number of neighbors, between 1 and 100." << endl;
    cout << "The first thing is to load the master header file from the test_train_imputed_with_covariance_preservation.pmat metadata." << endl;
    cout << "If it is not there, the file is created." << endl;
    train_set->lockMetaDataDir();
    master_header_file_name = test_train_covariance_file_name + ".metadata";
    master_header_file_name += "/Experiment/" + experiment_name + "/";
    master_header_file_name += "neighborhood_header.pmat";
    if (!isfile(master_header_file_name)) createMasterHeaderFile();
    else getMasterHeaderRecords();
    cout << "With the master header data, we can choose which K to experiment with." << endl;
    for (master_header_row = 0; master_header_row < master_header_length; master_header_row++)
    {
        for (master_header_col = 0; master_header_col < master_header_width; master_header_col++)
            if (master_header_records(master_header_row, master_header_col) <= 0.0) break;
        if (master_header_col < master_header_width) break;
    }
    if (master_header_row >= master_header_length)
    {
        train_set->unlockMetaDataDir();
        //reviewGlobalStats();
        PLERROR("In NeighborhoodConditionalMean: we are done here");
    }
    to_deal_with_k = various_ks[master_header_col];
    to_deal_with_target = target_field_names[master_header_row / 2];
    to_deal_with_ind = master_header_row % 2;
    cout << "Next target to deal with: " << to_deal_with_target << endl;
    cout << "Next experiment missing indicator: " << to_deal_with_ind << endl;
    cout << "Next k (number of neighbors) to experiment with: " << to_deal_with_k << endl;
    updateMasterHeaderRecords(master_header_row, master_header_col);
    train_set->unlockMetaDataDir();
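    // Hypothetical example of the master header layout scanned above, assuming
    // target_field_names = [ "target_a" "target_b" ] and various_ks = [ 5 10 25 ]
    // (these values are illustrative only).  The header then has 4 rows and 3
    // columns; each cell starts at 0.0 and is incremented by
    // updateMasterHeaderRecords() once that experiment has been taken:
    //
    //   row 0 : target_a, with missing indicators      (row % 2 == 0)
    //   row 1 : target_a, without missing indicators   (row % 2 == 1)
    //   row 2 : target_b, with missing indicators
    //   row 3 : target_b, without missing indicators
    //   col j : various_ks[j], i.e. K = 5, 10 or 25
    //
    // so to_deal_with_target = target_field_names[row / 2], to_deal_with_ind =
    // row % 2 (passed to Experimentation::experiment_without_missing_indicator
    // below), and to_deal_with_k = various_ks[col].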
    cout << endl << "****** STEP 5 ******" << endl;
    cout << "We perform the imputation with the selected number of neighbors." << endl;
    cout << "The resulting file is loaded in memory to be passed to the experimentation script." << endl;
    test_train_neighbor_imputation_vmatrix = new NeighborhoodImputationVMatrix();
    test_train_neighbor_imputation_vmatrix->source = test_train_input_set;
    test_train_neighbor_imputation_vmatrix->reference_index = test_train_neighborhood_file;
    test_train_neighbor_imputation_vmatrix->reference_with_missing = train_set;
    test_train_neighbor_imputation_vmatrix->reference_with_covariance_preserved = train_covariance_file;
    test_train_neighbor_imputation_vmatrix->number_of_neighbors = to_deal_with_k;
    test_train_neighbor_imputation_vmatrix->build();
    test_train_neighbor_imputation_vmat = test_train_neighbor_imputation_vmatrix;
    test_train_neighbor_imputation_file = new MemoryVMatrix(test_train_neighbor_imputation_vmat->length(), test_train_neighbor_imputation_vmat->width());
    test_train_neighbor_imputation_file->defineSizes(test_train_neighbor_imputation_vmat->width(), 0, 0);
    test_train_neighbor_imputation_file->declareFieldNames(test_train_neighbor_imputation_vmat->fieldNames());
    test_train_neighbor_imputation_vector.resize(test_train_neighbor_imputation_vmat->width());
    pb = new ProgressBar("Loading the test_train file imputed with the selected # of neighbors", test_train_neighbor_imputation_vmat->length());
    for (int test_train_neighbor_imputation_row = 0;
         test_train_neighbor_imputation_row < test_train_neighbor_imputation_vmat->length();
         test_train_neighbor_imputation_row++)
    {
        test_train_neighbor_imputation_vmat->getRow(test_train_neighbor_imputation_row, test_train_neighbor_imputation_vector);
        test_train_neighbor_imputation_file->putRow(test_train_neighbor_imputation_row, test_train_neighbor_imputation_vector);
        pb->update( test_train_neighbor_imputation_row );
    }
    // ::PLearn::save(header_expdir + "/" + deletion_threshold_str + "/source_names.psave", source_names);
    delete pb;
    cout << endl << "****** STEP 6 ******" << endl;
    cout << "We are now ready to launch the experimentation for this k." << endl;
    cout << "The Experimentation program will build learners for the specified deletion thresholds." << endl;
    experimentation_learner = new Experimentation();
    experimentation_learner->save_files = 0;
    experimentation_learner->experiment_without_missing_indicator = to_deal_with_ind;
    experimentation_learner->target_field_name = to_deal_with_target;
    experimentation_learner->missing_indicator_field_names = missing_indicator_field_names;
    experimentation_learner->experiment_name = experiment_name;
    experimentation_learner->number_of_test_samples = number_of_test_samples;
    experimentation_learner->number_of_train_samples = number_of_train_samples;
    experimentation_learner->reference_train_set = train_set;
    experimentation_learner->target_set = test_train_target_set;
    experimentation_learner->experiment_template = experiment_template;
    experimentation_learner->deletion_thresholds = deletion_thresholds;
    experimentation_learner->experiment_directory = test_train_covariance_file_name + ".metadata";
    experimentation_learner->experiment_directory += "/Experiment/" + experiment_name + "/";
    experimentation_learner->experiment_directory += "K_" + tostring(to_deal_with_k);
    experimentation_learner->setTrainingSet(test_train_neighbor_imputation_file);
}

void NeighborhoodConditionalMean::createMasterHeaderFile()
{
    master_header_length = target_field_names.length() * 2;
    master_header_width = various_ks.length();
    master_header_names.resize(master_header_width);
    master_header_records.resize(master_header_length, master_header_width);
    master_header_records.clear();
    for (master_header_col = 0; master_header_col < master_header_width; master_header_col++)
        master_header_names[master_header_col] = "K_" + tostring(master_header_col);
    master_header_file = new FileVMatrix(master_header_file_name, master_header_length, master_header_names);
    for (master_header_row = 0; master_header_row < master_header_length; master_header_row++)
        for (master_header_col = 0; master_header_col < master_header_width; master_header_col++)
            master_header_file->put(master_header_row, master_header_col, 0.0);
}

void NeighborhoodConditionalMean::getMasterHeaderRecords()
{
    master_header_file = new FileVMatrix(master_header_file_name, true);
    master_header_length = master_header_file->length();
    master_header_width = master_header_file->width();
    if (master_header_length != target_field_names.length() * 2)
        PLERROR("In NeighborhoodConditionalMean: master header file length and target_field_names do not agree");
    if (master_header_width != various_ks.length())
        PLERROR("In NeighborhoodConditionalMean: master header file width and various_ks do not agree");
    master_header_records.resize(master_header_length, master_header_width);
    for (master_header_row = 0; master_header_row < master_header_length; master_header_row++)
        for (master_header_col = 0; master_header_col < master_header_width; master_header_col++)
            master_header_records(master_header_row, master_header_col) = master_header_file->get(master_header_row, master_header_col);
}

void NeighborhoodConditionalMean::updateMasterHeaderRecords(int row, int col)
{
    master_header_records(row, col) += 1.0;
    master_header_file->put(row, col, master_header_records(row, col));
    master_header_file->flush();
}

/*
void NeighborhoodConditionalMean::createHeaderFile()
{
    for (main_col = 0; main_col < main_width; main_col++)
    {
        targeted_stats = targeted_set->getStats(main_col);
        targeted_missing = targeted_stats.nmissing();
        main_stats = train_set->getStats(main_col);
        main_total = main_stats.n();
        main_missing = main_stats.nmissing();
        main_present = main_total - main_missing;
        if (fields_selected[main_col] < 1) header_record[main_col] = 1;              // delete column, field not selected
        else if (targeted_missing <= 0) header_record[main_col] = 0;                 // nothing to do
        else if (main_present < min_number_of_samples) header_record[main_col] = 1;  // delete column
        else header_record[main_col] = 2;                                            // build tree
    }
    header_file = new FileVMatrix(header_file_name, 1, main_names);
    header_file->putRow(0, header_record);
}

void NeighborhoodConditionalMean::getHeaderRecord()
{
    header_file = new FileVMatrix(header_file_name, true);
    header_file->getRow(0, header_record);
    for (main_col = 0; main_col < main_width; main_col++)
    {
        if (header_record[main_col] == 0) continue;
        if (header_record[main_col] == 2) continue;
        if (header_record[main_col] == 1 && fields_selected[main_col] < 1) continue;
        if (header_record[main_col] == 1)
        {
            main_stats = train_set->getStats(main_col);
            main_total = main_stats.n();
            main_missing = main_stats.nmissing();
            main_present = main_total - main_missing;
            if (main_present >= min_number_of_samples) header_record[main_col] = 2;
            continue;
        }
    }
}

void NeighborhoodConditionalMean::updateHeaderRecord(int var_col)
{
    header_file->put(0, var_col, 3.0);
}

void NeighborhoodConditionalMean::reviewGlobalStats()
{
    cout << "There is no more variable to deal with." << endl;
    for (main_col = 0; main_col < main_width; main_col++)
    {
        if (header_record[main_col] == 0)
        {
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : no missing values for this variable in the targeted files." << endl;
            continue;
        }
        if (header_record[main_col] == 1 && fields_selected[main_col] < 1)
        {
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : field not selected." << endl;
            continue;
        }
        if (header_record[main_col] == 1)
        {
            main_stats = train_set->getStats(main_col);
            main_total = main_stats.n();
            main_missing = main_stats.nmissing();
            main_present = main_total - main_missing;
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : field deleted, only " << setw(6) << main_present << " records to train with." << endl;
            continue;
        }
        results_file_name = targeted_metadata + "/TreeCondMean/dir/" + main_names[main_col] + "/Split0/LearnerExpdir/Strat0results.pmat";
        if (!isfile(results_file_name))
        {
            header_file->put(0, main_col, 2.0);
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : missing results file." << endl;
            continue;
        }
        test_output_file_name = targeted_metadata + "/TreeCondMean/dir/" + main_names[main_col] + "/Split0/test1_outputs.pmat";
        if (!isfile(test_output_file_name))
        {
            header_file->put(0, main_col, 2.0);
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : missing test output file." << endl;
            continue;
        }
        results_file = new FileVMatrix(results_file_name);
        results_length = results_file->length();
        results_nstages = results_file->get(results_length - 1, 2);
        results_mse = results_file->get(results_length - 1, 6);
        results_std_err = results_file->get(results_length - 1, 7);
        test_output_file = new FileVMatrix(test_output_file_name);
        test_output_length = test_output_file->length();
        cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
        cout << " : tree built with " << setw(2) << (int) results_nstages << " leaves, "
             << setw(6) << test_output_length << " test output records found, "
             << "performance: " << setiosflags(ios::fixed) << setprecision(4) << results_mse
             << " +/- " << setiosflags(ios::fixed) << setprecision(4) << results_std_err << endl;
    }
}
*/

void NeighborhoodConditionalMean::train()
{
    /*
    PP<ExplicitSplitter> explicit_splitter = new ExplicitSplitter();
    explicit_splitter->splitsets.resize(1,2);
    explicit_splitter->splitsets(0,0) = output_file;
    explicit_splitter->splitsets(0,1) = train_test_file;
    cond_mean = ::PLearn::deepCopy(cond_mean_template);
    cond_mean->setOption("expdir", targeted_metadata + "/TreeCondMean/dir/" + to_deal_with_name);
    cond_mean->splitter = new ExplicitSplitter();
    cond_mean->splitter = explicit_splitter;
    cond_mean->build();
    Vec results = cond_mean->perform(true);
    */
}

int NeighborhoodConditionalMean::outputsize() const { return 0; }

void NeighborhoodConditionalMean::computeOutput(const Vec&, Vec&) const {}

void NeighborhoodConditionalMean::computeCostsFromOutputs(const Vec&, const Vec&, const Vec&, Vec&) const {}

TVec<string> NeighborhoodConditionalMean::getTestCostNames() const
{
    TVec<string> result;
    result.append( "MSE" );
    return result;
}

TVec<string> NeighborhoodConditionalMean::getTrainCostNames() const
{
    TVec<string> result;
    result.append( "MSE" );
    return result;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :