// -*- C++ -*-

// AnalyzeFieldStats.cc
//
// Copyright (C) 2006 Dan Popovici, Pascal Lamblin
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

// Authors: Dan Popovici

#define PL_LOG_MODULE_NAME "AnalyzeFieldStats"

#include "AnalyzeFieldStats.h"
#include <plearn/io/pl_log.h>
#include <plearn/io/load_and_save.h>
#include <plearn/io/fileutils.h>
#include <plearn/math/random.h>
#include <plearn/vmat/ExplicitSplitter.h>

namespace PLearn {
using namespace std;

PLEARN_IMPLEMENT_OBJECT(
    AnalyzeFieldStats,
    "Computes the correlation coefficient between various discrete values and the target.",
    "The name of the discrete variable, of the target, and the values to check are options.\n"
);

///////////////////////
// AnalyzeFieldStats //
///////////////////////
AnalyzeFieldStats::AnalyzeFieldStats() :
    min_number_of_samples(5000),
    max_number_of_samples(50000)
{
}

////////////////////
// declareOptions //
////////////////////
void AnalyzeFieldStats::declareOptions(OptionList& ol)
{
    declareOption(ol, "min_number_of_samples", &AnalyzeFieldStats::min_number_of_samples,
                  OptionBase::buildoption,
                  "The minimum number of samples required to train the learner.");
    declareOption(ol, "max_number_of_samples", &AnalyzeFieldStats::max_number_of_samples,
                  OptionBase::buildoption,
                  "The maximum number of samples used to train the learner.");
    declareOption(ol, "targeted_set", &AnalyzeFieldStats::targeted_set,
                  OptionBase::buildoption,
                  "The train and test data sets with the target field.");
    declareOption(ol, "cond_mean_template", &AnalyzeFieldStats::cond_mean_template,
                  OptionBase::buildoption,
                  "The template of the script to learn the conditional mean.");
    declareOption(ol, "fields", &AnalyzeFieldStats::fields,
                  OptionBase::buildoption,
                  "The vector of fields to consider, by name.");

    inherited::declareOptions(ol);
}

/////////////////////////////////
// makeDeepCopyFromShallowCopy //
/////////////////////////////////
void AnalyzeFieldStats::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    deepCopyField(min_number_of_samples, copies);
    deepCopyField(max_number_of_samples, copies);
    deepCopyField(targeted_set, copies);
    deepCopyField(cond_mean_template, copies);
    deepCopyField(fields, copies);
    inherited::makeDeepCopyFromShallowCopy(copies);
}

///////////
// build //
///////////
void AnalyzeFieldStats::build()
{
    // ### Nothing to add here, simply calls build_().
    inherited::build();
    build_();
}

////////////
// build_ //
////////////
void AnalyzeFieldStats::build_()
{
    MODULE_LOG << "build_() called" << endl;
    if (train_set)
    {
        for (int iteration = 1; iteration <= train_set->width(); iteration++)
        {
            cout << "In AnalyzeFieldStats, Iteration # " << iteration << endl;
            analyzeVariableStats();
            train();
        }
        PLERROR("AnalyzeFieldStats::build_() we are done here");
    }
}

void AnalyzeFieldStats::analyzeVariableStats()
{
    // initialize primary dataset
    int main_length = train_set->length();
    main_width = train_set->width();
    Vec main_input;
    main_input.resize(main_width);
    main_names.resize(main_width);
    main_names << train_set->fieldNames();
    main_metadata = train_set->getMetaDataDir();

    // validate the field instructions
    fields_width = fields.size();
    fields_selected.resize(main_width);
    fields_selected.clear();
    for (fields_col = 0; fields_col < fields_width; fields_col++)
    {
        int main_col;
        for (main_col = 0; main_col < main_width; main_col++)
        {
            if (fields[fields_col] == main_names[main_col]) break;
        }
        if (main_col >= main_width)
            PLERROR("In AnalyzeFieldStats::analyzeVariableStats() no field with this name in input dataset: %s",
                    (fields[fields_col]).c_str());
        fields_selected[main_col] = 1;
    }

    // initialize targeted datasets
    cout << "initialize train_test datasets" << endl;
    targeted_length = targeted_set->length();
    targeted_width = targeted_set->width();
    targeted_input.resize(targeted_width);
    targeted_names.resize(targeted_width);
    targeted_names << targeted_set->fieldNames();
    targeted_metadata = targeted_set->getMetaDataDir();

    // initialize the header file
    cout << "initialize the header file" << endl;
    train_set->lockMetaDataDir();
    header_record.resize(main_width);
    header_file_name = targeted_metadata + "/TreeCondMean/header.pmat";
    if (!isfile(header_file_name)) createHeaderFile();
    else getHeaderRecord();

    // choose the variable to build a conditional function for
    cout << "choose the variable to build a conditional function for" << endl;
    TVec<int> indices;
    to_deal_with_total = 0;
    to_deal_with_next = -1;
    for (int main_col = 0; main_col < main_width; main_col++)
    {
        if (header_record[main_col] != 2.0) continue;
        to_deal_with_total += 1;
        if (to_deal_with_next < 0) to_deal_with_next = main_col;
    }
    if (to_deal_with_next < 0)
    {
        train_set->unlockMetaDataDir();
        reviewGlobalStats();
        PLERROR("AnalyzeFieldStats::analyzeVariableStats() we are done here");
    }
    to_deal_with_name = main_names[to_deal_with_next];
    cout << "total number of variables left to deal with: " << to_deal_with_total << endl;
    cout << "next variable to deal with: " << main_names[to_deal_with_next] << endl;
    updateHeaderRecord(to_deal_with_next);
    train_set->unlockMetaDataDir();

    // find the available targeted records for this variable
    ProgressBar* pb = 0;
    main_stats = train_set->getStats(to_deal_with_next);
    main_total = main_stats.n();
    main_missing = main_stats.nmissing();
    main_present = main_total - main_missing;
    indices.resize((int) main_present);
    ind_next = 0;
    pb = new ProgressBar("Building the indices for " + to_deal_with_name, main_length);
    for (int main_row = 0; main_row < main_length; main_row++)
    {
        to_deal_with_value = train_set->get(main_row, to_deal_with_next);
        if (is_missing(to_deal_with_value)) continue;
        if (ind_next >= indices.length())
            PLERROR("AnalyzeFieldStats::analyzeVariableStats() There seem to be more present values than indicated by the stats file");
        indices[ind_next] = main_row;
        ind_next += 1;
        pb->update(main_row);
    }
    delete pb;

    // shuffle the indices
    manual_seed(123456);
    shuffleElements(indices);

    // initialize output datasets
    output_length = (int) main_present;
    if (output_length > max_number_of_samples) output_length = max_number_of_samples;
    output_width = 0;
    for (int main_col = 0; main_col < main_width; main_col++)
    {
        if (header_record[main_col] != 1) output_width += 1;
    }
    output_variable_src.resize(output_width);
    output_names.resize(output_width);
    output_vec.resize(output_width);
    output_path = main_metadata + "condmean_" + to_deal_with_name + ".pmat";
    output_col = 0;
    for (fields_col = 0; fields_col < fields_width; fields_col++)
    {
        int main_col;
        for (main_col = 0; main_col < main_width; main_col++)
        {
            if (fields[fields_col] == main_names[main_col]) break;
        }
        if (main_col >= main_width)
            PLERROR("In AnalyzeFieldStats::analyzeVariableStats() no field with this name in input dataset: %s",
                    (fields[fields_col]).c_str());
        if (fields_col != to_deal_with_next && header_record[main_col] != 1)
        {
            output_variable_src[output_col] = main_col;
            output_names[output_col] = fields[fields_col];
            output_col += 1;
        }
    }
    output_variable_src[output_col] = to_deal_with_next;
    output_names[output_col] = to_deal_with_name;
    output_file = new MemoryVMatrix(output_length, output_width);
    output_file->declareFieldNames(output_names);
    output_file->defineSizes(output_width - 1, 1, 0);

    // Now, we can build the training file
    pb = new ProgressBar("Building the training file for " + to_deal_with_name, output_length);
    for (int main_row = 0; main_row < output_length; main_row++)
    {
        train_set->getRow(indices[main_row], main_input);
        for (output_col = 0; output_col < output_width; output_col++)
        {
            output_vec[output_col] = main_input[output_variable_src[output_col]];
        }
        output_file->putRow(main_row, output_vec);
        pb->update(main_row);
    }
    delete pb;

    // initialize train_test datasets
    train_test_length = targeted_length;
    train_test_variable_src.resize(output_width);
    train_test_path = targeted_metadata + "targeted_" + to_deal_with_name + ".pmat";
    output_col = 0;
    for (fields_col = 0; fields_col < fields_width; fields_col++)
    {
        int main_col;
        for (main_col = 0; main_col < targeted_width; main_col++)
        {
            if (fields[fields_col] == targeted_names[main_col]) break;
        }
        if (main_col >= targeted_width)
            PLERROR("In AnalyzeFieldStats::analyzeVariableStats() no field with this name in targeted dataset: %s",
                    (fields[fields_col]).c_str());
        if (fields_col != to_deal_with_next && header_record[main_col] != 1)
        {
            train_test_variable_src[output_col] = main_col;
            output_col += 1;
        }
    }
    train_test_variable_src[output_col] = to_deal_with_next;
    train_test_file = new MemoryVMatrix(train_test_length, output_width);
    train_test_file->declareFieldNames(output_names);
    train_test_file->defineSizes(output_width - 1, 1, 0);

    // Now, we can build the targeted file
    pb = new ProgressBar("Building the targeted file for " + to_deal_with_name, train_test_length);
    for (int main_row = 0; main_row < train_test_length; main_row++)
    {
        targeted_set->getRow(main_row, targeted_input);
        for (output_col = 0; output_col < output_width; output_col++)
        {
            output_vec[output_col] = targeted_input[train_test_variable_src[output_col]];
        }
        train_test_file->putRow(main_row, output_vec);
        pb->update(main_row);
    }
    delete pb;
}

void AnalyzeFieldStats::createHeaderFile()
{
    for (int main_col = 0; main_col < main_width; main_col++)
    {
        targeted_stats = targeted_set->getStats(main_col);
        targeted_missing = targeted_stats.nmissing();
        main_stats = train_set->getStats(main_col);
        main_total = main_stats.n();
        main_missing = main_stats.nmissing();
        main_present = main_total - main_missing;
        if (fields_selected[main_col] < 1) header_record[main_col] = 1;              // delete column, field not selected
        else if (targeted_missing <= 0) header_record[main_col] = 0;                 // nothing to do
        else if (main_present < min_number_of_samples) header_record[main_col] = 1;  // delete column
        else header_record[main_col] = 2;                                            // build tree
    }
    header_file = new FileVMatrix(header_file_name, 1, main_names);
    header_file->putRow(0, header_record);
}

void AnalyzeFieldStats::getHeaderRecord()
{
    header_file = new FileVMatrix(header_file_name, true);
    header_file->getRow(0, header_record);
    for (int main_col = 0; main_col < main_width; main_col++)
    {
        if (header_record[main_col] == 0) continue;
        if (header_record[main_col] == 2) continue;
        if (header_record[main_col] == 1 && fields_selected[main_col] < 1) continue;
        if (header_record[main_col] == 1)
        {
            main_stats = train_set->getStats(main_col);
            main_total = main_stats.n();
            main_missing = main_stats.nmissing();
            main_present = main_total - main_missing;
            if (main_present >= min_number_of_samples) header_record[main_col] = 2;
            continue;
        }
    }
}

void AnalyzeFieldStats::updateHeaderRecord(int var_col)
{
    // mark this column as taken, so it is not picked again on the next iteration
    header_file->put(0, var_col, 3.0);
}

void AnalyzeFieldStats::reviewGlobalStats()
{
    cout << "There are no more variables to deal with." << endl;
    for (int main_col = 0; main_col < main_width; main_col++)
    {
        if (header_record[main_col] == 0)
        {
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : no missing values for this variable in the targeted files." << endl;
            continue;
        }
        if (header_record[main_col] == 1 && fields_selected[main_col] < 1)
        {
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : field not selected." << endl;
            continue;
        }
        if (header_record[main_col] == 1)
        {
            main_stats = train_set->getStats(main_col);
            main_total = main_stats.n();
            main_missing = main_stats.nmissing();
            main_present = main_total - main_missing;
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : field deleted, only " << setw(6) << main_present << " records to train with." << endl;
            continue;
        }
        results_file_name = targeted_metadata + "/TreeCondMean/dir/" + main_names[main_col] + "/Split0/LearnerExpdir/Strat0results.pmat";
        if (!isfile(results_file_name))
        {
            header_file->put(0, main_col, 2.0);
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : missing results file." << endl;
            continue;
        }
        test_output_file_name = targeted_metadata + "/TreeCondMean/dir/" + main_names[main_col] + "/Split0/test1_outputs.pmat";
        if (!isfile(test_output_file_name))
        {
            header_file->put(0, main_col, 2.0);
            cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
            cout << " : missing test output file." << endl;
            continue;
        }
        results_file = new FileVMatrix(results_file_name);
        results_length = results_file->length();
        results_nstages = results_file->get(results_length - 1, 2);
        results_mse = results_file->get(results_length - 1, 6);
        results_std_err = results_file->get(results_length - 1, 7);
        test_output_file = new FileVMatrix(test_output_file_name);
        test_output_length = test_output_file->length();
        cout << setiosflags(ios::left) << setw(30) << main_names[main_col];
        cout << " : tree built with " << setw(2) << (int) results_nstages << " leaves, "
             << setw(6) << test_output_length << " test output records found, "
             << "performance: " << setiosflags(ios::fixed) << setprecision(4) << results_mse
             << " +/- " << setiosflags(ios::fixed) << setprecision(4) << results_std_err << endl;
    }
}

void AnalyzeFieldStats::train()
{
    PP<ExplicitSplitter> explicit_splitter = new ExplicitSplitter();
    explicit_splitter->splitsets.resize(1, 2);
    explicit_splitter->splitsets(0, 0) = output_file;
    explicit_splitter->splitsets(0, 1) = train_test_file;
    PP<PTester> cond_mean = ::PLearn::deepCopy(cond_mean_template);
    cond_mean->setOption("expdir", targeted_metadata + "/TreeCondMean/dir/" + to_deal_with_name);
    cond_mean->splitter = new ExplicitSplitter();
    cond_mean->splitter = explicit_splitter;
    cond_mean->build();
    Vec results = cond_mean->perform(true);
}

int AnalyzeFieldStats::outputsize() const { return 0; }
void AnalyzeFieldStats::computeOutput(const Vec&, Vec&) const {}
void AnalyzeFieldStats::computeCostsFromOutputs(const Vec&, const Vec&, const Vec&, Vec&) const {}
TVec<string> AnalyzeFieldStats::getTestCostNames() const
{
    TVec<string> result;
    result.append("MSE");
    return result;
}
TVec<string> AnalyzeFieldStats::getTrainCostNames() const
{
    TVec<string> result;
    result.append("MSE");
    return result;
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :