PLearn 0.1
// -*- C++ -*-

// PLearn (A C++ Machine Learning Library)
// Copyright (C) 1998 Pascal Vincent
// Copyright (C) 1999,2000 Pascal Vincent, Yoshua Bengio and University of Montreal
//

// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org


/* *******************************************************
 * $Id: databases.cc 8593 2008-02-27 21:16:24Z nouiz $
 * AUTHORS: Pascal Vincent
 * This file is part of the PLearn library.
 ******************************************************* */
#include "databases.h"
#include <plearn/vmat/ConcatRowsVMatrix.h>
#include <plearn/db/NistDB.h>
#include <plearn/math/random.h>
#include <plearn/vmat/RemapLastColumnVMatrix.h>
#include <plearn/vmat/ShiftAndRescaleVMatrix.h>
#include <plearn/vmat/Splitter.h>
#include <plearn/vmat/VMat_basic_stats.h>
#include <plearn/io/MatIO.h>
#include <plearn/base/stringutils.h>
#include <plearn/math/TMat_maths.h>

namespace PLearn {
using namespace std;


#define JAVA "java"

Mat input2dSet(const PPath& filename)
{
    Mat data;
    if(!pathexists(filename))
    {
        string systemstring = string(JAVA) + " InputPoints " + filename + " -1 1 -1 1";
        system(systemstring.c_str());
    }
    loadAscii(filename, data);
    shuffleRows(data);
    return data;
}

// normalize training_set, validation_set and test_set according to the mean
// and stddev computed on training_set
void normalizeDataSets(Mat& training_set, Mat& validation_set, Mat& test_set)
{
    int inputsize = training_set.width()-1;
    Mat training_inputs = training_set.subMatColumns(0,inputsize);
    Vec meanvec(inputsize);
    Vec stddevvec(inputsize);
    computeMeanAndStddev(training_inputs, meanvec, stddevvec);
    training_inputs -= meanvec;
    training_inputs /= stddevvec;
    Mat validation_inputs = validation_set.subMatColumns(0,inputsize);
    validation_inputs -= meanvec;
    validation_inputs /= stddevvec;
    Mat test_inputs = test_set.subMatColumns(0,inputsize);
    test_inputs -= meanvec;
    test_inputs /= stddevvec;
}

// normalize training_set, validation_set and test_set according to the mean
// and stddev computed on training_set
void normalizeDataSets(VMat& training_set, VMat& validation_set, VMat& test_set)
{
    int inputsize = training_set.width()-1;
    Mat training_inputs = training_set.subMatColumns(0,inputsize);
    Vec meanvec(inputsize);
    Vec stddevvec(inputsize);
    computeMeanAndStddev(training_inputs, meanvec, stddevvec);
    training_inputs -= meanvec;
    training_inputs /= stddevvec;
    Mat validation_inputs = validation_set.subMatColumns(0,inputsize);
    validation_inputs -= meanvec;
    validation_inputs /= stddevvec;
    Mat test_inputs = test_set.subMatColumns(0,inputsize);
    test_inputs -= meanvec;
    test_inputs /= stddevvec;
}

// normalize both training_set and test_set according to the mean and stddev
// computed on training_set
void normalizeDataSets(Mat& training_set, Mat& test_set)
{
    int inputsize = training_set.width()-1;
    Mat training_inputs = training_set.subMatColumns(0,inputsize);
    Vec meanvec(inputsize);
    Vec stddevvec(inputsize);
    computeMeanAndStddev(training_inputs, meanvec, stddevvec);
    training_inputs -= meanvec;
    training_inputs /= stddevvec;
    Mat test_inputs = test_set.subMatColumns(0,inputsize);
    test_inputs -= meanvec;
    test_inputs /= stddevvec;
}

// subtract the mean and divide by the stddev (both estimated globally over m)
void normalizeDataSet(Mat& m)
{
    Vec meanvec(m.width());
    Vec stddevvec(m.width());
    computeMeanAndStddev(m,meanvec,stddevvec);
    m -= meanvec;
    m /= stddevvec;
}

void splitTrainValidTest(VMat &data_set, VMat &train_set, VMat &valid_set,
                         real valid_fraction, VMat &test_set, real test_fraction,
                         bool normalize)
{
    int nvalid = int((real)data_set.length()*valid_fraction);
    int ntest = int((real)data_set.length()*test_fraction);
    int ntrain = data_set.length()-(nvalid+ntest);

    train_set = data_set.subMatRows(0,ntrain);
    valid_set = data_set.subMatRows(ntrain, nvalid);
    test_set = data_set.subMatRows(ntrain+nvalid,ntest);
    if (normalize) {
        VMat train_set_inputs = train_set.subMatColumns(0,data_set.width()-1);
        VMat valid_set_inputs = valid_set.subMatColumns(0,data_set.width()-1);
        VMat test_set_inputs = test_set.subMatColumns(0,data_set.width()-1);
        normalizeDataSets(train_set_inputs,valid_set_inputs,test_set_inputs);
    }
}
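// Illustrative usage sketch (added commentary, not part of the original
// file): carving a dataset into 70% train / 15% valid / 15% test, with the
// inputs standardized by the training-set statistics:
//
//     VMat data = loadBreastCancerWisconsin(false, false);
//     VMat train, valid, test;
//     splitTrainValidTest(data, train, valid, 0.15, test, 0.15,
//                         true /* normalize */);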
VMat reduceInputSize(real fraction, VMat data)
{
    int n_inputs = data->width()-1;
    int reduce_n_inputs = (int)(fraction*n_inputs);
    cout << "use " << reduce_n_inputs << " of " << n_inputs << endl;
    VMat new_data = data.subMatColumns(n_inputs-reduce_n_inputs,1+reduce_n_inputs);
    return new_data;
}

VMat reduceDataSetSize(real fraction, VMat data)
{
    int n_examples = data->length();
    int new_n_examples = (int)(fraction*n_examples);
    return data.subMatRows(0,new_n_examples);
}

// remaps the classnums in the last column: values <= 0 become
// remap_minval_to, all others become remap_maxval_to (e.g. {0,1} -> {-1,+1})
void remapClassnums(VMat& data, real remap_minval_to, real remap_maxval_to)
{
    int inputsize = data.width()-1;
    for(int i=0; i<data.length(); i++)
    {
        if(data(i,inputsize)<=0.0)
            data->put(i,inputsize,remap_minval_to);
        else
            data->put(i,inputsize,remap_maxval_to);
    }
}
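// Illustrative sketch (added commentary): turning {0,1} labels into the
// {-1,+1} targets expected by margin-based losses:
//
//     remapClassnums(data, -1.0, 1.0);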
VMat loadBreastCancerWisconsin(bool normalize, bool uniq)
{
    Mat data;
    if(uniq)
        loadAscii("DBDIR:Breast/breast-cancer-wisconsin-uniq.amat",data);
    else
        loadAscii("DBDIR:Breast/breast-cancer-wisconsin.amat",data);
    if(normalize)
    {
        Mat datainput = data.subMatColumns(0,data.width()-1);
        normalizeDataSet(datainput);
    }
    shuffleRows(data);
    return VMat(data);
}

int loadBreastCancer(VMat& training_set, VMat& validation_set, VMat& test_set, int ntrain, int nvalid, bool uniq)
{
    Mat data;
    if(uniq)
        loadAscii("DBDIR:Breast/breast-cancer-wisconsin-uniq.amat",data);
    else
        loadAscii("DBDIR:Breast/breast-cancer-wisconsin.amat",data);

    shuffleRows(data);

    // split the data into training_set, validation_set and test_set
    int ntest = data.length()-(ntrain+nvalid);
    Mat training_data = data.subMatRows(0,ntrain);
    Mat validation_data = data.subMatRows(ntrain, nvalid);
    Mat test_data = data.subMatRows(ntrain+nvalid,ntest);

    // normalize the inputs
    normalizeDataSets(training_data,validation_data,test_data);

    training_set = VMat(training_data);
    validation_set = VMat(validation_data);
    test_set = VMat(test_data);
    return 2; // 2 classes
}

VMat loadPimaIndians(bool normalize)
{
    Mat data = loadUCIMLDB("UCI_MLDB_REP:pima-indians-diabetes/pima-indians-diabetes.data");
    if(normalize)
    {
        Mat datainput = data.subMatColumns(0,data.width()-1);
        normalizeDataSet(datainput);
    }
    shuffleRows(data);
    return VMat(data);
}

VMat loadHousing(bool normalize)
{
    Mat data;
    loadGnuplot("UCI_MLDB_REP:housing/housing.data", data);
    Mat inputs = data.subMatColumns(0,13);
    Mat targets = data.subMatColumns(13,1);
    if (normalize)
    {
        // normalize the inputs
        normalizeDataSet(inputs);
        // put the targets in a nicer range by dividing by 100
        targets *= real(0.01);
    }
    return VMat(data);
}

VMat loadSonar()
{
    Mat data = loadUCIMLDB("UCI_MLDB_REP:undocumented/connectionist-bench/sonar/sonar.all-data");
    shuffleRows(data);
    // no need to normalize
    return VMat(data);
}

VMat loadIonosphere()
{
    Mat data = loadUCIMLDB("UCI_MLDB_REP:ionosphere/ionosphere.data");
    shuffleRows(data);
    // no need to normalize
    return VMat(data);
}

VMat loadDiabetes(bool normalize)
{
    Mat data;
    loadAscii("DBDIR:Diabetes/diabetes.amat",data);

    if(normalize)
    {
        Mat datainput = data.subMatColumns(0,data.width()-1);
        normalizeDataSet(datainput);
    }
    shuffleRows(data);
    return VMat(data);
}

int loadDiabetes(VMat& training_set, VMat& validation_set, VMat& test_set, int ntrain, int nvalid)
{
    Mat data;
    loadAscii("DBDIR:Diabetes/diabetes.amat",data);

    shuffleRows(data);

    // split the data into training_data, validation_data and test_data
    int ntest = data.length()-(ntrain+nvalid);
    Mat training_data = data.subMatRows(0,ntrain);
    Mat validation_data = data.subMatRows(ntrain, nvalid);
    Mat test_data = data.subMatRows(ntrain+nvalid,ntest);

    // normalize the inputs
    normalizeDataSets(training_data,validation_data,test_data);

    training_set = VMat(training_data);
    validation_set = VMat(validation_data);
    test_set = VMat(test_data);
    return 2; // 2 classes
}

int loadATT800(VMat& training_set, VMat& test_set)
{
    Mat data;
    loadAscii("DBDIR:ATT800/att800.amat",data);

    // preprocessing the data:
    Mat durations = data.subMatColumns(0,12);
    Mat daytimes = data.subMatColumns(12,24);
    Mat classnums = data.column(36);

    Mat newdata(data.length(), data.width()+2);
    Mat new_total_durations = newdata.column(0);
    Mat new_durations = newdata.subMatColumns(1,12);
    Mat new_total_daytimes = newdata.column(13);
    Mat new_daytimes = newdata.subMatColumns(14,24);
    Mat new_classnums = newdata.column(38);

    new_durations << durations;
    new_daytimes << daytimes;
    new_classnums << classnums;
    for(int i=0; i<data.length(); i++)
    {
        new_total_durations(i,0) = sum(new_durations(i), false);
        if(new_total_durations(i,0) > 0.0)
        {
            Vec new_durations_i = new_durations(i);
            new_durations_i /= new_total_durations(i,0);
        }
        new_total_daytimes(i,0) = sum(new_daytimes(i), false);
        if(new_total_daytimes(i,0) > 0.0)
        {
            Vec new_daytimes_i = new_daytimes(i);
            new_daytimes_i /= new_total_daytimes(i,0);
        }
    }

    shuffleRows(newdata);
    Mat training_data = newdata.subMatRows(0,400);
    Mat test_data = newdata.subMatRows(100,185);

    // normalize the new inputs...
    normalizeDataSets(training_data,test_data);

    training_set = VMat(training_data);
    test_set = VMat(test_data);
    return 2; // 2 classes
}
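// Illustrative sketch (added commentary): a typical call to one of the
// fixed-split loaders above; the 400/100 split sizes are arbitrary here.
//
//     VMat train, valid, test;
//     int nclasses = loadBreastCancer(train, valid, test, 400, 100, true);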
VMat loadLetters(bool normalize)
{
    Mat letters;
    loadAscii("DBDIR:Letter/letter.amat",letters);

    if(normalize)
    {
        Mat datainput = letters.subMatColumns(0,letters.width()-1);
        normalizeDataSet(datainput);
    }

    return VMat(letters);
}


VMat loadLetters(const char* class0, const char* class1, bool normalize)
{
    int letter_classnum[26];
    for(int l=0; l<26; l++)
        letter_classnum[l] = -1;
    for(unsigned int i=0; i<strlen(class0); i++)
        letter_classnum[class0[i]-'A'] = 0;
    for(unsigned int i=0; i<strlen(class1); i++)
        letter_classnum[class1[i]-'A'] = 1;

    Mat letters;
    loadAscii("DBDIR:Letter/letter.amat",letters);

    int nkeptsamples = 0;
    for(int i=0; i<letters.length(); i++)
        if(letter_classnum[int(letters(i,letters.width()-1))] >= 0)
            nkeptsamples++;

    Mat keptletters(nkeptsamples, letters.width());
    int n = 0;
    for(int i=0; i<letters.length(); i++)
    {
        int classnum = letter_classnum[int(letters(i,letters.width()-1))];
        if(classnum >= 0)
        {
            keptletters(n) << letters(i);
            keptletters(n,keptletters.width()-1) = classnum;
            n++;
        }
    }

    if(normalize)
    {
        Mat datainput = keptletters.subMatColumns(0,keptletters.width()-1);
        normalizeDataSet(datainput);
    }

    return VMat(keptletters);
}

int loadLetters(VMat& training_set, VMat& validation_set, VMat& test_set, char* which_letters, real validation_fraction, real test_fraction, bool do_shuffle)
{
    int letter_classnum[26];
    for(int l=0; l<26; l++)
        letter_classnum[l] = -1;
    int classnum = 0;
    for(unsigned int i=0; i<strlen(which_letters); i++)
        letter_classnum[which_letters[i]-'A'] = classnum++;

    Mat letters;
    loadAscii("DBDIR:Letter/letter.amat",letters);

    Mat keptletters(letters.length(),letters.width());
    int k=0;
    for(int i=0; i<letters.length(); i++)
    {
        int c = letter_classnum[(int)letters(i,letters.width()-1)];
        if(c!=-1)
        {
            keptletters(k) << letters(i);
            keptletters(k,keptletters.width()-1) = c;
            k++;
        }
    }
    keptletters.resize(k,letters.width());

    letters = keptletters.copy();

    // free memory used by keptletters
    keptletters = Mat();
    if (do_shuffle) {
        shuffleRows(letters);
    }
    int nvalid = int((real)letters.length()*validation_fraction);
    int ntest = int((real)letters.length()*test_fraction);
    int ntrain = letters.length()-(nvalid+ntest);

    Mat training_data = letters.subMatRows(0,ntrain);
    Mat validation_data = letters.subMatRows(ntrain, nvalid);
    Mat test_data = letters.subMatRows(ntrain+nvalid,ntest);

    // normalize the inputs
    normalizeDataSets(training_data,validation_data,test_data);

    training_set = VMat(training_data);
    validation_set = VMat(validation_data);
    test_set = VMat(test_data);
    return int(strlen(which_letters));
}
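// Illustrative sketch (added commentary): building a 3-class problem from
// the letters 'A', 'B' and 'C', with 25% validation and 25% test data:
//
//     VMat train, valid, test;
//     char which[] = "ABC";
//     int nclasses = loadLetters(train, valid, test, which, 0.25, 0.25, true);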
VMat loadLetters(int n_letters, bool do_shuffle)
{
    if (n_letters > 26 || n_letters < 1)
        PLERROR("In loadLetters: alphabet is at most 26 letters (and at least 1 letter)!");
    int letter_classnum[26];
    for(int l=0; l<26; l++)
        letter_classnum[l] = -1;
    int classnum = 0;
    int letter = 0;
    for(int i=0; i<n_letters; i++)
        letter_classnum[letter++] = classnum++;

    Mat letters;
    loadAscii("DBDIR:Letter/letter.amat",letters);

    Mat keptletters(letters.length(),letters.width());
    int k=0;
    for(int i=0; i<letters.length(); i++)
    {
        int c = letter_classnum[(int)letters(i,letters.width()-1)];
        if(c!=-1)
        {
            keptletters(k) << letters(i);
            keptletters(k,keptletters.width()-1) = c;
            k++;
        }
    }
    keptletters.resize(k,letters.width());

    letters = keptletters.copy();

    // free memory used by keptletters
    keptletters = Mat();
    if (do_shuffle) {
        shuffleRows(letters);
    }
    return VMat(letters);
}

int loadLetters(VMat& training_set, VMat& validation_set, VMat& test_set, int n_letters, real validation_fraction, real test_fraction, bool do_shuffle)
{
    VMat letters = loadLetters(n_letters,do_shuffle);
    int nvalid = int((real)letters.length()*validation_fraction);
    int ntest = int((real)letters.length()*test_fraction);
    int ntrain = letters.length()-(nvalid+ntest);

    Mat training_data = letters.subMatRows(0,ntrain);
    Mat validation_data = letters.subMatRows(ntrain, nvalid);
    Mat test_data = letters.subMatRows(ntrain+nvalid,ntest);

    // normalize the inputs
    normalizeDataSets(training_data,validation_data,test_data);

    training_set = VMat(training_data);
    validation_set = VMat(validation_data);
    test_set = VMat(test_data);
    return n_letters;
}
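// Added commentary on the function below: each Corel "histo<classnum>" file
// is read as a raw block of len x (16*16*16*2) float32 values (len itself
// comes from the matching "size<classnum>" file), and only every other
// column (the even indices) is kept as a feature. Note that the train block
// byte-swaps under LITTLEENDIAN while the valid/test blocks swap under
// BIGENDIAN; this asymmetry is preserved verbatim from the original source.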
void loadCorelDatamat(int classnum, Mat& train, Mat& valid, Mat& test)
{
    int len;
    int width = 16*16*16*2;
    PPath filename;

    // Load train
    {
        filename = "DBDIR:Corel/train/size" + tostring(classnum);
        ifstream sizein(filename.c_str()); // TODO: use a PStream?
        sizein >> len;
        Mat datamat(len, width);

        filename = "DBDIR:Corel/train/histo" + tostring(classnum);
        ifstream datain(filename.c_str());
#ifdef USEFLOAT
        datain.read((char*)datamat.data(), len*width*4);
#ifdef LITTLEENDIAN
        reverse_float(datamat.data(), len*width);
#endif
#else
        PLERROR("In loadCorelDatamat USEDOUBLE case not yet implemented correctly");
#endif
        // Now copy only the useful features
        train.resize(len,width/2);
        for(int i=0; i<train.length(); i++)
            for(int j=0; j<train.width(); j++)
                train(i,j) = datamat(i,2*j);
    }

    // Load valid
    {
        filename = "DBDIR:Corel/valid/size" + tostring(classnum);
        ifstream sizein(filename.c_str());
        sizein >> len;
        Mat datamat(len, width);

        filename = "DBDIR:Corel/valid/histo" + tostring(classnum);
        ifstream datain(filename.c_str());
#ifdef USEFLOAT
        datain.read((char*)datamat.data(), len*width*4);
#ifdef BIGENDIAN
        reverse_float(datamat.data(), len*width);
#endif
#else
        PLERROR("In loadCorelDatamat USEDOUBLE case not yet implemented correctly");
#endif

        // Now copy only the useful features
        valid.resize(len,width/2);
        for(int i=0; i<valid.length(); i++)
            for(int j=0; j<valid.width(); j++)
                valid(i,j) = datamat(i,2*j);
    }

    // Load test
    {
        filename = "DBDIR:Corel/test/size" + tostring(classnum);
        ifstream sizein(filename.c_str());
        sizein >> len;
        Mat datamat(len, width);

        filename = "DBDIR:Corel/test/histo" + tostring(classnum);
        ifstream datain(filename.c_str());
#ifdef USEFLOAT
        datain.read((char*)datamat.data(), len*width*4);
#ifdef BIGENDIAN
        reverse_float(datamat.data(), len*width);
#endif
#else
        PLERROR("In loadCorelDatamat USEDOUBLE case not yet implemented correctly");
#endif

        // Now copy only the useful features
        test.resize(len,width/2);
        for(int i=0; i<test.length(); i++)
            for(int j=0; j<test.width(); j++)
                test(i,j) = datamat(i,2*j);
    }
}

Mat smoothCorelHisto(Mat& data)
{
    Mat res(data.length(), 7*7*7);
    for(int n=0; n<data.length(); n++)
    {
        real* r = res[n];
        real* d = data[n];
        for(int i=0; i<7; i++)
            for(int j=0; j<7; j++)
                for(int k=0; k<7; k++,r++)
                {
                    *r += 0.15*d[i*2*16*16+j*2*16+k*2];
                    *r += 0.35*d[(i*2+1)*16*16+(j*2+1)*16+k*2+1];
                    *r += 0.35*d[(i*2+2)*16*16+(j*2+2)*16+k*2+2];
                    *r += 0.15*d[(i*2+3)*16*16+(j*2+3)*16+k*2+3];
                }
    }
    return res;
}
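// Added commentary: smoothCorelHisto downsamples each 16x16x16 histogram to
// 7x7x7 by stepping two bins per output cell and blending four taps along
// the diagonal of the colour cube with weights 0.15/0.35/0.35/0.15
// (which sum to 1).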
void loadCorel(Mat& training_set, Mat& validation_set, Mat& test_set, int negative_class, int positive_class)
{
    // A is the negative class (will have 0 classnums)
    // B is the positive class (will have 1 classnums)

    Mat trainA, validA, testA;
    Mat trainB, validB, testB;

    loadCorelDatamat(negative_class, trainA, validA, testA);
    trainA = smoothCorelHisto(trainA);
    validA = smoothCorelHisto(validA);
    testA = smoothCorelHisto(testA);
    loadCorelDatamat(positive_class, trainB, validB, testB);
    trainB = smoothCorelHisto(trainB);
    validB = smoothCorelHisto(validB);
    testB = smoothCorelHisto(testB);
    int inputsize = trainA.width();

    training_set.resize(trainA.length()+trainB.length(), inputsize+1);
    Mat trainingAinputs = training_set.subMat(0, 0, trainA.length(), inputsize);
    Mat trainingAclassnums = training_set.subMat(0, inputsize, trainA.length(), 1);
    Mat trainingBinputs = training_set.subMat(trainA.length(), 0, trainB.length(), inputsize);
    Mat trainingBclassnums = training_set.subMat(trainA.length(), inputsize, trainB.length(), 1);
    trainingAinputs << trainA;
    trainingAclassnums.fill(0.0);
    trainingBinputs << trainB;
    trainingBclassnums.fill(1.0);
    shuffleRows(training_set);

    validation_set.resize(validA.length()+validB.length(), inputsize+1);
    Mat validAinputs = validation_set.subMat(0, 0, validA.length(), inputsize);
    Mat validAclassnums = validation_set.subMat(0, inputsize, validA.length(), 1);
    Mat validBinputs = validation_set.subMat(validA.length(), 0, validB.length(), inputsize);
    Mat validBclassnums = validation_set.subMat(validA.length(), inputsize, validB.length(), 1);
    validAinputs << validA;
    validAclassnums.fill(0.0);
    validBinputs << validB;
    validBclassnums.fill(1.0);
    shuffleRows(validation_set);

    test_set.resize(testA.length()+testB.length(), inputsize+1);
    Mat testAinputs = test_set.subMat(0, 0, testA.length(), inputsize);
    Mat testAclassnums = test_set.subMat(0, inputsize, testA.length(), 1);
    Mat testBinputs = test_set.subMat(testA.length(), 0, testB.length(), inputsize);
    Mat testBclassnums = test_set.subMat(testA.length(), inputsize, testB.length(), 1);
    testAinputs << testA;
    testAclassnums.fill(0.0);
    testBinputs << testB;
    testBclassnums.fill(1.0);
    shuffleRows(test_set);
}

void loadCallxx(int year, VMat& d)
{
    Mat data;
    PPath filename = "DBDIR:Finance/call" + tostring(year) + ".stc.data";
    loadAscii(filename, data);
    d = VMat(data);
}


void loadUSPS(VMat& trainset, VMat& testset, bool use_smooth)
{
    Mat traininputs;
    Mat testinputs;
    Mat traindesired;
    Mat testdesired;

    if(use_smooth)
    {
        traininputs = loadSNMat("DBDIR:usps/train-patterns-smoo.mat");
        testinputs = loadSNMat("DBDIR:usps/test-patterns-smoo.mat");
    }
    else
    {
        traininputs = loadSNMat("DBDIR:usps/ocr16-train.mat");
        testinputs = loadSNMat("DBDIR:usps/ocr16-test.mat");
    }
    //traininputs += 1.0;
    //traininputs /= 2.0;
    //testinputs += 1.0;
    //testinputs /= 2.0;

    traindesired = loadSNMat("DBDIR:usps/train-desired.mat");
    Mat trainclasses(traininputs.length(),1);
    for(int i=0; i<traindesired.length(); i++)
        trainclasses(i,0) = argmax(traindesired(i));

    testdesired = loadSNMat("DBDIR:usps/test-desired.mat");
    Mat testclasses(testinputs.length(),1);
    for(int i=0; i<testdesired.length(); i++)
        testclasses(i,0) = argmax(testdesired(i));

    trainset = hconcat(traininputs,trainclasses);
    testset = hconcat(testinputs,testclasses);
}
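// Illustrative sketch (added commentary): loading the smoothed USPS digits;
// the last column of each returned VMat holds the class (argmax of the
// one-hot "desired" matrix, i.e. a digit in 0..9):
//
//     VMat trainset, testset;
//     loadUSPS(trainset, testset, true);
//     int inputsize = trainset.width() - 1;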
VMat loadUSPS(bool use_smooth)
{
    Mat traininputs;
    Mat traindesired;

    if(use_smooth)
        traininputs = loadSNMat("DBDIR:usps/patterns-smoo.mat");
    else
        traininputs = loadSNMat("DBDIR:usps/ocr16.pat");

    traininputs += real(1.0);
    traininputs /= real(2.0);

    traindesired = loadSNMat("DBDIR:usps/desired.mat");
    Mat trainclasses(traininputs.length(),1);
    for(int i=0; i<traindesired.length(); i++)
        trainclasses(i,0) = argmax(traindesired(i));

    Mat trainset = hconcat(traininputs,trainclasses);

    return trainset;
}

void loadLetters(int& inputsize, int& nclasses, VMat& trainset, VMat& testset)
{
    Mat letters;
    loadAscii("DBDIR:Letter/letter.amat",letters);
    inputsize = letters.width()-1;
    nclasses = 26;
    trainset = VMat(letters.subMatRows(0,16000));
    testset = VMat(letters.subMatRows(16000,4000));
}
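// Added commentary: the 16000/4000 split above is the canonical partition of
// the 20000-example UCI Letter Recognition dataset (first 16000 rows for
// training, remaining 4000 for testing).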
void loadClassificationDataset(const string& datasetname, int& inputsize, int& nclasses, VMat& trainset, VMat& testset, bool normalizeinputs, VMat& allset)
{
    string dbname = datasetname;
    int reduced_size = 0;
    vector<string> dataset_and_size = split(dbname,":");
    if(dataset_and_size.size()==2)
    {
        dbname = dataset_and_size[0];
        reduced_size = toint(dataset_and_size[1]);
    }

    if(dbname=="2d")
    {
        trainset = input2dSet();
        Mat mapping(2,2);
        mapping << string("-1 0 1 1");
        trainset = remapLastColumn(trainset,mapping);
        testset = trainset;
        inputsize = 2;
        nclasses = 2;
    }
    else if(dbname=="letters")
    {
        loadLetters(inputsize, nclasses, trainset, testset);
    }
    else if(dbname=="breast")
    {
        VMat dbname_vm = loadBreastCancerWisconsin();
        inputsize = dbname_vm.width()-1;
        nclasses = 2;
        split(dbname_vm,0.5,trainset,testset);
    }
    else if(dbname=="usps")
    {
        loadUSPS(trainset,testset,true);
        inputsize = trainset.width()-1;
        nclasses = 10;
    }
    else if(dbname=="mnist")
    {
        loadMNIST(trainset,testset);
        inputsize = trainset.width()-1;
        nclasses = 10;
    }
    else if(dbname=="mnist_override")
    {
        loadMNIST(trainset,testset);
        inputsize = trainset.width()-1;
        nclasses = 10;
        Mat m;
        loadPMat("mnist_override.pmat",m);
        if(m.width() != inputsize+1)
            PLERROR("mnist_override.pmat is expected to have a width of %d, but has %d",inputsize+1,m.width());
        trainset = VMat(m);
    }
    else if(dbname.length()==5 && dbname.substr(0,4)=="usps" && dbname[4]>='0' && dbname[4]<='9')
    {
        int classnum = dbname[4]-'0';
        loadUSPS(trainset,testset,true);
        inputsize = trainset.width()-1;
        trainset = remapLastColumn(trainset,classnum,1,0);
        testset = remapLastColumn(testset,classnum,1,0);
        nclasses = 2;
    }
    else if(dbname.length()==6 && dbname.substr(0,5)=="mnist" && dbname[5]>='0' && dbname[5]<='9')
    {
        int classnum = dbname[5]-'0';
        loadMNIST(trainset,testset);
        inputsize = trainset.width()-1;
        trainset = remapLastColumn(trainset,classnum,1.,0.);
        testset = remapLastColumn(testset,classnum,1.,0.);
        nclasses = 2;
    }
    else if (dbname.substr(0,4) == "UCI_") {
        string db_spec;
        string type;
        if (dbname.substr(0,8) == "UCI_KDD_") {
            db_spec = dbname.substr(8);
            type = "KDD";
        } else {
            db_spec = dbname.substr(4);
            type = "MLDB";
        }

        size_t look_for_id = db_spec.rfind("_ID=");
        string db_dir;
        string id = "";
        if (look_for_id != string::npos) {
            // There is an ID specified.
            db_dir = db_spec.substr(0, look_for_id);
            id = db_spec.substr(look_for_id + 4);
        } else {
            db_dir = db_spec;
        }
        loadUCI(trainset, testset, allset, db_dir, id, normalizeinputs, type);
        inputsize = allset->inputsize();

    }
    else
        PLERROR("Unknown dbname %s",dbname.c_str());

    if(reduced_size)
    {
        trainset = trainset.subMatRows(0,reduced_size);
        testset = testset.subMatRows(0,reduced_size);
    }

    if(normalizeinputs)
    {
        Vec meanvec;
        Vec stddevvec;
        computeMeanAndStddev(trainset, meanvec, stddevvec);
        meanvec = meanvec.subVec(0,inputsize);
        stddevvec = stddevvec.subVec(0,inputsize);
        for (int i = 0; i < stddevvec.length(); i++) {
            if (fast_exact_is_equal(stddevvec[i], 0)) {
                // The standard deviation is 0: the column must be constant.
                // Since we don't want NaNs, we use 1 instead.
                stddevvec[i] = 1;
            }
        }
        trainset = normalize(trainset,meanvec,stddevvec);
        testset = normalize(testset,meanvec,stddevvec);
    }
}
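// Illustrative sketch (added commentary): the dataset name may carry an
// optional ":<size>" suffix, parsed above to truncate both train and test
// sets, e.g.
//
//     int inputsize, nclasses;
//     VMat train, test, all;
//     loadClassificationDataset("mnist:1000", inputsize, nclasses,
//                               train, test, false, all);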
/////////////
// loadUCI //
/////////////
void loadUCI(VMat& trainset, VMat& testset, VMat& allset, string db_spec, string id, bool &normalize, const string& type) {
    string script_file = db_spec;
    if (id != "") {
        script_file += "_ID=" + id;
    }
    script_file += ".plearn";
    PPath db_dir;
    if (type=="MLDB") {
        db_dir = PPath("UCI_MLDB_REP:") / db_spec;
    } else if (type=="KDD") { // TODO: a PPath protocol for UCI_KDD?
        db_dir = PPath("DBDIR:UCI_KDD") / db_spec;
    } else {
        PLERROR("In loadUCI: Unknown dataset type: %s.",type.c_str());
    }
    Object* obj = PLearn::macroLoadObject(db_dir / script_file);
    PP<UCISpecification> uci_spec = static_cast<UCISpecification*>(obj);
    if (uci_spec->file_train != "") {
        if (uci_spec->format=="UCI") {
            loadUCISet(trainset, db_dir / uci_spec->file_train, uci_spec);
        } else if (uci_spec->format=="AMAT") {
            loadUCIAMat(trainset, db_dir / uci_spec->file_train, uci_spec);
        } else {
            PLERROR("In loadUCI: Format '%s' unsupported",uci_spec->format.c_str());
        }
    }
    if (uci_spec->file_test != "") {
        if (uci_spec->format=="UCI") {
            loadUCISet(testset, db_dir / uci_spec->file_test, uci_spec);
        } else if (uci_spec->format=="AMAT") {
            loadUCIAMat(testset, db_dir / uci_spec->file_test, uci_spec);
        } else {
            PLERROR("In loadUCI: Format '%s' unsupported",uci_spec->format.c_str());
        }
    }
    if (uci_spec->file_all != "") {
        if (uci_spec->format=="UCI") {
            loadUCISet(allset, db_dir / uci_spec->file_all, uci_spec);
        } else if (uci_spec->format=="AMAT") {
            loadUCIAMat(allset, db_dir / uci_spec->file_all, uci_spec);
        } else {
            PLERROR("In loadUCI: Format '%s' unsupported",uci_spec->format.c_str());
        }
    } else {
        allset = vconcat(trainset, testset);
    }
    if (normalize) {
        int is = uci_spec->inputsize;
        if (is == -1)
            is = allset->width() - 1;
        VMat tmp_vmat = new ShiftAndRescaleVMatrix(allset, is, 0, true, 0);
        Mat new_data = tmp_vmat->toMat().subMatColumns(0, is);
        allset->putMat(0, 0, new_data);
        if (trainset && testset) {
            if (allset->length() != trainset->length() + testset->length())
                PLERROR("In loadUCI - The whole dataset should have a length equal to train + test");
            trainset->putMat(0, 0, new_data.subMatRows(0, trainset->length()));
            testset->putMat(0, 0, new_data.subMatRows(trainset->length(), testset->length()));
        } else if (trainset || testset) {
            PLERROR("In loadUCI - There can't be only a train set or only a test set");
        }
        // We don't want to normalize again.
        normalize = false;
    }
}
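// Illustrative sketch (an assumption, not taken from this file): the
// "<db_spec>.plearn" script loaded through macroLoadObject() above would
// describe the dataset with a UCISpecification object, along the lines of:
//
//     UCISpecification(
//         file_train = "adult.data";
//         file_test  = "adult.test";
//         format     = "UCI";
//         inputsize  = 14;
//         targetsize = 1;
//     )
//
// Only option names actually read by the surrounding code (file_train,
// file_test, file_all, format, inputsize, targetsize, weightsize, ...) are
// used here; the concrete values are made up for illustration.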
/////////////////
// loadUCIAMat //
/////////////////
void loadUCIAMat(VMat& data, string file, PP<UCISpecification> uci_spec)
{
    data = loadAsciiAsVMat(file);

    if (uci_spec->target_is_first) {
        // We need to move the target to the last columns.
        int ts = uci_spec->targetsize;
        if (ts == -1) {
            PLERROR("In loadUCIAMat - We don't know how many columns to move");
        }
        if (uci_spec->weightsize > 0) {
            PLERROR("In loadUCIAMat - Damnit, I don't like weights");
        }
        Vec row;
        Vec target;

        target.resize(ts);
        for (int i = 0; i < data.length(); i++) {
            row = data(i);
            target << row.subVec(0,ts);
            row.subVec(0, data.width() - ts) << row.subVec(ts, data.width() - ts);
            row.subVec(data.width() - ts, ts) << target;
            data->putRow(i,row);
        }

        // now, move the symbols
        TVec<map<string,real> > sym;
        int is = data.width()-ts;
        sym.resize(ts);
        for (int i=0; i<ts; i++) {
            sym[i] = data->getStringToRealMapping(i);
        }
        for (int i=0; i<is; i++) {
            data->setStringMapping(i, data->getStringToRealMapping(i+ts));
        }
        for (int i=is; i<is+ts; i++) {
            data->setStringMapping(i,sym[i-is]);
        }
    }

    data->defineSizes(uci_spec->inputsize, uci_spec->targetsize, uci_spec->weightsize);
}

////////////////
// loadUCISet //
////////////////
void loadUCISet(VMat& data, PP<UCISpecification> uci_spec) {
    PLASSERT( uci_spec );
    if (!uci_spec->data_all.isEmpty())
        loadUCISet(data, uci_spec->data_all.absolute(), uci_spec);
    else {
        VMat data_train, data_test;
        loadUCISet(data_train, uci_spec->data_train.absolute(), uci_spec);
        loadUCISet(data_test, uci_spec->data_test.absolute(), uci_spec);
        data = new ConcatRowsVMatrix(data_train, data_test, true);
    }
}

void loadUCISet(VMat& data, string file, PP<UCISpecification> uci_spec) {
    char*** to_symbols;
    int* to_n_symbols;
    TVec<int> max_in_col;
    TVec<string> header_columns;
    Mat the_data;
    if (uci_spec->header_exists) {
        the_data = loadUCIMLDB(file, &to_symbols, &to_n_symbols, &max_in_col, &header_columns);
    } else {
        the_data = loadUCIMLDB(file, &to_symbols, &to_n_symbols, &max_in_col);
    }
    if (uci_spec->target_is_first) {
        // We need to move the target to the last columns.
        int ts = uci_spec->targetsize;
        if (ts == -1) {
            PLERROR("In loadUCISet - We don't know how many columns to move");
        }
        if (uci_spec->weightsize > 0) {
            PLERROR("In loadUCISet - Damnit, I don't like weights");
        }
        Vec row;
        Vec target;

        target.resize(ts);
        for (int i = 0; i < the_data.length(); i++) {
            row = the_data(i);
            target << row.subVec(0,ts);
            row.subVec(0, the_data.width() - ts) << row.subVec(ts, the_data.width() - ts);
            row.subVec(the_data.width() - ts, ts) << target;
        }
    }
    data = VMat(the_data);
    data->defineSizes(uci_spec->inputsize, uci_spec->targetsize, uci_spec->weightsize);

    if (uci_spec->header_exists) {
        if (uci_spec->header_fields.size()==0) {

            if (uci_spec->target_is_first) {
                int ts = uci_spec->targetsize;
                int is = the_data.width()-ts;
                TVec<string> tmp;
                tmp.resize(ts);
                tmp << header_columns.subVec(0,ts);
                header_columns.subVec(0,is) << header_columns.subVec(ts,is);
                header_columns.subVec(is,ts) << tmp;
            }
            data->declareFieldNames(header_columns);
        } else {
            TVec<string> field_names;
            field_names.resize(the_data.width());
            int last = 0;
            int cnt = 0;
            for (int i=0; i<uci_spec->header_fields.size(); i++) {
                for (int j=last; j<uci_spec->header_fields[i].first; j++) {
                    field_names[j] = "";
                }
                for (int j=uci_spec->header_fields[i].first; j<=uci_spec->header_fields[i].second; j++) {
                    if (cnt>=header_columns.size()) {
                        PLERROR("In loadUCISet: 'header_fields' setting is incorrect");
                    }
                    field_names[j] = header_columns[cnt++];
                }
                last = uci_spec->header_fields[i].second+1;
            }
            for (int i=last; i<field_names.size(); i++) {
                field_names[i] = "";
            }
            if (uci_spec->target_is_first) {
                int ts = uci_spec->targetsize;
                int is = the_data.width()-ts;
                TVec<string> tmp;
                tmp.resize(ts);
                tmp << field_names.subVec(0,ts);
                field_names.subVec(0,is) << field_names.subVec(ts,is);
                field_names.subVec(is,ts) << tmp;
            }
            data->declareFieldNames(field_names);
        }
    }

    // Add symbol mappings

    if (uci_spec->target_is_first) {
        int ts = uci_spec->targetsize;
        int is = the_data.width()-ts;
        TVec<char**> tmp_sym(ts);
        TVec<int> tmp_len(ts);
        for (int i=0; i<ts; i++) {
            tmp_sym[i] = to_symbols[i];
            tmp_len[i] = to_n_symbols[i];
        }
        for (int i=ts; i<is+ts; i++) {
            to_symbols[i-ts] = to_symbols[i];
            to_n_symbols[i-ts] = to_n_symbols[i];
        }
        for (int i=is; i<is+ts; i++) {
            to_symbols[i] = tmp_sym[i-is];
            to_n_symbols[i] = tmp_len[i-is];
        }

        tmp_len << max_in_col.subVec(0,ts);
        max_in_col.subVec(0,is) << max_in_col.subVec(ts,is);
        max_in_col.subVec(is,ts) << tmp_len;
    }
    for (int j=0; j<data->width(); j++) {
        for (int k=0; k<to_n_symbols[j]; k++) {
            data->addStringMapping(j,string(to_symbols[j][k]),real(max_in_col[j]+k+1));
        }
    }

    // Free up the symbols
    for (int i=0; i<data->width(); i++)
    {
        for (int j=0; j<to_n_symbols[i]; j++)
            free(to_symbols[i][j]);
        free(to_symbols[i]);
    }
    free(to_symbols);
    free(to_n_symbols);
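    // Added commentary on the mapping loop above: a symbol in column j is
    // mapped to the value max_in_col[j]+k+1, i.e. just above the largest
    // numeric value already present in that column, so string codes never
    // collide with genuine numeric entries.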
    // Add a default 'target' name to the target column(s) if they have no
    // field name yet.
    int is = data->inputsize();
    int ts = data->targetsize();
    if (ts == 1) {
        string f_target = data->fieldName(is);
        if (pl_isnumber(f_target) && toint(f_target) == is)
            data->declareField(is, "target");
    } else {
        string f_target_i;
        for (int i = 0; i < ts; i++) {
            f_target_i = data->fieldName(is + i);
            if (pl_isnumber(f_target_i) && toint(f_target_i) == is + i)
                data->declareField(is + i, "target_" + tostring(i));
        }
    }
}

} // end of namespace PLearn


/*
  Local Variables:
  mode:c++
  c-basic-offset:4
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0))
  indent-tabs-mode:nil
  fill-column:79
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=79 :