simplenet.cc
// Uncommenting this will result in a namespace-related bug
// that I could not explain (Pascal)
// #include "MatIO.h"
#include <plearn/var/Var_all.h>
#include <plearn/var/NaryVariable.h>
#include <plearn/var/Func.h>
#include <plearn/opt/GradientOptimizer.h>
#include <plearn/var/VarArray.h>
#include <plearn/db/databases.h>
#include <plearn/math/random.h>

using namespace PLearn;

int main()
{
  try
  {
    // Pascal's simplenet implementation
    // cout << "tanh: " << ultrafasttanh(1) << " " << fasttanh(1) << " " << tanh(1) << endl;

    int nhidden = 100;
    int nepochs = 10;

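    // Load the Letters dataset: each row holds the input features followed by the class index (0..25).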
    VMat trainset = loadLetters();
    int nclasses = 26;
    int inputsize = trainset.width()-1;

    cout << "Letters: " << trainset.length() << " samples, "
         << nclasses << " classes, " << inputsize << " inputs, "
         << nhidden << " hidden" << endl;

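    // One-hidden-layer network: affine + tanh hidden layer, then affine + softmax output layer.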
    Var input(inputsize);
    Var classnum(1);

    Var w1(1+inputsize, nhidden);
    //  fill_random_normal(w1->matValue.subMatRows(1,inputsize), 0., 1./sqrt( real(inputsize) ));
    fill_random_uniform(w1->value, -1./sqrt(real(inputsize)), +1./sqrt(real(inputsize)));
    Var hidden = tanh(affine_transform(input,w1));
    Var w2(1+nhidden, nclasses);
    //  fill_random_normal(w2->matValue.subMatRows(1,nhidden), 0., 1./sqrt(real(nhidden)));
    fill_random_uniform(w2->value, -1./sqrt(real(nhidden)), +1./sqrt(real(nhidden)));

    // Var output = sigmoid(affine_transform(hidden,w2));
    // Var cost = onehot_squared_loss(output, classnum, 0, 1);
    Var output = softmax(affine_transform(hidden,w2));
    Var cost = neg_log_pi(output,classnum);

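    // Parameters to optimize: both weight matrices (each includes an extra bias row used by affine_transform).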
    VarArray params = w1&w2;

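    // Average both the negative log-likelihood and the classification error over the training set.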
    Var classerror = classification_loss(output, classnum);
    Var totalcost = meanOf(trainset, Func(input&classnum, hconcat(cost&classerror)), 1);

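    // Stochastic gradient descent; each call to optimizeN() runs nstages updates, i.e. one pass over the training set.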
    // GradientOptimizer opt(params, totalcost, 0.01, 0.00, trainset.length()*nepochs, "simplenet.log", trainset.length());
    GradientOptimizer opt;
    opt.params = params;
    opt.cost = totalcost;
    opt.start_learning_rate = 1e-2;
    opt.nstages = trainset.length();
    opt.build();
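    // Train for nepochs epochs, printing the number of examples seen and the mean (cost, classerror) after each one.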
    for (int i = 0; i < nepochs; i++) {
        VecStatsCollector statscol;
        opt.optimizeN(statscol);
        pout << (i + 1) * trainset.length() << " " << statscol.getMean() << endl;
    }

    cout << "FINISHED." << endl;
    return 0;
  }
  catch(const PLearnError& err)
  {
    cerr << "FATAL ERROR: " << err.message() << endl;
  }

  return 1;
}