import numpy
import numpy.random
import pylab
from dispims_color import dispims_color
import logreg

rng = numpy.random.RandomState(1)

SMALL = 0.001
patchsize = 10
numfeatures = 100
datadir = "/home/ro/research/data/cifar/cifar-10-batches-py"


def pca(data, dimstokeep):
    """ Principal components analysis of data (one datapoint per row),
        keeping the dimstokeep leading components.

        Returns the backward mapping (data -> components), the forward
        mapping (components -> data), and their two compositions.
    """
    from numpy.linalg import eigh
    u, v = eigh(numpy.cov(data, rowvar=0, bias=1))
    # sort eigenvectors by decreasing eigenvalue
    v = v[:, numpy.argsort(u)[::-1]]
    backward_mapping = v[:, :dimstokeep].T
    forward_mapping = v[:, :dimstokeep]
    return backward_mapping.astype("float32"), \
           forward_mapping.astype("float32"), \
           numpy.dot(v[:, :dimstokeep].astype("float32"), backward_mapping), \
           numpy.dot(forward_mapping, v[:, :dimstokeep].T.astype("float32"))


# LOAD DATA
trainimages = numpy.concatenate(
    [numpy.load(datadir + '/data_batch_' + b)['data'] for b in ["1", "2", "3", "4", "5"]],
    0).reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1).reshape(-1, 32 * 32 * 3).astype("float32")
trainlabels = numpy.concatenate(
    [numpy.array(numpy.load(datadir + '/data_batch_' + str(i))['labels']).astype("int32")
     for i in [1, 2, 3, 4, 5]])
testimages = numpy.load(datadir + '/test_batch')['data'].reshape(
    -1, 3, 32, 32).transpose(0, 2, 3, 1).reshape(-1, 32 * 32 * 3).astype("float32")
testlabels = numpy.array(numpy.load(datadir + '/test_batch')['labels']).astype("int32")

# shuffle the training set
R = rng.permutation(trainimages.shape[0])
trainimages = trainimages[R]
trainlabels = trainlabels[R]

# per-image contrast normalization: subtract each image's mean and divide by
# its (regularized) standard deviation
meanstd = trainimages.std()
trainimages = trainimages.reshape(-1, 3072)
trainimages -= trainimages.mean(1)[:, None]
trainimages /= trainimages.std(1)[:, None] + 0.1 * meanstd
testimages = testimages.reshape(-1, 3072)
testimages -= testimages.mean(1)[:, None]
testimages /= testimages.std(1)[:, None] + 0.1 * meanstd

# LEARN PCA MATRICES
print "whitening"
trainimages_mean = trainimages.mean(0)[None, :]
trainimages_std = trainimages.std(0)[None, :]
trainimages -= trainimages_mean
trainimages /= trainimages_std + 0.1 * meanstd
pca_backward, pca_forward, zca_backward, zca_forward = pca(trainimages, dimstokeep=800)
# apply the training-set standardization to the test set
testimages -= trainimages_mean
testimages /= trainimages_std + 0.1 * meanstd
print "done"

#dispims_color(numpy.dot(numpy.dot(trainimages[:100], pca_backward.T), pca_forward.T).reshape(100, 32, 32, 3), 2)

# split the training data into a training and a validation set
numtrain = 40000
numvali = 10000
alltrainimages = trainimages[:numtrain + numvali]
trainimages = alltrainimages[:numtrain]
valiimages = alltrainimages[numtrain:]
alltrainlabels = trainlabels[:numtrain + numvali]
valilabels = trainlabels[numtrain:]
trainlabels = trainlabels[:numtrain]

# CLASSIFICATION WITH CROSSVALIDATION
weightcosts = [0.1, 0.001, 0.0]
valicosts = []
lr = logreg.Logreg(10, trainimages.shape[1])
for wcost in weightcosts:
    lr.train(trainimages.T, logreg.onehot(trainlabels).T, numsteps=100, verbose=False, weightcost=wcost)
    #lr.train_cg(trainimages.T, logreg.onehot(trainlabels).T, weightcost=wcost, maxnumlinesearch=1000)
    valicosts.append(lr.zeroone(valiimages.T, logreg.onehot(valilabels).T))

# retrain on the full training set using the weightcost with the lowest validation error
winningwcost = weightcosts[numpy.argmin(valicosts)]
lr.train(alltrainimages.T, logreg.onehot(alltrainlabels).T, numsteps=100, verbose=False, weightcost=winningwcost)
#lr.train_cg(alltrainimages.T, logreg.onehot(alltrainlabels).T, weightcost=winningwcost, maxnumlinesearch=1000)

print "winning weightcost: ", winningwcost
print "logreg train performance: ", 1.0 - lr.zeroone(trainimages.T, logreg.onehot(trainlabels).T)
print "logreg test performance: ", 1.0 - lr.zeroone(testimages.T, logreg.onehot(testlabels).T)