![]() |
GURLS++
2.0.00
C++ Implementation of GURLS Matlab Toolbox
|
RLSPegasos is the sub-class of Optimizer that implements the Pegasos algorithm.
#include <rlspegasos.h>
Public Member Functions | |
GurlsOptionsList * | execute (const gMat2D< T > &X, const gMat2D< T > &Y, const GurlsOptionsList &opt) |
Computes a classifier for the primal formulation of RLS. | |
Static Public Member Functions | |
static Optimizer< T > * | factory (const std::string &id) throw (BadOptimizerCreation) |
Factory function returning a pointer to the newly created object. |
Definition at line 59 of file rlspegasos.h.
GurlsOptionsList * gurls::RLSPegasos< T >::execute | ( | const gMat2D< T > & | X, |
const gMat2D< T > & | Y, | ||
const GurlsOptionsList & | opt | ||
) | [virtual] |
The optimization is carried out using a stochastic gradient descent algorithm. The regularization parameter is set to the one found in the field paramsel of opt. In case of multiclass problems, the regularizers need to be combined with the function specified in the field singlelambda of opt.
X | input data matrix |
Y | labels matrix |
opt | options with the following:
|
Implements gurls::Optimizer< T >.
Definition at line 91 of file rlspegasos.h.
{
    // Pegasos (stochastic sub-gradient) training for the primal RLS problem.
    // Builds an "optimizer" options list holding the weight matrix W, the
    // running sum W_sum and an iteration count, then runs opt.epochs passes
    // of the single-pass Pegasos driver and returns W = W_sum / count.

    // lambda = opt.singlelambda(opt.paramsel.lambdas);
    // Collapse the per-class lambdas into a single regularizer via the
    // user-selected combining function (e.g. mean/median).
    const gMat2D<T> &ll = opt.getOptValue<OptMatrix<gMat2D<T> > >("paramsel.lambdas");
    T lambda = opt.getOptAs<OptFunction>("singlelambda")->getValue(ll.getData(), ll.getSize());

    // [n,d] = size(X);
    const unsigned long n = X.rows();
    const unsigned long d = X.cols();

    // T = size(bY,2);
    const unsigned long t = Y.cols();

    // Result container; ownership is returned to the caller.
    GurlsOptionsList* optimizer = new GurlsOptionsList("optimizer");

    // opt.cfr.W = zeros(d,T);
    gMat2D<T>* W = new gMat2D<T>(d,t);
    set(W->getData(), (T)0.0, d*t);
    optimizer->addOpt("W", new OptMatrix<gMat2D<T> >(*W));

    // opt.cfr.W_sum = zeros(d,T);
    // W is all-zero here, so copying it zero-initializes W_sum as well.
    gMat2D<T>* W_sum = new gMat2D<T>(d,t);
    copy(W_sum->getData(), W->getData(), d*t);
    optimizer->addOpt("W_sum", new OptMatrix<gMat2D<T> >(*W_sum));

    optimizer->addOpt("count", new OptNumber(0.0));

    // opt.cfr.acc_last = [];
    // opt.cfr.acc_avg = [];

    // opt.cfr.t0 = ceil(norm(X(1,:))/sqrt(opt.singlelambda(opt.paramsel.lambdas)));
    // Initial time-offset t0 derived from the norm of the first sample.
    T* row = new T[d];
    getRow(X.getData(), n, d, 0, row);
    optimizer->addOpt("t0", new OptNumber( ceil( nrm2(d, row, 1)/sqrt(lambda))));
    delete[] row;

    // % Run multiple epochs
    // for i = 1:opt.epochs,
    int epochs = static_cast<int>(opt.getOptAsNumber("epochs"));

    // Build a throwaway options list mirroring the fields the per-pass
    // driver reads (paramsel.lambdas, singlelambda, optimizer), since the
    // incoming opt is const and the driver mutates its "optimizer" entry.
    GurlsOptionsList* tmp_opt = new GurlsOptionsList("opt");
    GurlsOptionsList* tmp_paramsel = new GurlsOptionsList("paramsel");
    tmp_opt->addOpt("paramsel", tmp_paramsel);
    gMat2D<T>* ret_lambdas = new gMat2D<T>(ll);
    tmp_paramsel->addOpt("lambdas", new OptMatrix<gMat2D<T> >(*ret_lambdas));
    OptFunction* tmp_singlelambda = new OptFunction(opt.getOptAs<OptFunction>("singlelambda")->getName());
    tmp_opt->addOpt("singlelambda", tmp_singlelambda);
    tmp_opt->addOpt("optimizer", optimizer);

    for(int i=0; i<epochs; ++i)
    {
        // if opt.cfr.count == 0
        //     opt.cfr.t0 = ceil(norm(X(1,:))/sqrt(opt.singlelambda(opt.paramsel.lambdas)));
        //     fprintf('\n\tt0 is set to : %f\n', opt.cfr.t0);
        // end

        // opt.cfr = rls_pegasos_singlepass(X, bY, opt);
        // One full stochastic pass; the driver returns a new options list
        // that replaces the previous "optimizer" entry (old one is owned
        // and freed by tmp_opt on removeOpt).
        GurlsOptionsList* result = rls_pegasos_driver(X.getData(), Y.getData(), *tmp_opt, n, d, Y.rows(), t);
        tmp_opt->removeOpt("optimizer");
        tmp_opt->addOpt("optimizer", result);
    }

    // Detach the final optimizer from tmp_opt (removeOpt(..., false) skips
    // deletion) so it survives the delete of the temporary list.
    optimizer = tmp_opt->getOptAs<GurlsOptionsList>("optimizer");
    tmp_opt->removeOpt("optimizer", false);
    delete tmp_opt;

    // cfr = opt.cfr;
    // cfr.W = opt.cfr.W_sum/opt.cfr.count;
    T count = static_cast<T>(optimizer->getOptAsNumber("count"));
    if(eq(count, (T)0.0))
        throw gException(Exception_Illegal_Argument_Value);

    // Averaged Pegasos solution: W = W_sum / count, computed in place.
    W = &(optimizer->getOptValue<OptMatrix<gMat2D<T> > >("W"));
    W_sum = &(optimizer->getOptValue<OptMatrix<gMat2D<T> > >("W_sum"));
    set(W->getData(), (T)0.0, W->getSize());
    axpy(W->getSize(), (T)(1.0/count), W_sum->getData(), 1, W->getData(), 1);

    return optimizer;
}
static Optimizer<T>* gurls::Optimizer< T >::factory | ( | const std::string & | id | ) | throw (BadOptimizerCreation) [inline, static, inherited] |
Definition at line 130 of file optimization.h.
{
    // Dispatch on the textual identifier and hand back a freshly allocated
    // optimizer of the matching concrete type; the caller takes ownership.
    if(id == "rlsauto")
        return new RLSAuto<T>;
    else if(id == "rlsprimal")
        return new RLSPrimal<T>;
    else if(id == "rlsprimalr")
        return new RLSPrimalr<T>;
    else if(id == "rlsdual")
        return new RLSDual<T>;
    else if(id == "rlsdualr")
        return new RLSDualr<T>;
    else if(id == "rlspegasos")
        return new RLSPegasos<T>;
    else if(id == "rlsgpregr")
        return new RLSGPRegr<T>;
    else if(id == "rlsprimalrecinit")
        return new RLSPrimalRecInit<T>;
    else if(id == "rlsprimalrecupdate")
        return new RLSPrimalRecUpdate<T>;
    else if(id == "rlsrandfeats")
        return new RLSRandFeats<T>;

    // No optimizer is registered under this identifier.
    throw BadOptimizerCreation(id);
}