GURLS++ 2.0.00
C++ Implementation of GURLS Matlab Toolbox
/*
 * The GURLS Package in C++
 *
 * Copyright (C) 2011-2013, IIT@MIT Lab
 * All rights reserved.
 *
 * authors:  M. Santoro
 * email:    msantoro@mit.edu
 * website:  http://cbcl.mit.edu/IIT@MIT/IIT@MIT.html
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *     * Redistributions of source code must retain the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer.
 *     * Redistributions in binary form must reproduce the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer in the documentation and/or other materials
 *       provided with the distribution.
 *     * Neither the name(s) of the copyright holders nor the names
 *       of its contributors or of the Massachusetts Institute of
 *       Technology or of the Italian Institute of Technology may be
 *       used to endorse or promote products derived from this software
 *       without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */


#ifndef _GURLS_HOGPREGR_H_
#define _GURLS_HOGPREGR_H_

#include <cmath>

#include "gurls++/options.h"
#include "gurls++/optlist.h"
#include "gurls++/gmat2d.h"
#include "gurls++/gvec.h"
#include "gurls++/gmath.h"

#include "gurls++/paramsel.h"
#include "gurls++/perf.h"

#include "gurls++/rlsgp.h"
#include "gurls++/predgp.h"

namespace gurls {

/**
 * \ingroup ParameterSelection
 * \brief ParamSelHoGPRegr is the sub-class of ParamSelection that implements
 * hold-out cross validation for Gaussian Process regression.
 */
template <typename T>
class ParamSelHoGPRegr: public ParamSelection<T>{

public:
    /**
     * Performs parameter selection for Gaussian Process regression using a
     * hold-out set: for each hold-out split and for each guess of the noise
     * (regularization) parameter, a GP model is trained on the training part
     * of the split and evaluated on the validation part with the performance
     * measure opt.hoperf; the guess maximizing the performance is kept for
     * each output.
     *
     * \param X input data matrix
     * \param Y labels matrix
     * \param opt options list; the fields nlambda, nholdouts, hoperf,
     *        singlelambda, kernel.K and split (indices, lasts) are read,
     *        while lambdamin and lambdamax are optional
     *
     * \return a GurlsOptionsList with the fields lambdas, lambdas_round,
     *         guesses and perf
     */
    GurlsOptionsList* execute(const gMat2D<T>& X, const gMat2D<T>& Y, const GurlsOptionsList& opt);
};

template <typename T>
GurlsOptionsList *ParamSelHoGPRegr<T>::execute(const gMat2D<T>& X, const gMat2D<T>& Y, const GurlsOptionsList &opt)
{
    //    [n,T] = size(y);
    const unsigned long n = Y.rows();
    const unsigned long t = Y.cols();

    const unsigned long d = X.cols();

    //    tot = opt.nlambda;
    int tot = static_cast<int>(opt.getOptAsNumber("nlambda"));

    //    K = opt.kernel.K;
    const gMat2D<T> &K = opt.getOptValue<OptMatrix<gMat2D<T> > >("kernel.K");

    const GurlsOptionsList* split = opt.getOptAs<GurlsOptionsList>("split");

    const gMat2D<unsigned long> &indices_mat = split->getOptValue<OptMatrix<gMat2D<unsigned long> > >("indices");
    const gMat2D<unsigned long> &lasts_mat = split->getOptValue<OptMatrix<gMat2D<unsigned long> > >("lasts");

    const unsigned long *lasts = lasts_mat.getData();
    const unsigned long *indices = indices_mat.getData();

    // Buffer holding the permuted sample indices of the current split:
    // the first `last` entries index the training samples, the remaining
    // ones the validation samples.
    unsigned long *tr = new unsigned long[indices_mat.cols()];
    unsigned long *va;

    T lmin;
    T lmax;

    if(opt.hasOpt("lambdamin"))
        lmin = opt.getOptAsNumber("lambdamin");
    else
        lmin = 0.001;

    if(opt.hasOpt("lambdamax"))
        lmax = opt.getOptAsNumber("lambdamax");
    else
        lmax = 10;

    //    guesses = lmin.*(lmax/lmin).^linspace(0,1,tot);
    // Guesses are geometrically spaced between lmin and lmax.
    T* guesses = new T[tot];

    T* linspc = new T[tot];
    linspace((T)0.0, (T)1.0, tot, linspc);
    const T coeff = lmax/lmin;

    for(int i=0; i< tot; ++i)
        guesses[i] = lmin * std::pow(coeff, linspc[i]);

    delete[] linspc;

    // Nested options list used to run the inner training/prediction tasks
    // on each hold-out split.
    GurlsOptionsList* nestedOpt = new GurlsOptionsList("nested");
    nestedOpt->copyOpt("singlelambda", opt);

    GurlsOptionsList* tmpPredKernel = new GurlsOptionsList("predkernel");
    GurlsOptionsList* tmpKernel = new GurlsOptionsList("kernel");
    GurlsOptionsList* tmpParamSel = new GurlsOptionsList("paramsel");

    nestedOpt->addOpt("kernel", tmpKernel);
    nestedOpt->addOpt("predkernel", tmpPredKernel);
    nestedOpt->addOpt("paramsel", tmpParamSel);

    gMat2D<T> subXtr;
    gMat2D<T> subYtr;
    gMat2D<T> subXva;
    gMat2D<T> subYva;

    gMat2D<T>* subK = new gMat2D<T>();
    gMat2D<T>* subPredK = new gMat2D<T>();
    gMat2D<T>* subPredKTest = new gMat2D<T>();

    tmpKernel->addOpt("K", new OptMatrix<gMat2D<T> >(*subK));
    tmpPredKernel->addOpt("K", new OptMatrix<gMat2D<T> >(*subPredK));
    tmpPredKernel->addOpt("Ktest", new OptMatrix<gMat2D<T> >(*subPredKTest));

    RLSGPRegr<T> rlsgp;
    PredGPRegr<T> predgp;
    Performance<T>* perfClass = Performance<T>::factory(opt.getOptAsString("hoperf"));

    const int nholdouts = static_cast<int>(opt.getOptAsNumber("nholdouts"));
    const unsigned long indices_cols = indices_mat.cols();

    T *perf = new T[tot*t];

    gMat2D<T>* lambdas_round_mat = new gMat2D<T>(nholdouts, t);
    T *lambdas_round = lambdas_round_mat->getData();

    gMat2D<T> *perf_mat = new gMat2D<T>(nholdouts, tot*t);

    gMat2D<T>* guesses_mat = new gMat2D<T>(nholdouts, tot);
    T *ret_guesses = guesses_mat->getData();

    gMat2D<T> *lambda = new gMat2D<T>(1,1);
    tmpParamSel->addOpt("lambdas", new OptMatrix<gMat2D<T> >(*lambda));

    //    for nh = 1:opt.nholdouts
    for(int nh = 0; nh < nholdouts; ++nh)
    {
        //        if strcmp(class(opt.split),'cell')
        //            tr = opt.split{nh}.tr;
        //            va = opt.split{nh}.va;
        //        else
        //            tr = opt.split.tr;
        //            va = opt.split.va;
        //        end
        unsigned long last = lasts[nh];
        copy(tr, indices+nh, indices_cols, 1, indices_mat.rows());
        va = tr+last;
        const unsigned long va_size = indices_cols-last;

        //        [n,T] = size(y(tr,:));
        // here n is last

        //        opt.kernel.K = K(tr,tr);
        subK->resize(last, last);
        copy_submatrix(subK->getData(), K.getData(), K.rows(), last, last, tr, tr);

        //        opt.predkernel.K = K(va,tr);
        subPredK->resize(va_size, last);
        copy_submatrix(subPredK->getData(), K.getData(), K.rows(), va_size, last, va, tr);

        //        opt.predkernel.Ktest = diag(K(va,va));
        // Extract K(va,va), then copy its diagonal (stride va_size+1) into Ktest.
        T* tmpMat = new T[va_size*va_size];
        subPredKTest->resize(va_size, 1);

        copy_submatrix(tmpMat, K.getData(), K.rows(), va_size, va_size, va, va);
        copy(subPredKTest->getData(), tmpMat, va_size, 1, va_size+1);

        delete[] tmpMat;

        subXtr.resize(last, d);
        subMatrixFromRows(X.getData(), n, d, tr, last, subXtr.getData());

        subYtr.resize(last, t);
        subMatrixFromRows(Y.getData(), n, t, tr, last, subYtr.getData());

        subXva.resize(va_size, d);
        subMatrixFromRows(X.getData(), n, d, va, va_size, subXva.getData());

        subYva.resize(va_size, t);
        subMatrixFromRows(Y.getData(), n, t, va, va_size, subYva.getData());

        //        for i = 1:tot
        for(int i=0; i< tot; ++i)
        {
            //            opt.paramsel.noises = guesses(i);
            lambda->getData()[0] = guesses[i];

            //            opt.rls = rls_gpregr(X(tr,:),y(tr,:),opt);
            GurlsOptionsList* ret_rlsgp = rlsgp.execute(subXtr, subYtr, *nestedOpt);

            nestedOpt->removeOpt("optimizer");
            nestedOpt->addOpt("optimizer", ret_rlsgp);

            //            tmp = pred_gpregr(X(va,:),y(va,:),opt);
            GurlsOptionsList *pred_list = predgp.execute(subXva, subYva, *nestedOpt);

            //            opt.pred = tmp.means;
            nestedOpt->removeOpt("pred");
            nestedOpt->addOpt("pred", pred_list->getOpt("means"));

            pred_list->removeOpt("means", false);

            delete pred_list;

            //            opt.perf = opt.hoperf([],y(va,:),opt);
            GurlsOptionsList *perf_list = perfClass->execute(subXva, subYva, *nestedOpt);
            gMat2D<T>& forho = perf_list->getOptValue<OptMatrix<gMat2D<T> > >("forho");

            //            for t = 1:T
            //                perf(i,t) = opt.perf.forho(t);
            copy(perf+i, forho.getData(), t, tot, 1);

            delete perf_list;
        }

        //        [dummy,idx] = max(perf,[],1);
        T* work = NULL;
        unsigned long* idx = new unsigned long[t];
        indicesOfMax(perf, tot, t, idx, work, 1);

        //        vout.lambdas_round{nh} = guesses(idx);
        T* lambdas_nh = new T[t];
        copyLocations(idx, guesses, t, tot, lambdas_nh);
        copy(lambdas_round + nh, lambdas_nh, t, nholdouts, 1);
        delete [] lambdas_nh;
        delete [] idx;

        //        vout.perf{nh} = perf;
        copy(perf_mat->getData()+nh, perf, tot*t, nholdouts, 1);

        //        vout.guesses{nh} = guesses;
        copy(ret_guesses+nh, guesses, tot, nholdouts, 1);
    }

    delete nestedOpt;

    delete[] tr;
    delete[] guesses;
    delete perfClass;
    delete[] perf;

    GurlsOptionsList* paramsel;

    if(opt.hasOpt("paramsel"))
    {
        GurlsOptionsList* tmp_opt = new GurlsOptionsList("tmp");
        tmp_opt->copyOpt("paramsel", opt);

        paramsel = GurlsOptionsList::dynacast(tmp_opt->getOpt("paramsel"));
        tmp_opt->removeOpt("paramsel", false);
        delete tmp_opt;

        paramsel->removeOpt("lambdas_round");
        paramsel->removeOpt("guesses");
        paramsel->removeOpt("perf");
        paramsel->removeOpt("lambdas");
    }
    else
        paramsel = new GurlsOptionsList("paramsel");

    paramsel->addOpt("lambdas_round", new OptMatrix<gMat2D<T> >(*lambdas_round_mat));
    paramsel->addOpt("perf", new OptMatrix<gMat2D<T> >(*perf_mat));
    paramsel->addOpt("guesses", new OptMatrix<gMat2D<T> >(*guesses_mat));

    gMat2D<T> *l = new gMat2D<T>(1, t);

    //    if numel(vout.lambdas_round) > 1
    if(nholdouts > 1)
    {
        T *lambdas = new T[t];
        //        lambdas = cell2mat(vout.lambdas_round');
        //        vout.lambdas = mean(lambdas);
        mean(lambdas_round, lambdas, nholdouts, t, t);

        copy(l->getData(), lambdas, t);

        delete [] lambdas;
    }
    else
    {
        //        vout.lambdas = vout.lambdas_round{1};
        copy(l->getData(), lambdas_round, t);
    }

    paramsel->addOpt("lambdas", new OptMatrix<gMat2D<T> >(*l));

    return paramsel;
}

}

#endif // _GURLS_HOGPREGR_H_
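A minimal usage sketch follows; it is not part of the header above. It assumes the header is installed as gurls++/hogpregr.h (inferred from the include guard), that the options list already contains the fields read by execute (nlambda, nholdouts, hoperf, singlelambda, kernel.K and the split indices/lasts, typically produced by the GURLS++ kernel and hold-out split tasks), and the wrapper name selectGPLambdas is purely illustrative.

#include "gurls++/gmat2d.h"
#include "gurls++/optlist.h"
#include "gurls++/hogpregr.h"

using namespace gurls;

// Hypothetical helper: runs hold-out parameter selection for GP regression.
// The returned list holds "lambdas", "lambdas_round", "guesses" and "perf";
// the caller owns the pointer.
GurlsOptionsList* selectGPLambdas(const gMat2D<double>& X, const gMat2D<double>& y,
                                  const GurlsOptionsList& opt)
{
    // opt must already provide: nlambda, nholdouts, hoperf, singlelambda,
    // kernel.K (training kernel) and split (indices/lasts of a hold-out split).
    ParamSelHoGPRegr<double> paramsel;
    return paramsel.execute(X, y, opt);
}

// Example call site (assuming opt was filled by the kernel/split tasks):
//   GurlsOptionsList* sel = selectGPLambdas(Xtr, ytr, opt);
//   const gMat2D<double>& lambdas =
//       sel->getOptValue<OptMatrix<gMat2D<double> > >("lambdas");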