#include "merger.h"
#include "../estim/arx.h"

// NOTE(review): this copy of the file has lost its template arguments
// (e.g. "Array >" below and the bare "shared_ptr" declarations in
// merger_mix::merge) -- presumably stripped during text extraction.
// Code tokens are left exactly as received; recover the missing template
// parameters from version control before attempting to compile.

namespace bdm {

// Constructor: store the array of source pdfs via set_sources().
// Debugging starts disabled (DBG = false, dbg_file = 0) and no support
// points exist yet (Npoints = 0).
merger_base::merger_base ( const Array > &S ): Npoints(0), DBG(false), dbg_file(0) {
    set_sources ( S );
}

// Merge the sources' log-likelihoods evaluated on a common support.
//
// @param lW  matrix of size (#sources x #support points); row i holds the
//            log-density of source i at every support point.
// @return    vector with one merged log-density per support point
//            (i.e. per column of lW).
//
// The pooling rule is selected by the member METHOD:
//   ARITHMETIC -- log of the sum of the densities (linear pool, up to
//                 normalization),
//   GEOMETRIC  -- mean of the logs (logarithmic pool),
//   LOGNORMAL  -- log-normal merging tuned by member beta; see the
//                 accompanying document merge_lognorm_derivation.lyx.
vec merger_base::merge_points ( mat &lW ) {
    int nu = lW.rows(); // number of sources being merged
    vec result;
    ivec indW;          // indices of support points where all sources are finite
    bool infexist = false; // true iff some column of lW sums to +/-inf
    switch ( METHOD ) {
    case ARITHMETIC:
        // NOTE(review): plain log(sum(exp(.))) overflows/underflows for
        // large |lW|; a log-sum-exp formulation would be safer -- confirm
        // the expected magnitude of lW before changing.
        result = log ( sum ( exp ( lW ) ) ); //ugly!
        break;
    case GEOMETRIC:
        // Mean of logs == log of the geometric mean of the densities.
        result = sum ( lW ) / nu;
        break;
    case LOGNORMAL:
        // Column sums of the log-weights; infinite entries mark support
        // points where at least one source degenerated.
        vec sumlW = sum ( lW ) ;
        indW = find ( ( sumlW < inf ) & ( sumlW > -inf ) );
        infexist = ( indW.size() < lW.cols() );
        vec mu;  // per-point mean of the logs
        vec lam; // per-point sum of squared deviations from mu
        if ( infexist ) {
            // Restrict the statistics to the finite columns only.
            mu = sumlW ( indW ) / nu; //mean of logs
            mat validlW = lW.get_cols ( indW );
            lam = sum ( pow ( validlW - outer_product ( ones ( validlW.rows() ), mu ), 2 ) );
        } else {
            mu = sum ( lW ) / nu; //mean of logs
            lam = sum ( pow ( lW - outer_product ( ones ( lW.rows() ), mu ), 2 ) );
        }
        double coef = 0.0;
        vec sq2bl = sqrt ( 2 * beta * lam ); //this term is everywhere
        switch ( nu ) {
        case 2:
            // Closed form available for exactly two sources.
            coef = ( 1 - 0.5 * sqrt ( ( 4.0 * beta - 3.0 ) / beta ) );
            result = coef * sq2bl + mu ;
            break;
            // case 4: == can be done similar to case 2 - is it worth it???
        default: // see accompanying document merge_lognorm_derivation.lyx
            coef = sqrt ( 1 - ( nu + 1 ) / ( 2 * beta * nu ) );
            // NOTE(review): "( nu - 3 ) / 2" is integer division, so for an
            // even number of sources the Bessel order is truncated (e.g.
            // nu==4 yields order 0, not 0.5).  Verify against the .lyx
            // derivation whether a half-integer order was intended.
            result = log ( besselk ( ( nu - 3 ) / 2, sq2bl * coef ) ) - log ( besselk ( ( nu - 3 ) / 2, sq2bl ) ) + mu;
            break;
        }
        break;
    }
    if ( infexist ) {
        // Re-expand onto the full support: merged values on the finite
        // columns, -inf (zero density) wherever a source degenerated.
        vec tmp = -inf * ones ( lW.cols() );
        set_subvector ( tmp, indW, result );
        return tmp;
    } else {
        return result;
    }
}

// One full run of mixture-based merging.
//
// Iteratively: (1) re-fit the mixture Mix (apparently a MixEF of ARX
// components -- see Mix_init below) to the weighted support points;
// (2) evaluate each source on the support, using the mixture predictor
// to marginalize/condition when a source covers only part of rv;
// (3) pool the per-source log-likelihoods via merge_points();
// (4) importance-reweight the support points, regenerating them from the
// predictor when the effective sample size 1/sum(w^2) drops below
// effss_coef * Npoints.  The loop runs a fixed stop_niter + 1 iterations
// (no other convergence test).  Intermediate quantities go to *dbg_file
// when DBG is set.
void merger_mix::merge ( ) {
    Array &Smp = eSmp._samples(); //aux
    vec &w = eSmp._w(); //aux
    mat Smp_ex = ones ( dim + 1, Npoints ); // Extended samples for the ARX model - the last row is ones
    for ( int i = 0; i < Npoints; i++ ) {
        set_col_part ( Smp_ex, i, Smp ( i ) );
    }
    if ( DBG ) *dbg_file << Name ( "Smp_0" ) << Smp_ex;

    // Stuff for merging
    vec lw_src ( Npoints );  // weights of the ith source
    vec lw_mix ( Npoints );  // weights of the approximating mixture
    vec lw ( Npoints );      // tmp
    mat lW = zeros ( Nsources, Npoints ); // array of weights of all sources
    vec vec0 ( 0 ); // NOTE(review): appears unused below -- candidate for removal

    //initialize importance weights
    lw_mix = 1.0; // assuming uniform grid density -- otherwise

    // Initial component in the mixture model
    mat V0 = 1e-8 * eye ( dim + 1 );
    ARX A0;
    A0.set_statistics ( dim, V0 );
    //initial guess of Mix:
    Mix.init ( &A0, Smp_ex, Ncoms );
    //Preserve initial mixture for repetitive estimation via flattening
    MixEF Mix_init ( Mix );

    // ============= MAIN LOOP ==================
    bool converged = false;
    int niter = 0;
    char dbg_str[100]; // filled by sprintf below; NOTE(review): snprintf would be safer against overflow
    emix* Mpred = Mix.epredictor ( );
    vec Mix_pdf ( Npoints );
    while ( !converged ) {
        //Re-estimate Mix
        //Re-Initialize Mixture model
        Mix.flatten ( &Mix_init );
        Mix.bayes_batch ( Smp_ex,empty_vec, w*Npoints );
        delete Mpred;
        Mpred = Mix.epredictor ( ); // Allocation => must be deleted at the end!!
        Mpred->set_rv ( rv ); //the predictor predicts rv of this merger

        // Resample the support when the effective sample size 1/sum(w^2)
        // falls below the configured fraction of Npoints.
        // This will be active only later in iterations!!!
        if ( 1. / sum_sqr ( w ) < effss_coef*Npoints ) {
            // Generate new samples
            eSmp.set_samples ( Mpred );
            for ( int i = 0; i < Npoints; i++ ) {
                //////////// !!!!!!!!!!!!!
                //if ( Smp ( i ) ( 2 ) <0 ) {Smp ( i ) ( 2 ) = 0.01; }
                set_col_part ( Smp_ex, i, Smp ( i ) );
                //Importance of the mixture
                //lw_mix ( i ) =Mix.logpred (Smp_ex.get_col(i) );
                lw_mix ( i ) = Mpred->evallog ( Smp ( i ) );
            }
            if ( DBG ) {
                cout << "Resampling =" << 1. / sum_sqr ( w ) << endl;
                cout << Mix.posterior().mean() << endl;
                cout << sum ( Smp_ex, 2 ) / Npoints << endl;
                cout << Smp_ex*Smp_ex.T() / Npoints << endl;
            }
        }
        if ( DBG ) {
            // Dump predictor moments, mixture log-pdf and samples for this iteration.
            sprintf ( dbg_str, "Mpred_mean%d", niter );
            *dbg_file << Name ( dbg_str ) << Mpred->mean();
            sprintf ( dbg_str, "Mpred_var%d", niter );
            *dbg_file << Name ( dbg_str ) << Mpred->variance();
            sprintf ( dbg_str, "Mpdf%d", niter );
            for ( int i = 0; i < Npoints; i++ ) {
                Mix_pdf ( i ) = Mix.logpred ( Smp_ex.get_col ( i ) );
            }
            *dbg_file << Name ( dbg_str ) << Mix_pdf;
            sprintf ( dbg_str, "Smp%d", niter );
            *dbg_file << Name ( dbg_str ) << Smp_ex;
        }

        //Importace weighting
        for ( int i = 0; i < mpdfs.length(); i++ ) {
            lw_src = 0.0;
            //======== Same RVs ===========
            //Split according to dependency in rvs
            if ( mpdfs ( i )->dimension() == dim ) {
                // no need for conditioning or marginalization
                lw_src = mpdfs ( i )->evallogcond_m ( Smp , vec(0));
            } else {
                // compute likelihood of marginal on the conditional variable
                if ( mpdfs ( i )->dimensionc() > 0 ) {
                    // Make marginal on rvc_i
                    shared_ptr tmp_marg = Mpred->marginal ( mpdfs ( i )->_rvc() );
                    //compute vector of lw_src
                    for ( int k = 0; k < Npoints; k++ ) {
                        // Here val of tmp_marg = cond of mpdfs(i) ==> calling dls->get_cond
                        lw_src ( k ) += tmp_marg->evallog ( dls ( i )->get_cond ( Smp ( k ) ) );
                    }
                    // sprintf ( str,"marg%d",niter );
                    // *dbg << Name ( str ) << lw_src;
                }
                // Compute likelihood of the missing variable
                if ( dim > ( mpdfs ( i )->dimension() + mpdfs ( i )->dimensionc() ) ) {
                    ///////////////
                    // There are variales unknown to mpdfs(i) : rvzs
                    shared_ptr tmp_cond = Mpred->condition ( rvzs ( i ) );
                    // Compute likelihood
                    vec lw_dbg = lw_src;
                    for ( int k = 0; k < Npoints; k++ ) {
                        lw_src ( k ) += log (
                            tmp_cond->evallogcond ( zdls ( i )->pushdown ( Smp ( k ) ),
                                                    zdls ( i )->get_cond ( Smp ( k ) ) ) );
                        if ( !std::isfinite ( lw_src ( k ) ) ) {
                            // Replace a non-finite log-likelihood by a large
                            // negative floor and flag it on stdout.
                            lw_src ( k ) = -1e16;
                            cout << "!";
                        }
                    }
                }
                // Compute likelihood of the partial source
                for ( int k = 0; k < Npoints; k++ ) {
                    lw_src ( k ) += mpdfs ( i )->evallogcond ( dls ( i )->pushdown ( Smp ( k ) ), dls ( i )->get_cond ( Smp ( k ) ));
                }
            }
            lW.set_row ( i, lw_src ); // do not divide by mix
        }

        lw = merger_base::merge_points ( lW ); //merge

        //Importance weighting
        lw -= lw_mix; // hoping that it is not numerically sensitive...
        w = exp ( lw - max ( lw ) ); // shift by max before exp for numerical safety

        //renormalize
        double sumw = sum ( w );
        if ( std::isfinite ( sumw ) ) {
            w = w / sumw;
        } else {
            // Weights blew up: dump them to a file for post-mortem analysis.
            it_file itf ( "merg_err.it" );
            itf << Name ( "w" ) << w;
        }
        if ( DBG ) {
            sprintf ( dbg_str, "lW%d", niter );
            *dbg_file << Name ( dbg_str ) << lW;
            sprintf ( dbg_str, "w%d", niter );
            *dbg_file << Name ( dbg_str ) << w;
            sprintf ( dbg_str, "lw_m%d", niter );
            *dbg_file << Name ( dbg_str ) << lw_mix;
        }

        // ==== stopping rule ===
        niter++;
        converged = ( niter > stop_niter ); // fixed iteration budget, not a true convergence test
    }
    delete Mpred;
    //	cout << endl;
}

// DEFAULTS FOR MERGER_BASE
const MERGER_METHOD merger_base::DFLT_METHOD = LOGNORMAL;
const double merger_base::DFLT_beta = 1.2;
// DEFAULTS FOR MERGER_MIX
const int merger_mix::DFLT_Ncoms = 10;
const double merger_mix::DFLT_effss_coef = 0.5;
}