Changeset 1013

Timestamp: 05/27/10 23:07:49 (15 years ago)
Author: smidl
Message: Flatten has an extra argument

Files: 8 modified

  • applications/bdmtoolbox/mex/mixef_init.cpp

    r1002 → r1013
    @@ -54,5 +54,5 @@
             MixEF mix;
             mix.init(&(*com1),Data,no_com);
    -        mix.bayes_batch(Data,zeros(0,Data.cols()),ones(Data.cols()));
    +//      mix.bayes_batch(Data,zeros(0,Data.cols()));
             shared_ptr<epdf> p=mix.epredictor(vec(0));
             UIFile prf;
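
    For orientation, a minimal sketch of the mex-side flow after this change, using only the calls visible in the hunk above. The include path, the bdm namespace, and the com0/Data/no_com arguments (built earlier in mixef_init.cpp from the mex inputs) are assumptions of the sketch, not part of the changeset; the explicit bayes_batch() sweep is commented out in the mex, and the two-argument bayes_batch(Data, cond) wrapper added in mixtures.h (further below) stays available if an extra unit-weight pass is wanted.

    #include "estim/mixtures.h"   // assumed include path within library/bdm
    using namespace bdm;
    using namespace itpp;

    shared_ptr<epdf> build_predictor ( BMEF *com0, const mat &Data, int no_com ) {
            MixEF mix;
            mix.init ( com0, Data, no_com );          // batch initialization of no_com components
            // mix.bayes_batch ( Data, zeros ( 0, Data.cols() ) );  // optional extra sweep (unit weights)
            shared_ptr<epdf> p = mix.epredictor ( vec ( 0 ) );      // predictor with empty condition
            return p;
    }
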
  • applications/bdmtoolbox/tutorial/userguide/mixef_basic.m

    r1005 → r1013
    @@ -5,11 +5,14 @@
     com.rgr = RV({},[],[]);
     
    -Data = randn(2,20);
    +Data = [randn(2,50) randn(2,50)+10*ones(2,50)];
     
     [Mix0,P0]=mixef_init(Data,com);
    -
     % show predictor
     
     Pred = bm_epredictor(Mix0);
    +figure(1);
    +hold off
    +plot(Data(1,:), Data(2,:),'.');
    +hold on
     epdf_2dplot(Pred);
     
    @@ -29,10 +32,11 @@
         % normalize weights
         w = exp(log_w_nn-max(log_w_nn));
    -    w = w/sum(w);
    +    w = w/sum(w)
     
         for c=1:n
             yt = Data(:,t);
    -        [MixQB.Coms{c}]=bm_bayes(MixQB.Coms{c}, yt);
    +        [MixQB.Coms{c}]=bm_bayesweighted(MixQB.Coms{c}, yt, [], w(c));
         end
    +    MixQB.weights = bm_bayes(MixQB.weights, w);
     end
     toc
    @@ -40,12 +44,15 @@
     tic
     % should be the same as:
    -% MixQBc = bm_bayes_batch(MixQB,Data);
    +MixQBc = bm_bayes_batch(MixQB,Data);
     toc
     
     %% display results
     
    +PredQB = bm_epredictor(MixQB);
     figure(2);
    -PredQB = bm_epredictor(MixQB);
    -epdf_2dplot(Pred);
    +hold off
    +plot(Data(1,:), Data(2,:),'.');
    +hold on
    +epdf_2dplot(PredQB);
     
     %% relations between components
    @@ -56,3 +63,4 @@
         end
     end
    +figure(3)
     imagesc(Batta_dist);
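
    A rough C++ analogue of the weighted quasi-Bayes step that the tutorial now performs, shown as a sketch rather than code from this changeset. It assumes log_w_nn has already been filled with the unnormalized log component weights for the current data point (as in the tutorial lines just above the shown hunk), that the components are held as plain BMEF pointers, and that multiBM::bayes follows the usual (yt, cond) interface.

    #include <vector>
    #include <itpp/itbase.h>
    #include "estim/mixtures.h"   // assumed include path; brings in BMEF, multiBM, empty_vec
    using namespace bdm;
    using namespace itpp;

    // One weighted quasi-Bayes step: normalize the responsibilities and use them
    // both as the weight of each component update and as the observation for the
    // multinomial estimator of the mixture weights.
    void qb_step ( std::vector<BMEF*> &Coms, multiBM &weights,
                   const vec &yt, const vec &log_w_nn ) {
            vec w = exp ( log_w_nn - max ( log_w_nn ) );   // subtract the max for numerical safety
            w /= sum ( w );                                // normalized responsibilities
            for ( int c = 0; c < (int) Coms.size(); c++ ) {
                    Coms[c]->bayes_weighted ( yt, empty_vec, w ( c ) );  // weighted update of component c
            }
            weights.bayes ( w, empty_vec );                // update the mixture-weight estimator
    }
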
  • library/bdm/estim/arx.cpp

    r1009 → r1013
    @@ -72,8 +72,8 @@
     }
     
    -void ARX::flatten ( const BMEF* B ) {
    +void ARX::flatten ( const BMEF* B , double weight =1.0) {
             const ARX* A = dynamic_cast<const ARX*> ( B );
             // nu should be equal to B.nu
    -        est.pow ( A->posterior()._nu() / posterior()._nu() );
    +        est.pow ( A->posterior()._nu() / posterior()._nu() *weight);
             if ( evalll ) {
                     last_lognc = est.lognc();
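
    The new weight simply scales the exponent applied to the posterior. Powering a conjugate exponential-family posterior by p scales its effective sample count by p, so a full flatten (weight = 1.0) brings the count back to roughly that of the reference B, weight < 1 flattens further, and weight > 1 less. A standalone illustration with made-up nu values:

    #include <iostream>

    int main() {
            double nu_B = 10.0;    // effective count of the reference posterior (assumed value)
            double nu   = 110.0;   // effective count after processing 100 more observations (assumed)
            const double weights_to_try[] = { 1.0, 0.5, 2.0 };
            for ( int i = 0; i < 3; i++ ) {
                    double weight = weights_to_try[i];
                    // exponent used in ARX::flatten above; the resulting count is nu * exponent
                    std::cout << "weight=" << weight
                              << "  exponent=" << nu_B / nu * weight
                              << "  resulting count=" << nu_B * weight << std::endl;
            }
            return 0;
    }
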
  • library/bdm/estim/arx.h

    r1009 → r1013
    @@ -89,5 +89,5 @@
             };
             double logpred ( const vec &yt, const vec &cond ) const;
    -        void flatten ( const BMEF* B );
    +        void flatten ( const BMEF* B , double weight );
             //! Conditioned version of the predictor
             enorm<ldmat>* epredictor ( const vec &rgr ) const;
  • library/bdm/estim/mixtures.cpp

    r1009 → r1013
    @@ -9,4 +9,5 @@
             Coms.set_size ( c );
             weights.set_parameters ( ones ( c ) ); //assume at least one observation in each comp.
    +        multiBM weights0(weights);
             //est will be done at the end
             //
    @@ -18,5 +19,6 @@
             Coms ( 0 )->bayes_batch ( Data );
             // Flatten it to its original shape
    -        Coms ( 0 )->flatten ( Com0 );
    +        shared_ptr<BMEF> SharpCom((BMEF*)Coms(0)->_copy());
    +        Coms ( 0 )->flatten ( Com0 );
     
             //Copy it to the rest
    @@ -32,13 +34,12 @@
                    } else { // pick at random
                            int ind = (int) floor ( ndat * UniRNG.sample() );
    -                       Coms ( i )->bayes ( Data.get_col ( ind ), empty_vec );
    +                       Coms ( i )->bayes_weighted ( Data.get_col ( ind ), empty_vec, ndat/Coms.length() );
                    }
    -               //flatten back to oringinal
    -               Coms ( i )->flatten ( Com0 );
    +               //sharpen to the sharp component
    +               //Coms ( i )->flatten ( SharpCom.get(), 1.0/Coms.length() );
             }
    -
     }
     
    -double MixEF::bayes_batch ( const mat &data , const mat &cond, const vec &wData ) {
    +double MixEF::bayes_batch_weighted ( const mat &data , const mat &cond, const vec &wData ) {
             int ndat = data.cols();
             int t, i, niter;
    @@ -133,9 +134,4 @@
     };
     
    -void MixEF::bayes ( const mat &data, const vec &cond = empty_vec ) {
    -        this->bayes_batch ( data, cond, ones ( data.cols() ) );
    -};
    -
    -
     double MixEF::logpred ( const vec &yt, const vec &cond =empty_vec) const {
     
    @@ -162,10 +158,10 @@
     }
     
    -void MixEF::flatten ( const BMEF* M2 ) {
    +void MixEF::flatten ( const BMEF* M2, double weight=1.0 ) {
             const MixEF* Mix2 = dynamic_cast<const MixEF*> ( M2 );
             bdm_assert_debug ( Mix2->Coms.length() == Coms.length(), "Different no of coms" );
             //Flatten each component
             for ( int i = 0; i < Coms.length(); i++ ) {
    -                Coms ( i )->flatten ( Mix2->Coms ( i ) );
    +                Coms ( i )->flatten ( Mix2->Coms ( i ) , weight);
             }
             //Flatten weights = make them equal!!
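
    A usage sketch of the renamed batch interface (not code from this changeset): bayes_batch now just forwards to bayes_batch_weighted with unit weights (see mixtures.h below), so the long form is only needed when the observations should not count equally. Here mix is assumed to be a MixEF initialized elsewhere, and the empty conditioning matrix mirrors the call in mixef_init.cpp.

    #include "estim/mixtures.h"   // assumed include path
    using namespace bdm;
    using namespace itpp;

    double weighted_pass ( MixEF &mix, const mat &Data ) {
            mat cond = zeros ( 0, Data.cols() );   // empty conditioning, as in mixef_init.cpp
            vec w = ones ( Data.cols() );
            w ( 0 ) = 0.1;                         // example only: down-weight the first observation
            return mix.bayes_batch_weighted ( Data, cond, w );
            // mix.bayes_batch ( Data, cond ) would be the unit-weight equivalent
    }
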
  • library/bdm/estim/mixtures.h

    r1009 → r1013
    @@ -98,8 +98,9 @@
             //! Recursive EM-like algorithm (QB-variant), see Karny et. al, 2006
             void bayes ( const vec &yt, const vec &cond );
    -        //! EM algorithm
    -        void bayes ( const mat &yt, const vec &cond );
             //! batch weighted Bayes rule
    -        double bayes_batch ( const mat &yt, const mat &cond, const vec &wData );
    +        double bayes_batch_weighted ( const mat &yt, const mat &cond, const vec &wData );
    +        double bayes_batch ( const mat &yt, const mat &cond){
    +                return bayes_batch_weighted(yt,cond,ones(yt.cols()));
    +        };
             double logpred ( const vec &yt, const vec &cond ) const;
             //! return correctly typed posterior (covariant return)
    @@ -110,5 +111,5 @@
             emix* epredictor(const vec &cond=vec()) const;
             //! Flatten the density as if it was not estimated from the data
    -        void flatten ( const BMEF* M2 );
    +        void flatten ( const BMEF* M2, double weight );
             //! Access function
             BMEF* _Coms ( int i ) {
  • library/bdm/stat/exp_family.cpp

    r996 → r1013
    @@ -377,9 +377,9 @@
             return pred.lognc() - lll;
     }
    -void multiBM::flatten ( const BMEF* B ) {
    +void multiBM::flatten ( const BMEF* B, double weight=1.0 ) {
             const multiBM* E = dynamic_cast<const multiBM*> ( B );
             // sum(beta) should be equal to sum(B.beta)
             const vec &Eb = E->beta;//const_cast<multiBM*> ( E )->_beta();
    -        beta *= ( sum ( Eb ) / sum ( beta ) );
    +        beta *= ( sum ( Eb ) / sum ( beta ) * weight);
             if ( evalll ) {
                     last_lognc = est.lognc();
    @@ -768,4 +768,3 @@
                     dimc = _beta.length();
             }
    -
    -};
    +}
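
    A standalone illustration (IT++ only, numbers made up) of what the weight does in multiBM::flatten above: the Dirichlet counts keep their proportions, but their total is rescaled to weight times the total count of the reference density, so weight < 1 gives a flatter result than a plain flatten.

    #include <iostream>
    #include <itpp/itbase.h>
    using namespace itpp;

    int main() {
            vec beta = "60.0 40.0";   // counts accumulated from data (sum = 100)
            vec Eb   = "1.0 1.0";     // counts of the reference (prior-shaped) density (sum = 2)
            double weight = 0.5;

            beta *= ( sum ( Eb ) / sum ( beta ) * weight );   // the update performed above

            std::cout << beta << std::endl;   // prints [0.6 0.4]: same proportions, total = weight*sum(Eb)
            return 0;
    }
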
  • library/bdm/stat/exp_family.h

    r1003 → r1013
    @@ -99,5 +99,5 @@
     
             //!Flatten the posterior according to the given BMEF (of the same type!)
    -        virtual void flatten ( const BMEF * B ) NOT_IMPLEMENTED_VOID;
    +        virtual void flatten ( const BMEF * B, double weight=1.0 ) NOT_IMPLEMENTED_VOID;;
     
     
    @@ -620,5 +620,5 @@
             double logpred ( const vec &yt ) const;
     
    -        void flatten ( const BMEF* B );
    +        void flatten ( const BMEF* B , double weight);
     
             //! return correctly typed posterior (covariant return)