Changeset 1013 for library/bdm/estim
- Timestamp: 05/27/10 23:07:49 (14 years ago)
- Location: library/bdm/estim
- Files: 4 modified
Legend:
- Unmodified
- Added
- Removed
library/bdm/estim/arx.cpp
r1009 r1013 72 72 } 73 73 74 void ARX::flatten ( const BMEF* B ) {74 void ARX::flatten ( const BMEF* B , double weight =1.0) { 75 75 const ARX* A = dynamic_cast<const ARX*> ( B ); 76 76 // nu should be equal to B.nu 77 est.pow ( A->posterior()._nu() / posterior()._nu() );77 est.pow ( A->posterior()._nu() / posterior()._nu() *weight); 78 78 if ( evalll ) { 79 79 last_lognc = est.lognc(); -
library/bdm/estim/arx.h
r1009 r1013 89 89 }; 90 90 double logpred ( const vec &yt, const vec &cond ) const; 91 void flatten ( const BMEF* B );91 void flatten ( const BMEF* B , double weight ); 92 92 //! Conditioned version of the predictor 93 93 enorm<ldmat>* epredictor ( const vec &rgr ) const; -
library/bdm/estim/mixtures.cpp
r1009 r1013 9 9 Coms.set_size ( c ); 10 10 weights.set_parameters ( ones ( c ) ); //assume at least one observation in each comp. 11 multiBM weights0(weights); 11 12 //est will be done at the end 12 13 // … … 18 19 Coms ( 0 )->bayes_batch ( Data ); 19 20 // Flatten it to its original shape 20 Coms ( 0 )->flatten ( Com0 ); 21 shared_ptr<BMEF> SharpCom((BMEF*)Coms(0)->_copy()); 22 Coms ( 0 )->flatten ( Com0 ); 21 23 22 24 //Copy it to the rest … … 32 34 } else { // pick at random 33 35 int ind = (int) floor ( ndat * UniRNG.sample() ); 34 Coms ( i )->bayes ( Data.get_col ( ind ), empty_vec);36 Coms ( i )->bayes_weighted ( Data.get_col ( ind ), empty_vec, ndat/Coms.length() ); 35 37 } 36 // flatten back to oringinal37 Coms ( i )->flatten ( Com0);38 //sharpen to the sharp component 39 //Coms ( i )->flatten ( SharpCom.get(), 1.0/Coms.length() ); 38 40 } 39 40 41 } 41 42 42 double MixEF::bayes_batch ( const mat &data , const mat &cond, const vec &wData ) {43 double MixEF::bayes_batch_weighted ( const mat &data , const mat &cond, const vec &wData ) { 43 44 int ndat = data.cols(); 44 45 int t, i, niter; … … 133 134 }; 134 135 135 void MixEF::bayes ( const mat &data, const vec &cond = empty_vec ) {136 this->bayes_batch ( data, cond, ones ( data.cols() ) );137 };138 139 140 136 double MixEF::logpred ( const vec &yt, const vec &cond =empty_vec) const { 141 137 … … 162 158 } 163 159 164 void MixEF::flatten ( const BMEF* M2 ) {160 void MixEF::flatten ( const BMEF* M2, double weight=1.0 ) { 165 161 const MixEF* Mix2 = dynamic_cast<const MixEF*> ( M2 ); 166 162 bdm_assert_debug ( Mix2->Coms.length() == Coms.length(), "Different no of coms" ); 167 163 //Flatten each component 168 164 for ( int i = 0; i < Coms.length(); i++ ) { 169 Coms ( i )->flatten ( Mix2->Coms ( i ) );165 Coms ( i )->flatten ( Mix2->Coms ( i ) , weight); 170 166 } 171 167 //Flatten weights = make them equal!! -
library/bdm/estim/mixtures.h
r1009 r1013 98 98 //! Recursive EM-like algorithm (QB-variant), see Karny et. al, 2006 99 99 void bayes ( const vec &yt, const vec &cond ); 100 //! EM algorithm101 void bayes ( const mat &yt, const vec &cond );102 100 //! batch weighted Bayes rule 103 double bayes_batch ( const mat &yt, const mat &cond, const vec &wData ); 101 double bayes_batch_weighted ( const mat &yt, const mat &cond, const vec &wData ); 102 double bayes_batch ( const mat &yt, const mat &cond){ 103 return bayes_batch_weighted(yt,cond,ones(yt.cols())); 104 }; 104 105 double logpred ( const vec &yt, const vec &cond ) const; 105 106 //! return correctly typed posterior (covariant return) … … 110 111 emix* epredictor(const vec &cond=vec()) const; 111 112 //! Flatten the density as if it was not estimated from the data 112 void flatten ( const BMEF* M2 );113 void flatten ( const BMEF* M2, double weight ); 113 114 //! Access function 114 115 BMEF* _Coms ( int i ) {