/*!
  \file
  \brief Robust Bayesian auto-regression model
  \author Jan Sindelar.
*/

#ifndef ROBUST_H
#define ROBUST_H

// Included headers (inferred from usage below: numeric_limits, std::vector, std::unique and the IT++ vec type).
#include <limits>
#include <vector>
#include <algorithm>
#include <itpp/itbase.h>

using namespace bdm;
using namespace std;
using namespace itpp;

/// A large but still finite coordinate used for the outer vertices of the described parameter space
/// (numeric_limits<double>::max() scaled down so that the value does not overflow to infinity).
const double max_range = numeric_limits<double>::max()/1e5;

/// Actions a polyhedron can be notified about: a condition is being removed (MERGE) or added (SPLIT).
enum actions {MERGE, SPLIT};

class polyhedron;
class vertex;

/// A class describing a single polyhedron of the split complex. From a collection of such classes a Hasse diagram
/// of the structure in the exponent of a Laplace-Inverse-Gamma density will be created.
class polyhedron
{
    /// A property having a value of 1 usually, with a higher value only if the polyhedron arises as a coincidence of
    /// more than just the necessary number of conditions. For example, if a newly created line passes through an
    /// already existing point, the point's multiplicity rises by 1.
    int multiplicity;

    /// Sign of the polyhedron with respect to the condition currently being added (-1, 0 or 1).
    int split_state;

    /// Sign of the polyhedron with respect to the condition currently being removed (-1, 0 or 1).
    int merge_state;

public:
    /// A list of the polyhedron's parents within the Hasse diagram.
    vector<polyhedron*> parents;

    /// A list of the polyhedron's children within the Hasse diagram.
    vector<polyhedron*> children;

    /// All the vertices of the given polyhedron.
    vector<vertex*> vertices;

    /// A list used for storing children that lie in the positive region related to a certain condition.
    vector<polyhedron*> positivechildren;

    /// A list used for storing children that lie in the negative region related to a certain condition.
    vector<polyhedron*> negativechildren;

    /// Children intersecting the condition.
    vector<polyhedron*> neutralchildren;

    /// Grandchildren that lie entirely within the condition hyperplane.
    vector<polyhedron*> totallyneutralgrandchildren;

    /// Children that lie entirely within the condition hyperplane.
    vector<polyhedron*> totallyneutralchildren;

    /// True if the whole polyhedron lies within the condition hyperplane.
    bool totally_neutral;

    /// Neutral children remembered while the sign of this polyhedron with respect to a removed condition is unknown.
    vector<polyhedron*> mergechildren;

    /// The parent lying in the positive halfspace of a removed condition.
    polyhedron* positiveparent;

    /// The parent lying in the negative halfspace of a removed condition.
    polyhedron* negativeparent;

    /// Number of messages received from children during the message-passing phase.
    int message_counter;

    /// A list of triangulation polyhedrons of this polyhedron, each given by its set of vertices.
    vector<vector<vertex*>> triangulations;

    /// A list of relative addresses serving for Hasse diagram construction.
    vector<int> kids_rel_addresses;

    /// Default constructor
    polyhedron()
    {
        multiplicity = 1;
        message_counter = 0;
        totally_neutral = false;
    }

    /// Setter for raising multiplicity
    void raise_multiplicity()
    {
        multiplicity++;
    }

    /// Setter for lowering multiplicity
    void lower_multiplicity()
    {
        multiplicity--;
    }

    /// An obligatory operator, when the class is used within a C++ STL structure like a vector
    int operator==(polyhedron polyhedron2)
    {
        return true;
    }

    /// An obligatory operator, when the class is used within a C++ STL structure like a vector
    int operator<(polyhedron polyhedron2)
    {
        return false;
    }

    /// Records the sign of the polyhedron with respect to the condition of the given action.
    void set_state(double state_indicator, actions action)
    {
        switch(action)
        {
            case MERGE:
                merge_state = (int)sign(state_indicator);
                break;
            case SPLIT:
                split_state = (int)sign(state_indicator);
                break;
        }
    }

    /// Returns the recorded sign of the polyhedron with respect to the condition of the given action.
    int get_state(actions action)
    {
        switch(action)
        {
            case MERGE:
                return merge_state;
            case SPLIT:
                return split_state;
        }

        return 0;
    }

    /// Number of children of this polyhedron in the Hasse diagram.
    int number_of_children()
    {
        return children.size();
    }
};
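// The Hasse diagram is wired by hand through the parents, children and vertices vectors and both
// ends of every parent/child relation have to be recorded. A minimal illustration (not part of the
// library, the coordinate values are arbitrary): a 1-dimensional complex consisting of one segment
// with two vertex endpoints would be built roughly as
//
//   vertex* left  = new vertex(vec("-1.0"));
//   vertex* right = new vertex(vec("1.0"));
//   polyhedron* segment = new polyhedron();
//   segment->children.push_back(left);      // downward links of the segment
//   segment->children.push_back(right);
//   segment->vertices.push_back(left);      // a polyhedron also keeps the full list of its vertices
//   segment->vertices.push_back(right);
//   left->parents.push_back(segment);       // the upward links have to be added separately
//   right->parents.push_back(segment);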
/// A class representing a 0-dimensional polyhedron - a vertex. It will be located in the bottom row of the Hasse
/// diagram representing a complex of polyhedrons. It has its coordinates in the parameter space.
class vertex : public polyhedron
{
    /// A dynamic array representing the coordinates of the vertex
    vec coordinates;

public:
    /// Default constructor
    vertex();

    /// Constructor of a vertex from a set of coordinates
    vertex(vec coordinates)
    {
        this->coordinates = coordinates;
    }

    /// A method that widens the set of coordinates of the given vertex. It is used when a complex in a parameter
    /// space of certain dimension is established, but the dimension is not known when the vertex is created.
    void push_coordinate(double coordinate)
    {
        coordinates = concat(coordinates,coordinate);
    }

    /// A method obtaining the set of coordinates of a vertex. The coordinates are not returned by reference,
    /// but as a newly created copy (they are returned by value).
    vec get_coordinates()
    {
        return coordinates;
    }
};

/// A class representing a polyhedron in the top row of the complex. Such a polyhedron has a condition that
/// differentiates it from the polyhedrons in other rows.
class toprow : public polyhedron
{
public:
    /// A condition used for determining the function of a Laplace-Inverse-Gamma density resulting from Bayesian estimation
    vec condition;

    /// Default constructor
    toprow();

    /// Constructor creating a toprow from the condition
    toprow(vec condition)
    {
        this->condition = condition;
    }
};

/// A condition of the emlig density together with the number of times it has been added.
class condition
{
public:
    /// The condition vector itself.
    vec value;

    /// How many times the same condition has been added to the density.
    int multiplicity;

    condition(vec value)
    {
        this->value = value;
        multiplicity = 1;
    }
};

//! Conditional Multicriteria-Laplace-Inverse-Gamma distribution density (emlig)
class emlig // : eEF
{
    /// A statistic in a form of a Hasse diagram representing a complex of convex polyhedrons obtained as a result
    /// of data update from Bayesian estimation or set by the user if this emlig is a prior density
    vector<vector<polyhedron*>> statistic;

    /// Rows of polyhedrons scheduled for splitting when a condition is added.
    vector<vector<polyhedron*>> for_splitting;

    /// Rows of polyhedrons scheduled for merging when a condition is removed.
    vector<vector<polyhedron*>> for_merging;

    /// The list of conditions of the density together with their multiplicities.
    vector<condition*> conditions;

    /// Normalization factor of the density.
    double normalization_factor;

    /// Adds (or subtracts) the given condition to the conditions of all the toprow polyhedrons, depending on
    /// which halfspace of the condition the toprow lies in.
    void alter_toprow_conditions(vec condition, bool should_be_added)
    {
        for(vector<polyhedron*>::iterator horiz_ref = statistic[statistic.size()-1].begin();horiz_ref<statistic[statistic.size()-1].end();horiz_ref++)
        {
            double product = 0;

            vector<vertex*>::iterator vertex_ref = (*horiz_ref)->vertices.begin();

            do
            {
                product = (*vertex_ref)->get_coordinates()*condition;

                // Move to the next vertex, so that the first vertex with a nonzero product decides the sign.
                vertex_ref++;
            }
            while(product == 0);

            if((product>0 && should_be_added)||(product<0 && !should_be_added))
            {
                ((toprow*) (*horiz_ref))->condition += condition;
            }
            else
            {
                ((toprow*) (*horiz_ref))->condition -= condition;
            }
        }
    }
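    // A condition vector c = (c_0, c_1, ..., c_n) is evaluated at a vertex with coordinates
    // theta = (theta_1, ..., theta_n) as the product c*(1, theta), i.e. the leading element c_0
    // multiplies a prepended constant 1 (see add_and_remove_condition below). A small worked
    // example with arbitrary values: for c = (1.2, -0.7, 0.5) and a vertex at theta = (1, 1) the
    // product is 1.2 - 0.7 + 0.5 = 1.0 > 0, so the vertex lies in the positive halfspace of the
    // condition; a product of exactly zero means the vertex lies on the condition hyperplane.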
    /// Recursively propagates the split/merge state of a polyhedron to its parents in the Hasse diagram.
    /// A parent acts only after it has received a message from each of its children.
    void send_state_message(polyhedron* sender, bool shouldsplit, bool shouldmerge, int level)
    {
        if(shouldsplit||shouldmerge)
        {
            for(vector<polyhedron*>::iterator parent_iterator = sender->parents.begin();parent_iterator<sender->parents.end();parent_iterator++)
            {
                polyhedron* current_parent = *parent_iterator;

                current_parent->message_counter++;

                bool is_last = (current_parent->message_counter == current_parent->number_of_children());

                if(shouldmerge)
                {
                    int child_state  = sender->get_state(MERGE);
                    int parent_state = current_parent->get_state(MERGE);

                    if(parent_state == 0)
                    {
                        current_parent->set_state(child_state, MERGE);

                        if(child_state == 0)
                        {
                            current_parent->mergechildren.push_back(sender);
                        }
                    }
                    else
                    {
                        if(child_state == 0)
                        {
                            if(parent_state > 0)
                            {
                                sender->positiveparent = current_parent;
                            }
                            else
                            {
                                sender->negativeparent = current_parent;
                            }
                        }
                    }

                    if(is_last)
                    {
                        if(parent_state > 0)
                        {
                            for(vector<polyhedron*>::iterator merge_child = current_parent->mergechildren.begin(); merge_child < current_parent->mergechildren.end();merge_child++)
                            {
                                (*merge_child)->positiveparent = current_parent;
                            }
                        }

                        if(parent_state < 0)
                        {
                            for(vector<polyhedron*>::iterator merge_child = current_parent->mergechildren.begin(); merge_child < current_parent->mergechildren.end();merge_child++)
                            {
                                (*merge_child)->negativeparent = current_parent;
                            }
                        }

                        if(parent_state == 0)
                        {
                            for_merging[level+1].push_back(current_parent);
                        }

                        current_parent->mergechildren.clear();
                    }
                }

                if(shouldsplit)
                {
                    current_parent->totallyneutralgrandchildren.insert(current_parent->totallyneutralgrandchildren.end(),sender->totallyneutralchildren.begin(),sender->totallyneutralchildren.end());

                    switch(sender->get_state(SPLIT))
                    {
                        case 1:
                            current_parent->positivechildren.push_back(sender);
                            break;
                        case 0:
                            current_parent->neutralchildren.push_back(sender);

                            if(current_parent->totally_neutral == false)
                            {
                                current_parent->totally_neutral = sender->totally_neutral;
                            }
                            else
                            {
                                current_parent->totally_neutral = current_parent->totally_neutral && sender->totally_neutral;
                            }

                            if(sender->totally_neutral)
                            {
                                current_parent->totallyneutralchildren.push_back(sender);
                            }

                            break;
                        case -1:
                            current_parent->negativechildren.push_back(sender);
                            break;
                    }

                    if(is_last)
                    {
                        // std::unique only moves the duplicates to the end of the range; the returned iterator
                        // has to be used to erase them from the vector.
                        current_parent->totallyneutralgrandchildren.erase(
                            unique(current_parent->totallyneutralgrandchildren.begin(),current_parent->totallyneutralgrandchildren.end()),
                            current_parent->totallyneutralgrandchildren.end());

                        if((current_parent->negativechildren.size()>0&&current_parent->positivechildren.size()>0)||
                           (current_parent->neutralchildren.size()>0&&current_parent->totally_neutral==false))
                        {
                            for_splitting[level+1].push_back(current_parent);

                            current_parent->set_state(0, SPLIT);
                        }
                        else
                        {
                            if(current_parent->negativechildren.size()>0)
                            {
                                current_parent->set_state(-1, SPLIT);
                            }
                            else if(current_parent->positivechildren.size()>0)
                            {
                                current_parent->set_state(1, SPLIT);
                            }
                            else
                            {
                                current_parent->raise_multiplicity();
                            }

                            current_parent->positivechildren.clear();
                            current_parent->negativechildren.clear();
                            current_parent->neutralchildren.clear();
                            current_parent->totallyneutralchildren.clear();
                            current_parent->totallyneutralgrandchildren.clear();
                            current_parent->totally_neutral = false;
                        }
                    }
                }

                if(is_last)
                {
                    send_state_message(current_parent,shouldsplit,shouldmerge,level+1);
                }
            }
        }
    }

public:
    /// A default constructor creates an emlig with a predefined statistic representing only the range of the given
    /// parametric space, where the number of parameters of the needed model is given as a parameter to the constructor.
    emlig(int number_of_parameters)
    {
        create_statistic(number_of_parameters);

        for(int i = 0;i<number_of_parameters+1;i++)
        {
            vector<polyhedron*> empty_split;
            vector<polyhedron*> empty_merge;

            for_splitting.push_back(empty_split);
            for_merging.push_back(empty_merge);
        }
    }
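    // A minimal usage sketch (illustrative only - the numeric values are arbitrary): for a model with
    // two parameters the statistic spans a 2-dimensional parameter space and every condition vector
    // has 2 + 1 elements, the leading one multiplying the prepended constant 1:
    //
    //   emlig prior(2);
    //   prior.add_condition(vec("1.2 -0.7 0.5"));     // add a data-induced condition
    //   prior.remove_condition(vec("1.2 -0.7 0.5"));  // and remove it again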
    /// A constructor for creating an emlig when the user wants to create the statistic by himself. The creation of a
    /// statistic is then needed outside the constructor. Used for a user defined prior distribution on the parameters.
    emlig(vector<vector<polyhedron*>> statistic)
    {
        this->statistic = statistic;
    }

    /// Adds a condition to the density.
    void add_condition(vec toadd)
    {
        vec null_vector = "";

        add_and_remove_condition(toadd, null_vector);
    }

    /// Removes a condition from the density.
    void remove_condition(vec toremove)
    {
        vec null_vector = "";

        add_and_remove_condition(null_vector, toremove);
    }

    /// Adds and/or removes a condition in a single pass through the statistic. An empty vector means that the
    /// corresponding operation is not performed.
    void add_and_remove_condition(vec toadd, vec toremove)
    {
        bool should_remove = (toremove.size() != 0);
        bool should_add    = (toadd.size() != 0);

        vector<condition*>::iterator toremove_ref = conditions.end();
        // Unless the condition is found among the already present ones below, a new condition object will be created.
        bool condition_should_be_added = should_add;

        // First the bookkeeping of the list of conditions and their multiplicities.
        for(vector<condition*>::iterator ref = conditions.begin();ref<conditions.end();ref++)
        {
            if(should_remove)
            {
                if((*ref)->value == toremove)
                {
                    if((*ref)->multiplicity>1)
                    {
                        (*ref)->multiplicity--;

                        alter_toprow_conditions(toremove,false);

                        should_remove = false;
                    }
                    else
                    {
                        toremove_ref = ref;
                    }
                }
            }

            if(should_add)
            {
                if((*ref)->value == toadd)
                {
                    (*ref)->multiplicity++;

                    alter_toprow_conditions(toadd,true);

                    should_add = false;
                    // The condition is already present, so it must not be inserted a second time.
                    condition_should_be_added = false;
                }
                else
                {
                    condition_should_be_added = true;
                }
            }
        }

        if(toremove_ref!=conditions.end())
        {
            conditions.erase(toremove_ref);
        }

        if(condition_should_be_added)
        {
            conditions.push_back(new condition(toadd));
        }

        // Now every vertex of the complex is classified with respect to the added and/or removed condition
        // and the result is propagated up the Hasse diagram by message passing.
        for(vector<polyhedron*>::iterator horizontal_position = statistic[0].begin();horizontal_position<statistic[0].end();horizontal_position++)
        {
            vertex* current_vertex = (vertex*)(*horizontal_position);

            if(should_add||should_remove)
            {
                vec appended_vec = current_vertex->get_coordinates();
                appended_vec.ins(0,1.0);

                if(should_add)
                {
                    double local_condition = toadd*appended_vec;

                    current_vertex->set_state(local_condition,SPLIT);

                    if(local_condition == 0)
                    {
                        current_vertex->totally_neutral = true;

                        current_vertex->raise_multiplicity();
                    }
                }

                if(should_remove)
                {
                    double local_condition = toremove*appended_vec;

                    current_vertex->set_state(local_condition,MERGE);

                    if(local_condition == 0)
                    {
                        for_merging[0].push_back(current_vertex);
                    }
                }
            }

            send_state_message(current_vertex, should_add, should_remove, 0);
        }

        // The polyhedrons gathered in for_splitting are processed row by row.
        for(vector<vector<polyhedron*>>::iterator vert_ref = for_splitting.begin();vert_ref<for_splitting.end();vert_ref++)
        {
            // ...
        }
    }

    /// A method creating the default statistic, which represents only the range of the parametric space
    /// for the given number of parameters.
    void create_statistic(int number_of_parameters)
    {
        // We start with a single vertex - the origin of the parameter space. Its coordinates will be
        // added dimension by dimension in the widening loop below.
        vec empty_coords;
        vertex *origin = new vertex(empty_coords);

        // Every vertex lists itself among its own vertices.
        origin->vertices.push_back(origin);

        // As a statistic, we have to create a vector of vectors of polyhedron pointers. It will then represent the
        // Hasse diagram. First we create a vector of polyhedrons..
        vector<polyhedron*> origin_vec;

        // ..we fill it with the origin..
        origin_vec.push_back(origin);

        // ..and we fill the statistic with the created vector.
        statistic.push_back(origin_vec);

        // Now we have a statistic for a zero-dimensional space. Depending on how many dimensions the described
        // space should have, we have to widen the default statistic. We use an iterative procedure as follows:
        for(int i=0;i<number_of_parameters;i++)
        {
            // First we read the coordinates of the origin - so far a vector of i zeros..
            vec origin_coord = origin->get_coordinates();

            // ..and we incorporate the nonzero coordinates into the new coordinate vectors.
            vec origin_coord1 = concat(origin_coord,max_range);
            vec origin_coord2 = concat(origin_coord,-max_range);

            // Now we create the points
            vertex *new_point1 = new vertex(origin_coord1);
            vertex *new_point2 = new vertex(origin_coord2);

            // Again, each of the new vertices lists itself among its own vertices.
            new_point1->vertices.push_back(new_point1);
            new_point2->vertices.push_back(new_point2);

            //*********************************************************************************************************
            // The algorithm for the recursive build of a new Hasse diagram representing the space structure from the
            // old diagram works so that we create two copies of the old Hasse diagram, shift them up one level (points
            // become segments, segments become areas etc.) and connect each of the original copied polyhedrons with
            // its offspring by a parent-child relation. Also each of the segments in the first (second) copy is
            // connected to the first (second) newly created vertex by a parent-child relation.
            //*********************************************************************************************************
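            // A worked example of the first two widening steps (illustrative only): starting from the origin o with
            // no coordinates, step i = 0 creates the points p1 = (max_range) and p2 = (-max_range), turns the copy of
            // o into the two segments [o, p1] and [o, p2] and pushes a zero coordinate to o. Step i = 1 then creates
            // q1 = (0, max_range) and q2 = (0, -max_range); the copies of the two segments become four triangles and
            // the copies of o, p1 and p2 become segments ending in q1 or q2, so after the step the statistic covers
            // the whole 2-dimensional range.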
            // Create the vectors of vectors of pointers to polyhedrons to hold the copies of the old Hasse diagram
            vector<vector<polyhedron*>> new_statistic1;
            vector<vector<polyhedron*>> new_statistic2;

            // Copy the statistic by rows
            for(int j=0;j<statistic.size();j++)
            {
                // The relative address of the current polyhedron within its row; the children report it to
                // their parents through kids_rel_addresses.
                int element_number = 0;

                vector<polyhedron*> supportnew_1;
                vector<polyhedron*> supportnew_2;

                new_statistic1.push_back(supportnew_1);
                new_statistic2.push_back(supportnew_2);

                // for each polyhedron in the given row
                for(vector<polyhedron*>::iterator horiz_ref = statistic[j].begin();horiz_ref<statistic[j].end();horiz_ref++)
                {
                    // if j == 0 -> we loop through vertices
                    if(j == 0)
                    {
                        // cast the polyhedron pointer to a vertex pointer and push a zero to its vector of coordinates
                        ((vertex*) (*horiz_ref))->push_coordinate(0);
                    }

                    // if it has parents
                    if(!(*horiz_ref)->parents.empty())
                    {
                        // save the relative address of this child in a vector kids_rel_addresses of all its parents.
                        // This information will later be used for copying the whole Hasse diagram with each of the
                        // relations contained within.
                        for(vector<polyhedron*>::iterator parent_ref = (*horiz_ref)->parents.begin();parent_ref < (*horiz_ref)->parents.end();parent_ref++)
                        {
                            (*parent_ref)->kids_rel_addresses.push_back(element_number);
                        }
                    }

                    // **************************************************************************************************
                    // Here we begin creating a new polyhedron, which will be a copy of the old one. Each such polyhedron
                    // will be created as a toprow, but this information will be later forgotten and only the polyhedrons
                    // in the top row of the Hasse diagram will be considered toprow for later use.
                    // **************************************************************************************************

                    // First we create vectors specifying a toprow condition. In the case of a preconstructed statistic
                    // this condition will be a vector of zeros. There are two vectors, because we need two copies of
                    // the original Hasse diagram.
                    vec vec1(i+2);
                    vec1.zeros();

                    vec vec2(i+2);
                    vec2.zeros();

                    // We create a new toprow with the previously specified condition.
                    toprow *current_copy1 = new toprow(vec1);
                    toprow *current_copy2 = new toprow(vec2);

                    // The vertices of the copies will be inherited, because there will be a parent/child relation
                    // between each polyhedron and its offspring (coming from the copy) and a parent has all the
                    // vertices of its child plus more.
                    for(vector<vertex*>::iterator vert_ref = (*horiz_ref)->vertices.begin();vert_ref<(*horiz_ref)->vertices.end();vert_ref++)
                    {
                        current_copy1->vertices.push_back(*vert_ref);
                        current_copy2->vertices.push_back(*vert_ref);
                    }

                    // The only new vertex of the offspring should be the newly created point.
                    current_copy1->vertices.push_back(new_point1);
                    current_copy2->vertices.push_back(new_point2);

                    // This method guarantees that each polyhedron is already triangulated, therefore its triangulation
                    // is only one set of vertices and it is the set of all its vertices.
                    current_copy1->triangulations.push_back(current_copy1->vertices);
                    current_copy2->triangulations.push_back(current_copy2->vertices);

                    // Now we have copied the polyhedron and we have to copy all of its relations. Because we are copying
                    // in the Hasse diagram from bottom up, we always have to copy the parent/child relations to all the
                    // kids and when we do that and know the child, in the child we will remember the parent we came from.
                    // This way all the parents/children relations are saved in both the parent and the child.
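                    // For example, if the polyhedron being copied has its children stored at positions 0 and 2 of the
                    // previous row, those children have already pushed the relative addresses 0 and 2 into its
                    // kids_rel_addresses, and the loop below looks their copies up as new_statistic1[j-1][0] and
                    // new_statistic1[j-1][2] (and likewise in new_statistic2).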
                    if(!(*horiz_ref)->kids_rel_addresses.empty())
                    {
                        for(vector<int>::iterator kid_ref = (*horiz_ref)->kids_rel_addresses.begin();kid_ref<(*horiz_ref)->kids_rel_addresses.end();kid_ref++)
                        {
                            // find the child and save the relation to the parent
                            current_copy1->children.push_back(new_statistic1[j-1][(*kid_ref)]);
                            current_copy2->children.push_back(new_statistic2[j-1][(*kid_ref)]);

                            // in the child save the parents' address
                            new_statistic1[j-1][(*kid_ref)]->parents.push_back(current_copy1);
                            new_statistic2[j-1][(*kid_ref)]->parents.push_back(current_copy2);
                        }

                        // Here we clear the parent's kids_rel_addresses vector for later use (when we need to widen the
                        // Hasse diagram again)
                        (*horiz_ref)->kids_rel_addresses.clear();
                    }
                    // If there were no children previously, we are copying a polyhedron that has been a vertex before.
                    // In this case it is a segment now and it will have a relation to its mother (copywise) and to the
                    // newly created point. Here we create the connection to the new point, again from both sides.
                    else
                    {
                        // Add the address of the new point in the former vertex
                        current_copy1->children.push_back(new_point1);
                        current_copy2->children.push_back(new_point2);

                        // Add the address of the former vertex in the new point
                        new_point1->parents.push_back(current_copy1);
                        new_point2->parents.push_back(current_copy2);
                    }

                    // Save the mother in its offspring
                    current_copy1->children.push_back(*horiz_ref);
                    current_copy2->children.push_back(*horiz_ref);

                    // Save the offspring in its mother
                    (*horiz_ref)->parents.push_back(current_copy1);
                    (*horiz_ref)->parents.push_back(current_copy2);

                    // Add the copies into the relevant statistic. The statistic will later be appended to the previous
                    // Hasse diagram
                    new_statistic1[j].push_back(current_copy1);
                    new_statistic2[j].push_back(current_copy2);

                    // Raise the count in the vector of polyhedrons
                    element_number++;
                }
            }

            statistic[0].push_back(new_point1);
            statistic[0].push_back(new_point2);

            // Merge the new statistics into the old one. This will either be the final statistic or we will
            // reenter the widening loop.
            for(int j=0;j<new_statistic1.size();j++)
            {
                if(j+1 == statistic.size())
                {
                    vector<polyhedron*> support;

                    statistic.push_back(support);
                }

                statistic[j+1].insert(statistic[j+1].end(),new_statistic1[j].begin(),new_statistic1[j].end());
                statistic[j+1].insert(statistic[j+1].end(),new_statistic2[j].begin(),new_statistic2[j].end());
            }
        }
    }
};

/*
//! Robust Bayesian AR model for Multicriteria-Laplace-Inverse-Gamma density
class RARX : public BM
{
private:
    emlig posterior;

public:
    RARX():BM()
    {
    };

    void bayes(const itpp::vec &yt, const itpp::vec &cond = empty_vec)
    {
    }
};
*/

#endif // ROBUST_H