Changeset 1384 for applications/robust
- Timestamp: 09/01/11 20:13:22 (13 years ago)
- Location: applications/robust
- Files: 3 modified
applications/robust/main.cpp  (r1383 -> r1384)

@@ 27-32 @@
 //const int utility_constant = 5;

-const int max_model_order = 1;
-const double apriorno = 0.01;
+const int max_model_order = 2;
+const double apriorno = 0.001;

 /*

@@ 99-108 @@
 if(has_constant)
 {
-	my_rarx = new RARX(ar_components.size()+1,window_size,true);
+	my_rarx = new RARX(ar_components.size()+1,window_size,true,apriorno*sqrt(2.0),apriorno*sqrt(2.0),ar_components.size()+4);
 	my_arx = NULL;
 }
 else
 {
-	my_rarx = new RARX(ar_components.size(),window_size,false);
+	my_rarx = new RARX(ar_components.size(),window_size,false,apriorno*sqrt(2.0),apriorno*sqrt(2.0),ar_components.size()+3);
 	my_arx = NULL;
 }

@@ 117-121 @@
 {
 	V0 = apriorno * eye(ar_components.size()+2); //aj tu konst
-	V0(0,0) = 0;
+	//V0(0,0) = 0;
 	my_arx->set_constant(true);
 }

@@ 124-133 @@

 	V0 = apriorno * eye(ar_components.size()+1);//menit konstantu
-	V0(0,0) = 0;
+	//V0(0,0) = 0;
 	my_arx->set_constant(false);

 }

-my_arx->set_statistics(1, V0, V0.rows()+1);
+my_arx->set_statistics(1, V0, V0.rows()+2);
 my_arx->set_parameters(window_size);
 my_arx->validate();

@@ 274-278 @@
 vector<vector<string>> strings;

-char* file_string = "c:\\ar_normal_single"; // "c:\\dataTYClosePercDiff"; //
+char* file_string = "C:\\Users\\Hontik\\Desktop\\Bayes\\dataADClosePercDiff"; // "C:\\Users\\Hontik\\Desktop\\Bayes\\ar_normal_single"; //

 char dfstring[80];

@@ 316-323 @@
 for(int window_size = 50;window_size < 51;window_size++)
 {
-	//models.push_back(new model((*model_type),true,true,window_size,0,&data_matrix)); // to su len konstruktory, len inicializujeme rozne typy
-	//models.push_back(new model((*model_type),false,true,window_size,0,&data_matrix));
+	models.push_back(new model((*model_type),true,true,window_size,0,&data_matrix)); // to su len konstruktory, len inicializujeme rozne typy
+	models.push_back(new model((*model_type),false,true,window_size,0,&data_matrix));
 	models.push_back(new model((*model_type),true,false,window_size,0,&data_matrix));
-	//models.push_back(new model((*model_type),false,false,window_size,0,&data_matrix));
+	models.push_back(new model((*model_type),false,false,window_size,0,&data_matrix));
 }

@@ 349-356 @@
 if((*model_ref)->my_rarx!=NULL) //vklada normalizacnz faktor do cur_res_lognc
 {
-	cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_rarx->posterior->log_nc);
+	cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_rarx->posterior->_ll());
 }
 else
 {
-	cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_arx->posterior().lognc());
+	cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_arx->_ll());
 }

@@ 382-389 @@  (blank lines removed around the commented-out // cout << "Updated." << endl; line)
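The substantive change in main.cpp is the prior setup of the two competing estimators: the robust RARX now gets explicit prior deviations (apriorno*sqrt(2.0)) and a degrees-of-freedom argument, the classical ARX keeps a fully regular prior matrix V0 (the V0(0,0) = 0 line is commented out) and its prior degrees of freedom rise from V0.rows()+1 to V0.rows()+2, while apriorno itself drops from 0.01 to 0.001. The following is a minimal illustrative sketch, not part of the changeset; build_arx_prior is a hypothetical helper, the RARX/set_statistics calls are quoted from the diff as comments, and IT++ types are assumed:

#include <itpp/itbase.h>
using namespace itpp;

// Illustrative sketch only (not part of the changeset): how the two estimators are
// primed after r1384. RARX and set_statistics are the application's own interfaces,
// so their calls are left as comments; only the IT++ prior matrix is built for real.
mat build_arx_prior(int ar_order, bool has_constant)
{
    const double apriorno = 0.001;            // new, smaller prior scale (was 0.01)
    int dim = ar_order + (has_constant ? 1 : 0);

    // Robust model, r1384: explicit prior deviations and degrees of freedom
    //   my_rarx = new RARX(dim, window_size, has_constant,
    //                      apriorno*sqrt(2.0), apriorno*sqrt(2.0), dim + 3);

    // Classical ARX model, r1384: V0 stays fully regular (V0(0,0) no longer zeroed)
    mat V0 = apriorno * eye(dim + 1);
    //   my_arx->set_statistics(1, V0, V0.rows() + 2);   // nu raised from rows+1 to rows+2

    return V0;
}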
r1383 r1384 27 27 //const int utility_constant = 5; 28 28 29 const int max_model_order = 1;30 const double apriorno = 0.0 1;29 const int max_model_order = 2; 30 const double apriorno = 0.001; 31 31 32 32 /* … … 99 99 if(has_constant) 100 100 { 101 my_rarx = new RARX(ar_components.size()+1,window_size,true );101 my_rarx = new RARX(ar_components.size()+1,window_size,true,apriorno*sqrt(2.0),apriorno*sqrt(2.0),ar_components.size()+4); 102 102 my_arx = NULL; 103 103 } 104 104 else 105 105 { 106 my_rarx = new RARX(ar_components.size(),window_size,false );106 my_rarx = new RARX(ar_components.size(),window_size,false,apriorno*sqrt(2.0),apriorno*sqrt(2.0),ar_components.size()+3); 107 107 my_arx = NULL; 108 108 } … … 117 117 { 118 118 V0 = apriorno * eye(ar_components.size()+2); //aj tu konst 119 V0(0,0) = 0;119 //V0(0,0) = 0; 120 120 my_arx->set_constant(true); 121 121 } … … 124 124 125 125 V0 = apriorno * eye(ar_components.size()+1);//menit konstantu 126 V0(0,0) = 0;126 //V0(0,0) = 0; 127 127 my_arx->set_constant(false); 128 128 129 129 } 130 130 131 my_arx->set_statistics(1, V0, V0.rows()+ 1);131 my_arx->set_statistics(1, V0, V0.rows()+2); 132 132 my_arx->set_parameters(window_size); 133 133 my_arx->validate(); … … 274 274 vector<vector<string>> strings; 275 275 276 char* file_string = "c:\\ar_normal_single"; // "c:\\dataTYClosePercDiff"; //276 char* file_string = "C:\\Users\\Hontik\\Desktop\\Bayes\\dataADClosePercDiff"; // "C:\\Users\\Hontik\\Desktop\\Bayes\\ar_normal_single"; // 277 277 278 278 char dfstring[80]; … … 316 316 for(int window_size = 50;window_size < 51;window_size++) 317 317 { 318 //models.push_back(new model((*model_type),true,true,window_size,0,&data_matrix)); // to su len konstruktory, len inicializujeme rozne typy319 //models.push_back(new model((*model_type),false,true,window_size,0,&data_matrix));318 models.push_back(new model((*model_type),true,true,window_size,0,&data_matrix)); // to su len konstruktory, len inicializujeme rozne typy 319 models.push_back(new model((*model_type),false,true,window_size,0,&data_matrix)); 320 320 models.push_back(new model((*model_type),true,false,window_size,0,&data_matrix)); 321 //models.push_back(new model((*model_type),false,false,window_size,0,&data_matrix));321 models.push_back(new model((*model_type),false,false,window_size,0,&data_matrix)); 322 322 } 323 323 … … 349 349 if((*model_ref)->my_rarx!=NULL) //vklada normalizacnz faktor do cur_res_lognc 350 350 { 351 cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_rarx->posterior-> log_nc);351 cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_rarx->posterior->_ll()); 352 352 } 353 353 else 354 354 { 355 cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_arx-> posterior().lognc());355 cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_arx->_ll()); 356 356 } 357 357 … … 382 382 // result_preds.ins_col(result_preds.cols(),preds); 383 383 384 // cout << "Updated." << endl; 385 386 387 384 // cout << "Updated." << endl; 388 385 389 386 -
applications/robust/robustlib.cpp
r1383 -> r1384

@@ 122-128 @@
 }
 
 toprow* as_toprow = (toprow*)this;
+
+//cout << "Current condition:" << as_toprow->condition_sum << endl;
 
 int dimension = simplex->vertices.size()-1;

@@ 143-149 @@
 extended_coords.ins(0,-1);
 
-cout << "Ext. coords:" << extended_coords << "Condition sum:" << as_toprow->condition_sum << endl;
+//cout << "Ext. coords:" << extended_coords << " Condition sum:" << as_toprow->condition_sum << endl;
 
 double a = extended_coords*as_toprow->condition_sum;

@@ 154-160 @@
 //cout << "a0:" << a_0 << " a0 coords:" << base_vertex->get_coordinates() << " am:" << a_m << " am coords:" << (*vert_ref)->get_coordinates() << endl;
 
-cout << "Absolute coords:(V" << row_count << ")" << (*vert_ref)->get_coordinates() << endl;
+//cout << "Absolute coords:(V" << row_count << ")" << (*vert_ref)->get_coordinates() << endl;
 //cout << "Relative coords:(V" << row_count << ")" << relative_coords << endl;

@@ 257-263 @@
 
 simplex->probability = int_value;
-cout << "Probability:" << int_value << endl;
+//cout << "Probability:" << int_value << endl;
 return int_value;
 }
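The robustlib.cpp edits only silence debug printouts inside integrate_simplex(), but they highlight the routine's central operation: a vertex's coordinates are extended with a leading -1 and dotted with the toprow's condition_sum, i.e. the (y_t, psi_{t-1})*(-1, theta)' rewriting of the AR equation described in the robustlib.h comments below. A self-contained sketch of just that evaluation; evaluate_vertex is a hypothetical helper and IT++ types are assumed:

#include <itpp/itbase.h>
using namespace itpp;

// Sketch only: the extended-coordinate scalar product used when a vertex is evaluated
// against a condition (both in integrate_simplex() and in vertex classification).
double evaluate_vertex(const vec& vertex_coordinates, const vec& condition_sum)
{
    vec extended_coords = vertex_coordinates;
    extended_coords.ins(0, -1.0);              // prepend -1 for the regressand y_t
    return extended_coords * condition_sum;    // sign and size decide the vertex's role
}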
applications/robust/robustlib.h
r1383 r1384 583 583 bool operator>(const my_ivec &second) const 584 584 { 585 return max(*this)>max(second); 586 587 /* 588 int size1 = this->size(); 589 int size2 = second.size(); 590 591 int counter1 = 0; 592 while(0==0) 593 { 594 if((*this)[counter1]==0) 595 { 596 size1--; 597 } 598 599 if((*this)[counter1]!=0) 600 break; 601 602 counter1++; 603 } 604 605 int counter2 = 0; 606 while(0==0) 607 { 608 if(second[counter2]==0) 609 { 610 size2--; 611 } 612 613 if(second[counter2]!=0) 614 break; 615 616 counter2++; 617 } 618 619 if(size1!=size2) 620 { 621 return size1>size2; 622 } 623 else 624 { 625 for(int i = 0;i<size1;i++) 626 { 627 if((*this)[counter1+i]!=second[counter2+i]) 628 { 629 return (*this)[counter1+i]>second[counter2+i]; 630 } 631 } 632 633 return false; 634 }*/ 635 } 636 585 return max(*this)>max(second); 586 } 637 587 638 588 bool operator==(const my_ivec &second) const 639 589 { 640 return max(*this)==max(second); 641 642 /* 643 int size1 = this->size(); 644 int size2 = second.size(); 645 646 int counter = 0; 647 while(0==0) 648 { 649 if((*this)[counter]==0) 650 { 651 size1--; 652 } 653 654 if((*this)[counter]!=0) 655 break; 656 657 counter++; 658 } 659 660 counter = 0; 661 while(0==0) 662 { 663 if(second[counter]==0) 664 { 665 size2--; 666 } 667 668 if(second[counter]!=0) 669 break; 670 671 counter++; 672 } 673 674 if(size1!=size2) 675 { 676 return false; 677 } 678 else 679 { 680 for(int i=0;i<size1;i++) 681 { 682 if((*this)[size()-1-i]!=second[second.size()-1-i]) 683 { 684 return false; 685 } 686 } 687 688 return true; 689 }*/ 590 return max(*this)==max(second); 690 591 } 691 592 … … 798 699 799 700 800 701 /// A method for recursive classification of polyhedrons with respect to SPLITting and MERGEing conditions. 801 702 void send_state_message(polyhedron* sender, condition *toadd, condition *toremove, int level) 802 703 { 803 704 705 // We translate existence of toremove and toadd conditions to booleans for ease of manipulation 804 706 bool shouldmerge = (toremove != NULL); 805 707 bool shouldsplit = (toadd != NULL); 806 708 709 // If such operation is desired, in the following cycle we send a message about polyhedrons classification 710 // to all its parents. We loop through the parents and report the child sending its message. 807 711 if(shouldsplit||shouldmerge) 808 712 { 809 713 for(list<polyhedron*>::iterator parent_iterator = sender->parents.begin();parent_iterator!=sender->parents.end();parent_iterator++) 810 714 { 715 // We set an individual pointer to the value at parent_iterator for ease of use 811 716 polyhedron* current_parent = *parent_iterator; 812 717 718 // The message_counter counts the number of messages received by the parent 813 719 current_parent->message_counter++; 814 720 721 // If the child is the last one to send its message, the parent can as well be classified and 722 // send its message further up. 815 723 bool is_last = (current_parent->message_counter == current_parent->number_of_children()); 724 725 // Certain properties need to be set if this is the first message received by the parent 816 726 bool is_first = (current_parent->message_counter == 1); 817 727 728 // This boolean watches for polyhedrons that are already out of the game for further MERGEing 729 // and SPLITting purposes. This may seem quite straightforward at first, but because of all 730 // the operations involved it may be quite complicated. 
For example a polyhedron laying in the 731 // positive side of the MERGEing hyperplane should not be split, because it lays in the positive 732 // part of the location parameter space relative to the SPLITting hyperplane, but because it 733 // is merged with its MERGE negative counterpart, which is being SPLIT, the polyhedron itself 734 // will be SPLIT after it has been merged and needs to retain all properties needed for the 735 // purposes of SPLITting. 818 736 bool out_of_the_game = true; 819 737 820 738 if(shouldmerge) 821 739 { 740 // get the MERGE state of the child 822 741 int child_state = sender->get_state(MERGE); 742 // get the MERGE state of the parent so far, the parent can be partially classified 823 743 int parent_state = current_parent->get_state(MERGE); 824 744 745 // In case this is the first message received by the parent, its state has not been set yet 746 // and therefore it inherits the MERGE state of the child. On the other hand if the state 747 // of the parent is 0, all the children so far were neutral and if the next child isn't 748 // neutral the parent should be in state of the child again. 825 749 if(parent_state == 0||is_first) 826 750 { … … 828 752 } 829 753 754 // If a child is contained in the hyperplane of a condition that should be removed and it is 755 // not of multiplicity higher than 1, it will later serve as a merger for two of its parents 756 // each lying on one side of the removed hyperplane (one being classified MERGE positive, the 757 // other MERGE negative). Here we set the possible merger candidates. 830 758 if(child_state == 0) 831 759 { … … 836 764 } 837 765 766 // If the parent obtained a message from the last one of its children we have to classify it 767 // with respect to the MERGE condition. 838 768 if(is_last) 839 769 { 770 // If the parent is a toprow from the top row of the Hasse diagram, we alter the condition 771 // sum and condition order with respect to on which side of the cutting hyperplane the 772 // toprow is located. 840 773 if(level == number_of_parameters-1) 841 774 { 775 // toprow on the positive side 842 776 if(parent_state == 1) 843 777 { … … 845 779 } 846 780 781 // toprow on the negative side 847 782 if(parent_state == -1) 848 783 { … … 851 786 } 852 787 788 // lowering the condition order. 789 // REMARK: This maybe could be done more globally for the whole statistic. 853 790 ((toprow*)current_parent)->condition_order--; 854 791 855 792 // If the parent is a candidate for being MERGEd 856 793 if(current_parent->mergechild != NULL) 857 794 { 795 // It might not be out of the game 858 796 out_of_the_game = false; 859 797 798 // If the mergechild multiplicity is 1 it will disappear after merging 860 799 if(current_parent->mergechild->get_multiplicity()==1) 861 800 { 801 // and because we need the child to have an address of the two parents it is 802 // supposed to merge, we assign the address of current parent to one of the 803 // two pointers existing in the child for this purpose regarding to its position 804 // in the location parameter space with respect to the MERGE hyperplane. 862 805 if(parent_state > 0) 863 806 { … … 872 815 else 873 816 { 817 // If the mergechild has higher multiplicity, it will not disappear after the 818 // condition is removed and the parent will still be out of the game, because 819 // no MERGEing will occur. 
874 820 out_of_the_game = true; 875 821 } 876 822 } 877 823 824 // If so far the parent is out of the game, it is the toprow polyhedron and there will 825 // be no SPLITting, we compute its probability integral by summing all the integral 826 // from the simplices contained in it. 878 827 if(out_of_the_game) 879 828 { 880 //current_parent->set_state(0,MERGE);881 882 829 if((level == number_of_parameters - 1) && (!shouldsplit)) 883 830 { 884 831 toprow* cur_par_toprow = ((toprow*)current_parent); 885 cur_par_toprow->probability = 0.0; 886 887 //set<simplex*> new_triangulation; 832 cur_par_toprow->probability = 0.0; 888 833 889 834 for(set<simplex*>::iterator s_ref = current_parent->triangulation.begin();s_ref!=current_parent->triangulation.end();s_ref++) … … 891 836 double cur_prob = cur_par_toprow->integrate_simplex((*s_ref),'C'); 892 837 893 cur_par_toprow->probability += cur_prob; 894 895 //new_triangulation.insert(pair<double,set<vertex*>>(cur_prob,(*t_ref).second)); 838 cur_par_toprow->probability += cur_prob; 896 839 } 897 840 898 normalization_factor += cur_par_toprow->probability; 899 900 //current_parent->triangulation.clear(); 901 //current_parent->triangulation.insert(new_triangulation.begin(),new_triangulation.end()); 841 normalization_factor += cur_par_toprow->probability; 902 842 } 903 843 } 904 844 845 // If the parent is classified MERGE neutral, it will serve as a merger for two of its 846 // parents so we report it to the for_merging list. 905 847 if(parent_state == 0) 906 848 { 907 for_merging[level+1].push_back(current_parent); 908 //current_parent->parentconditions.erase(toremove); 909 } 910 911 849 for_merging[level+1].push_back(current_parent); 850 } 912 851 } 913 852 } 914 853 854 // In the second part of the classification procedure, we will classify the parent polyhedron 855 // for the purposes of SPLITting. Since splitting comes from a parent that is being split by 856 // creating a neutral child that cuts the split polyhedron in two parts, the created child has 857 // to be connected to all the neutral grandchildren of the source parent. We therefore have to 858 // report all such grandchildren of the parent. More complication is brought in by grandchildren 859 // that have not been created in the process of splitting, but were classified SPLIT neutral 860 // already in the classification stage. Such grandchildren and children were already present 861 // in the Hasse diagram befor the SPLITting condition emerged. We call such object totallyneutral. 862 // They have to be watched and treated separately. 915 863 if(shouldsplit) 916 864 { 865 // We report the totally neutral children of the message sending child into the totally neutral 866 // grandchildren list of current parent. 917 867 current_parent->totallyneutralgrandchildren.insert(sender->totallyneutralchildren.begin(),sender->totallyneutralchildren.end()); 918 868 869 // We need to have the pointers from grandchildren to grandparents as well, we therefore set 870 // the opposite relation as well. 919 871 for(set<polyhedron*>::iterator tot_child_ref = sender->totallyneutralchildren.begin();tot_child_ref!=sender->totallyneutralchildren.end();tot_child_ref++) 920 872 { … … 922 874 } 923 875 876 // If this is the first child to report its total neutrality, the parent inherits its state. 
924 877 if(current_parent->totally_neutral == NULL) 925 878 { 926 879 current_parent->totally_neutral = sender->totally_neutral; 927 880 } 881 // else the parent is totally neutral only if all the children up to now are totally neutral. 928 882 else 929 883 { … … 931 885 } 932 886 887 // For splitting purposes, we have to mark all the children of the given parent by their SPLIT 888 // state, because when we split the parent, we create its positive and negative offsprings and 889 // its children have to be assigned accordingly. 933 890 switch(sender->get_state(SPLIT)) 934 891 { 935 892 case 1: 893 // child classified positive 936 894 current_parent->positivechildren.push_back(sender); 895 896 // all the vertices of the positive child are assigned to the positive and neutral vertex 897 // set 937 898 current_parent->positiveneutralvertices.insert(sender->vertices.begin(),sender->vertices.end()); 938 899 break; 939 900 case 0: 901 // child classified neutral 940 902 current_parent->neutralchildren.push_back(sender); 941 903 904 // all the vertices of the neutral child are assigned to both negative and positive vertex 905 // sets 942 906 if(level!=0) 943 907 { … … 951 915 } 952 916 917 // if the child is totally neutral it is also assigned to the totallyneutralchildren 953 918 if(sender->totally_neutral) 954 919 { … … 958 923 break; 959 924 case -1: 925 // child classified negative 960 926 current_parent->negativechildren.push_back(sender); 961 927 current_parent->negativeneutralvertices.insert(sender->vertices.begin(),sender->vertices.end()); … … 963 929 } 964 930 931 // If the last child has sent its message to the parent, we have to decide if the polyhedron 932 // needs to be split. 965 933 if(is_last) 966 934 { 967 935 // If the polyhedron extends to both sides of the cutting hyperplane it needs to be SPLIT. Such 936 // situation occurs if either the polyhedron has negative and also positive children or 937 // if the polyhedron contains neutral children that cross the cutting hyperplane. Such 938 // neutral children cannot be totally neutral, since totally neutral children lay within 939 // the cutting hyperplane. If the polyhedron is to be cut its state is set to SPLIT neutral 968 940 if((current_parent->negativechildren.size()>0&¤t_parent->positivechildren.size()>0) 969 941 ||(current_parent->neutralchildren.size()>0&¤t_parent->totallyneutralchildren.empty())) 970 942 { 971 for_splitting[level+1].push_back(current_parent); 972 943 for_splitting[level+1].push_back(current_parent); 973 944 current_parent->set_state(0, SPLIT); 974 945 } 975 946 else 976 947 { 948 // Else if the polyhedron has a positive number of negative children we set its state 949 // to SPLIT negative. In such a case we subtract current condition from the overall 950 // condition sum 977 951 if(current_parent->negativechildren.size()>0) 978 952 { 953 // set the state 979 954 current_parent->set_state(-1, SPLIT); 980 955 956 // alter the condition sum 981 957 if(level == number_of_parameters-1) 982 958 { 983 959 ((toprow*)current_parent)->condition_sum-=toadd->value; 984 } 985 960 } 986 961 } 962 // If the polyhedron has a positive number of positive children we set its state 963 // to SPLIT positive. 
In such a case we add current condition to the overall 964 // condition sum 987 965 else if(current_parent->positivechildren.size()>0) 988 966 { 967 // set the state 989 968 current_parent->set_state(1, SPLIT); 990 969 970 // alter the condition sum 991 971 if(level == number_of_parameters-1) 992 972 { … … 994 974 } 995 975 } 976 // Else the polyhedron only has children that are totally neutral. In such a case, 977 // we mark it totally neutral as well and insert the SPLIT condition into the 978 // parent conditions of the polyhedron. No addition or subtraction is needed in 979 // this case. 996 980 else 997 981 { … … 1001 985 } 1002 986 987 // In either case we raise the condition order (statistical condition sum significance) 1003 988 ((toprow*)current_parent)->condition_order++; 1004 989 990 // In case the polyhedron is a toprow and it will not be SPLIT, we compute its probability 991 // integral with the altered condition. 1005 992 if(level == number_of_parameters - 1 && current_parent->mergechild == NULL) 1006 993 { 1007 994 toprow* cur_par_toprow = ((toprow*)current_parent); 1008 cur_par_toprow->probability = 0.0; 1009 1010 //map<double,set<vertex*>> new_triangulation; 995 cur_par_toprow->probability = 0.0; 1011 996 997 // We compute the integral as a sum over all simplices contained within the 998 // polyhedron. 1012 999 for(set<simplex*>::iterator s_ref = current_parent->triangulation.begin();s_ref!=current_parent->triangulation.end();s_ref++) 1013 1000 { … … 1015 1002 1016 1003 cur_par_toprow->probability += cur_prob; 1017 1018 //new_triangulation.insert(pair<double,set<vertex*>>(cur_prob,(*t_ref).second));1019 1004 } 1020 1005 1021 1006 normalization_factor += cur_par_toprow->probability; 1022 1023 //current_parent->triangulation.clear();1024 //current_parent->triangulation.insert(new_triangulation.begin(),new_triangulation.end());1025 1007 } 1026 1008 1009 // If the parent polyhedron is out of the game, so that it will not be MERGEd or 1010 // SPLIT any more, we will reset the lists specifying its relation with respect 1011 // to the SPLITting condition, so that they will be clear for future use. 1027 1012 if(out_of_the_game) 1028 1013 { 1029 1014 current_parent->positivechildren.clear(); 1030 1015 current_parent->negativechildren.clear(); 1031 current_parent->neutralchildren.clear(); 1032 //current_parent->totallyneutralchildren.clear(); 1033 current_parent->totallyneutralgrandchildren.clear(); 1034 // current_parent->grandparents.clear(); 1016 current_parent->neutralchildren.clear(); 1017 current_parent->totallyneutralgrandchildren.clear(); 1035 1018 current_parent->positiveneutralvertices.clear(); 1036 1019 current_parent->negativeneutralvertices.clear(); … … 1042 1025 } 1043 1026 1027 // Finally if the the parent polyhedron has been SPLIT and MERGE classified, we will send a message 1028 // about its classification to its parents. 1044 1029 if(is_last) 1045 1030 { … … 1052 1037 } 1053 1038 1039 // We clear the totally neutral children of the child here, because we needed them to be assigned as 1040 // totally neutral grandchildren to all its parents. 1054 1041 sender->totallyneutralchildren.clear(); 1055 1042 } … … 1073 1060 /// A default constructor creates an emlig with predefined statistic representing only the range of the given 1074 1061 /// parametric space, where the number of parameters of the needed model is given as a parameter to the constructor. 
1075 emlig(int number_of_parameters, double soft_prior_parameter)1062 emlig(int number_of_parameters, double alpha_deviation, double sigma_deviation, int nu) 1076 1063 { 1077 1064 this->number_of_parameters = number_of_parameters; 1078 1065 1079 condition_order = nu mber_of_parameters+2;1066 condition_order = nu; 1080 1067 1081 create_statistic(number_of_parameters, soft_prior_parameter);1068 create_statistic(number_of_parameters, alpha_deviation, sigma_deviation); 1082 1069 1083 1070 //step_me(10); … … 1097 1084 } 1098 1085 1099 log_nc = log(normalization_factor); 1100 1101 /* 1102 cout << "part1: " << log(normalization_factor) << endl; 1103 cout << "part2: " << logfact(condition_order-number_of_parameters-2) << endl; 1104 pause(1); 1105 */ 1086 last_log_nc = NULL; 1087 log_nc = log(normalization_factor); 1106 1088 1107 1089 cout << "Prior constructed." << endl; … … 1266 1248 void add_and_remove_condition(vec toadd, vec toremove) 1267 1249 { 1250 1251 // New condition arrived (new data are available). Here we will perform the Bayesian data update 1252 // step by splitting the location parameter space with respect to the new condition and computing 1253 // normalization integrals for each polyhedron in the location parameter space. 1268 1254 1269 //step_me(0); 1255 // First we reset previous value of normalization factor and maximum value of the log likelihood. 1256 // Because there is a minus sign in the exponent of the likelihood, we really search for a minimum 1257 // and here we set min_ll to a high value. 1270 1258 normalization_factor = 0; 1271 1259 min_ll = numeric_limits<double>::max(); 1272 1260 1261 // We translate the presence of a condition to add to a boolean. Also, if moving window version of 1262 // data update is used, we check for the presence of a condition to be removed from consideration. 1263 // To take care of addition and deletion of a condition in one method is computationally better than 1264 // treating both cases separately. 1273 1265 bool should_remove = (toremove.size() != 0); 1274 1266 bool should_add = (toadd.size() != 0); 1275 1267 1268 // We lower the number of conditions so far considered if we remove one. 1276 1269 if(should_remove) 1277 1270 { … … 1279 1272 } 1280 1273 1274 // We raise the number of conditions so far considered if we add one. 1281 1275 if(should_add) 1282 1276 { … … 1284 1278 } 1285 1279 1280 // We erase the support lists used in splitting/merging operations later on to keep track of the 1281 // split/merged polyhedrons. 1286 1282 for_splitting.clear(); 1287 1283 for_merging.clear(); 1288 1284 1285 // This is a somewhat stupid operation, where we fill the vector of lists by empty lists, so that 1286 // we can extend the lists contained in the vector later on. 1289 1287 for(int i = 0;i<statistic.size();i++) 1290 1288 { … … 1296 1294 } 1297 1295 1298 list<condition*>::iterator toremove_ref = conditions.end(); 1299 bool condition_should_be_added = should_add; 1300 1296 // We set`the iterator in the conditions list to a blind end() iterator 1297 list<condition*>::iterator toremove_ref = conditions.end(); 1298 1299 // We search the list of conditions for existence of toremove and toadd conditions and check their 1300 // possible multiplicity. 1301 1301 for(list<condition*>::iterator ref = conditions.begin();ref!=conditions.end();ref++) 1302 1302 { 1303 // If condition should be removed.. 
1303 1304 if(should_remove) 1304 1305 { 1306 // if it exists in the list 1305 1307 if((*ref)->value == toremove) 1306 1308 { 1309 // if it has multiplicity higher than 1 1307 1310 if((*ref)->multiplicity>1) 1308 1311 { 1312 // we just lower the multiplicity 1309 1313 (*ref)->multiplicity--; 1310 1314 1315 // In this case the parameter space remains unchanged (we have to process no merging), 1316 // so we only alter the condition sums in all the cells and compute the integrals 1317 // over the cells with the subtracted condition 1311 1318 alter_toprow_conditions(*ref,false); 1312 1319 1320 // By altering the condition sums in each individual unchanged cell, we have finished 1321 // all the tasks of this method related to merging and removing given condition. Therefore 1322 // we switch the should_remove switch to false. 1313 1323 should_remove = false; 1314 1324 } 1315 1325 else 1316 1326 { 1327 // In case the condition to be removed has a multiplicity of 1, we mark its position in 1328 // the vector of conditions by assigning its iterator to toremove_ref variable. 1317 1329 toremove_ref = ref; 1318 } 1330 } 1319 1331 } 1320 1332 } 1321 1333 1334 // If a condition should be added.. 1322 1335 if(should_add) 1323 1336 { 1337 // We search the vector of conditions if a condition with the same value already exists. 1324 1338 if((*ref)->value == toadd) 1325 1339 { 1340 // If it does, there will be no further splitting necessary. We have to raise its multiplicity.. 1326 1341 (*ref)->multiplicity++; 1327 1342 1343 // Again as with the condition to be removed, if no splitting is performed, we only have to 1344 // perform the computations in the individual cells in the top row of Hasse diagram of the 1345 // complex of polyhedrons by changing the condition sums in individual cells and computing 1346 // integrals with changed condition sum. 1328 1347 alter_toprow_conditions(*ref,true); 1329 1348 1330 should_add = false;1331 1332 condition_should_be_added = false;1349 // We switch off any further operations on the complex by switching the should_add variable 1350 // to false. 1351 should_add = false; 1333 1352 } 1334 1353 } 1335 1354 } 1336 1355 1356 // Here we erase the removed condition from the conditions vector and assign a pointer to the 1357 // condition object of the removed condition, if there is such, else the pointer remains NULL. 1337 1358 condition* condition_to_remove = NULL; 1338 1339 if(toremove_ref!=conditions.end()) 1340 { 1341 condition_to_remove = *toremove_ref; 1342 conditions.erase(toremove_ref); 1343 } 1344 1359 if(should_remove) 1360 { 1361 if(toremove_ref!=conditions.end()) 1362 { 1363 condition_to_remove = *toremove_ref; 1364 conditions.erase(toremove_ref); 1365 } 1366 } 1367 1368 // Here we create the condition object for a condition value to be added and we insert it in 1369 // the list of conditions in case new condition should be added, else the pointer is set to NULL. 
1345 1370 condition* condition_to_add = NULL; 1346 1347 if(condition_should_be_added) 1348 { 1349 condition* new_condition = new condition(toadd); 1350 1351 conditions.push_back(new_condition); 1352 condition_to_add = new_condition; 1371 if(should_add) 1372 { 1373 condition_to_add = new condition(toadd); 1374 conditions.push_back(new_condition); 1353 1375 } 1354 1376 1377 //********************************************************************************************** 1378 // Classification of points related to added and removed conditions 1379 //********************************************************************************************** 1380 // Here the preliminary and preparation part ends and we begin classifying individual vertices in 1381 // the bottom row of the representing Hasse diagram relative to the condition to be removed and the 1382 // one to be added. This classification proceeds further in a recursive manner. Each classified 1383 // polyhedron sends an information about its classification to its parent, when all the children of 1384 // given parents are classified, the parent can be itself classified and send information further to 1385 // its parent and so on. 1386 1387 // We loop through all ther vertices 1355 1388 for(polyhedron* horizontal_position = statistic.rows[0];horizontal_position!=statistic.get_end();horizontal_position=horizontal_position->next_poly) 1356 1389 { 1390 // Cast from general polyhedron to a vertex 1357 1391 vertex* current_vertex = (vertex*)horizontal_position; 1358 1392 1393 // If a condition should be added or removed.. 1359 1394 if(should_add||should_remove) 1360 1395 { 1396 // The coordinates are extended by a -1 representing there is no parameter multiplying the 1397 // regressor in the autoregressive model. The condition is passed to the method as a vector 1398 // (y_t,psi_{t-1}), where y_t is the value of regressor and psi_t is the vector of regressands. 1399 // Minus sign is needed, because the AR model equation reads y_t = theta*psi_{t-1}+e_t, which 1400 // can be rewriten as (y_t, psi_{t-1})*(-1,theta)', where ' stands for transposition and * for 1401 // scalar product 1361 1402 vec appended_coords = current_vertex->get_coordinates(); 1362 1403 appended_coords.ins(0,-1.0); … … 1364 1405 if(should_add) 1365 1406 { 1366 double local_condition = 0;// = toadd*(appended_coords.first/=appended_coords.second); 1367 1368 local_condition = appended_coords*toadd; 1369 1370 // cout << "Vertex multiplicity: "<< current_vertex->get_multiplicity() << endl; 1371 1407 // We compute the position of the vertex relative to the added condition 1408 double local_condition = appended_coords*toadd;// = toadd*(appended_coords.first/=appended_coords.second); 1409 1410 // The method set_state classifies the SPLIT state of the vertex as positive, negative or 1411 // neutral 1372 1412 current_vertex->set_state(local_condition,SPLIT); 1373 1413 1374 1414 /// \TODO There should be a rounding error tolerance used here to insure we are not having too many points because of rounding error. 1415 // If the vertex lays on the hyperplane related to the condition cutting the location parameter 1416 // space in half, we say it is totally neutral. This way it will be different than the later 1417 // newly created vertices appearing on the cuts of line segments. In an environment, where 1418 // the data variables are continuous (they don't have positive probability mass at any point 1419 // in the data space) the occurence of a point on the cutting hyperplane has probability 0. 
1420 // In real world application, where data are often discrete, we have to take such situation 1421 // into account. 1375 1422 if(local_condition == 0) 1376 1423 { 1424 // In certain scenarios this situation is rather rare. We might then want to know about 1425 // occurence of a point laying on the cutting hyperplane (Programmers note:Also such 1426 // scenarios were not so well tested and computation errors may occur!) 1377 1427 cout << "Condition to add: " << toadd << endl; 1378 1428 cout << "Vertex coords: " << appended_coords << endl; 1379 1429 1430 // We classify the vertex totally neutral 1380 1431 current_vertex->totally_neutral = true; 1381 1432 1433 // We raise its multiplicity and set current splitting condition as a parent condition 1434 // of the vertex, since if we later remove the original parent condition, the vertex 1435 // has to have a parent condition its right to exist. 1382 1436 current_vertex->raise_multiplicity(); 1383 1437 current_vertex->parentconditions.insert(condition_to_add); … … 1385 1439 else 1386 1440 { 1441 // If the vertex lays off the cutting hyperplane, we set its totally_neutral property 1442 // to false. 1387 1443 current_vertex->totally_neutral = false; 1388 1444 } 1389 1445 } 1390 1446 1447 // Now we classify the vertex with respect to the MERGEing condition.. 1391 1448 if(should_remove) 1392 1449 { 1393 set<condition*>::iterator cond_ref;1394 1450 // We search the condition to be removed in the list of vertice's parent conditions 1451 set<condition*>::iterator cond_ref; 1395 1452 for(cond_ref = current_vertex->parentconditions.begin();cond_ref!=current_vertex->parentconditions.end();cond_ref++) 1396 1453 { … … 1401 1458 } 1402 1459 1460 // If the list of parent conditions of the given vertex contain the condition that is being 1461 // removed, we erase it from the list, we set the vertice's MERGE state to neutral and we 1462 // insert the vertex into the set of polyhedrons that are supposed to be used for merging 1463 // (themselves possibly being deleted). 1464 1465 // REMARK: One may think it would be easier to check the condition again computationally. 1466 // Such design has been used before in the software, but due to rounding errors it was 1467 // very unreliable. These rounding errors are avoided using current design. 1403 1468 if(cond_ref!=current_vertex->parentconditions.end()) 1404 1469 { … … 1409 1474 else 1410 1475 { 1476 // If parent conditions of the vertex don't contain the condition to be removed, we 1477 // check in which halfspace it is located and set its MERGE state accordingly. 1411 1478 double local_condition = toremove*appended_coords; 1412 1479 current_vertex->set_state(local_condition,MERGE); … … 1415 1482 } 1416 1483 1484 // Once classified we proceed recursively by calling the send_state_message method 1417 1485 send_state_message(current_vertex, condition_to_add, condition_to_remove, 0); 1418 1486 … … 1442 1510 // cout << endl; 1443 1511 } 1444 */ 1445 1446 1447 1512 */ 1513 1514 1515 // Here we have finished the classification part and we have at hand two sets of polyhedrons used for 1516 // further operation on the location parameter space. The first operation will be merging of polyhedrons 1517 // with respect to the MERGE condition. For that purpose, we have a set of mergers in a list called 1518 // for_merging. After we are finished merging, we need to split the polyhedrons cut by the SPLIT 1519 // condition. These polyhedrons are gathered in the for_splitting list. 
As can be seen, the MERGE 1520 // operation is done from below, in the terms of the Hasse diagram and therefore we start to merge 1521 // from the very bottom row, from the vertices. On the other hand splitting is done from the top 1522 // and we therefore start with the segments that need to be split. 1523 1524 // We start the MERGE operation here. Some of the vertices will disappear from the Hasse diagram. 1525 // Because they are part of polyhedrons in the Hasse diagram above the segments, we need to remember 1526 // them in the separate set and get rid of them only after the process of merging all the polyhedrons 1527 // has been finished. 1448 1528 cout << "Merging." << endl; 1449 1450 1529 set<vertex*> vertices_to_be_reduced; 1451 1530 1531 // We loop through the vector list of polyhedrons for merging from the bottom row up. We keep track 1532 // of the number of the processed row. 1452 1533 int k = 1; 1453 1454 1534 for(vector<list<polyhedron*>>::iterator vert_ref = for_merging.begin();vert_ref<for_merging.end();vert_ref++) 1455 { 1535 { 1536 // Within a row we loop through all the polyhedrons that we use as mergers. 1456 1537 for(list<polyhedron*>::iterator merge_ref = (*vert_ref).begin();merge_ref!=(*vert_ref).end();merge_ref++) 1457 1538 { 1539 // *************************************************** 1540 // First we treat the case of a multiple merger. 1541 // *************************************************** 1542 1543 // If the multiplicity of the merger is greater than one, the merger will remain in the Hasse 1544 // diagram and its parents will remain split. 1458 1545 if((*merge_ref)->get_multiplicity()>1) 1459 1546 { 1547 // We remove the condition to be removed (the MERGE condition) from the list of merger's 1548 // parents. 1460 1549 (*merge_ref)->parentconditions.erase(condition_to_remove); 1461 1550 1551 // If the merger is a vertex.. 1462 1552 if(k==1) 1463 1553 { 1554 // ..we will later reduce its multiplicity (this is to prevent multiple reduction of 1555 // the same vertex) 1464 1556 vertices_to_be_reduced.insert((vertex*)(*merge_ref)); 1465 1557 } 1558 // If the merger is not a vertex.. 1466 1559 else 1467 1560 { 1561 // lower the multiplicity of the merger 1468 1562 (*merge_ref)->lower_multiplicity(); 1469 1563 } 1470 1564 1565 // If the merger will not be split and it is not totally neutral with respect to SPLIT 1566 // condition (it doesn't lay in the hyperplane defined by the condition), we will not 1567 // need it for splitting purposes and we can therefore clean all the splitting related 1568 // properties, to be able to reuse them when new data arrive. A merger is never a toprow 1569 // so we do not need to integrate. 1471 1570 if((*merge_ref)->get_state(SPLIT)!=0||(*merge_ref)->totally_neutral) 1472 1571 { … … 1480 1579 (*merge_ref)->kids_rel_addresses.clear(); 1481 1580 } 1482 } 1581 } 1582 // Else, if the multiplicity of the merger is equal to 1, we proceed with the merging part of 1583 // the algorithm. 1483 1584 else 1484 1585 { 1586 // A boolean that will be true, if after being merged, the new polyhedron should be split 1587 // in the next step of the algorithm. 1485 1588 bool will_be_split = false; 1486 1589 1590 // The newly created polyhedron will be merged of a negative and positive part specified 1591 // by its merger. 1487 1592 toprow* current_positive = (toprow*)(*merge_ref)->positiveparent; 1488 1593 toprow* current_negative = (toprow*)(*merge_ref)->negativeparent; 1489 1594 1595 // An error check for situation that should not occur. 
1490 1596 if(current_positive->totally_neutral!=current_negative->totally_neutral) 1491 1597 { 1492 1598 throw new exception("Both polyhedrons must be totally neutral if they should be merged!"); 1493 } 1494 1495 //current_positive->condition_sum -= toremove; 1496 //current_positive->condition_order--; 1497 1599 } 1600 1601 // ************************************************************************************* 1602 // Now we rewire the Hasse properties of the MERGE negative part of the merged 1603 // polyhedron to the MERGE positive part - it will be used as the merged polyhedron 1604 // ************************************************************************************* 1605 1606 // Instead of establishing a new polyhedron and filling in all the necessary connections 1607 // and thus adding it into the Hasse diagram, we use the positive polyhedron with its 1608 // connections and we merge it with all the connections from the negative side so that 1609 // the positive polyhedron becomes the merged one. 1610 1611 // We remove the MERGE condition from parent conditions. 1498 1612 current_positive->parentconditions.erase(condition_to_remove); 1499 1613 1614 // We add the children from the negative part into the children list and remove from it the 1615 // merger. 1500 1616 current_positive->children.insert(current_positive->children.end(),current_negative->children.begin(),current_negative->children.end()); 1501 1617 current_positive->children.remove(*merge_ref); 1502 1618 1619 // We reconnect the reciprocal addresses from children to parents. 1503 1620 for(list<polyhedron*>::iterator child_ref = current_negative->children.begin();child_ref!=current_negative->children.end();child_ref++) 1504 1621 { … … 1507 1624 } 1508 1625 1509 // current_positive->parents.insert(current_positive->parents.begin(),current_negative->parents.begin(),current_negative->parents.end()); 1510 // unique(current_positive->parents.begin(),current_positive->parents.end()); 1511 1626 // We loop through the parents of the negative polyhedron. 1512 1627 for(list<polyhedron*>::iterator parent_ref = current_negative->parents.begin();parent_ref!=current_negative->parents.end();parent_ref++) 1513 1628 { 1514 (*parent_ref)->children.remove(current_negative); 1515 1629 // Remove the negative polyhedron from its children 1630 (*parent_ref)->children.remove(current_negative); 1631 1632 // Remove it from the according list with respect to the negative polyhedron's 1633 // SPLIT state. 1516 1634 switch(current_negative->get_state(SPLIT)) 1517 1635 { … … 1525 1643 (*parent_ref)->positivechildren.remove(current_negative); 1526 1644 break; 1645 } 1646 } 1647 1648 // We merge the vertices of the negative and positive part 1649 current_positive->vertices.insert(current_negative->vertices.begin(),current_negative->vertices.end()); 1650 1651 // ************************************************************************** 1652 // Now we treat the situation that one of the MERGEd polyhedrons is to be 1653 // SPLIT. 1654 // ************************************************************************** 1655 1656 if(!current_positive->totally_neutral) 1657 { 1658 // If the positive polyhedron was not to be SPLIT and the negative polyhedron was.. 1659 if(current_positive->get_state(SPLIT)!=0&¤t_negative->get_state(SPLIT)==0) 1660 { 1661 //..we loop through the parents of the positive polyhedron.. 
1662 for(list<polyhedron*>::iterator parent_ref = current_positive->parents.begin();parent_ref!=current_positive->parents.end();parent_ref++) 1663 { 1664 //..and if the MERGE positive polyhedron is SPLIT positive, we remove it 1665 //from the list of SPLIT positive children.. 1666 if(current_positive->get_state(SPLIT)==1) 1667 { 1668 (*parent_ref)->positivechildren.remove(current_positive); 1669 } 1670 //..or if the MERGE positive polyhedron is SPLIT negative, we remove it 1671 //from the list of SPLIT positive children.. 1672 else 1673 { 1674 (*parent_ref)->negativechildren.remove(current_positive); 1675 } 1676 //..and we add it to the SPLIT neutral children, because the MERGE negative polyhedron 1677 //that is being MERGEd with it causes it to be SPLIT neutral (the hyperplane runs 1678 //through the merged polyhedron) 1679 (*parent_ref)->neutralchildren.push_back(current_positive); 1680 } 1681 1682 // Because of the above mentioned reason, we set the SPLIT state of the MERGE positive 1683 // polyhedron to neutral 1684 current_positive->set_state(0,SPLIT); 1685 1686 for_splitting[k].remove(current_negative); 1687 // and we add it to the list of polyhedrons to be SPLIT 1688 for_splitting[k].push_back(current_positive); 1527 1689 } 1528 //(*parent_ref)->children.push_back(current_positive); 1529 } 1530 1531 if(current_positive->get_state(SPLIT)!=0&¤t_negative->get_state(SPLIT)==0) 1532 { 1533 for(list<polyhedron*>::iterator parent_ref = current_positive->parents.begin();parent_ref!=current_positive->parents.end();parent_ref++) 1690 1691 1692 // If the MERGEd polyhedron is to be split.. 1693 if(current_positive->get_state(SPLIT)==0) 1534 1694 { 1535 if(current_positive->get_state(SPLIT)==1) 1695 // We need to fill the lists related to split with correct values, adding the SPLIT 1696 // positive, negative and neutral children to according list in the MERGE positive, 1697 // or future MERGEd polyhedron 1698 current_positive->negativechildren.insert(current_positive->negativechildren.end(),current_negative->negativechildren.begin(),current_negative->negativechildren.end()); 1699 current_positive->positivechildren.insert(current_positive->positivechildren.end(),current_negative->positivechildren.begin(),current_negative->positivechildren.end()); 1700 current_positive->neutralchildren.insert(current_positive->neutralchildren.end(),current_negative->neutralchildren.begin(),current_negative->neutralchildren.end()); 1701 1702 // and remove the merger, which will be later deleted from the lists of SPLIT classified 1703 // children. 1704 switch((*merge_ref)->get_state(SPLIT)) 1536 1705 { 1537 (*parent_ref)->positivechildren.remove(current_positive); 1538 } 1539 else 1540 { 1541 (*parent_ref)->negativechildren.remove(current_positive); 1542 } 1543 1544 (*parent_ref)->neutralchildren.push_back(current_positive); 1706 case -1: 1707 current_positive->negativechildren.remove(*merge_ref); 1708 break; 1709 case 0: 1710 current_positive->neutralchildren.remove(*merge_ref); 1711 break; 1712 case 1: 1713 current_positive->positivechildren.remove(*merge_ref); 1714 break; 1715 } 1716 1717 // We also have to merge the lists of totally neutral children laying in the SPLIT related 1718 // cutting hyperpalne and the lists of positive+neutral and negative+neutral vertices. 
1719 current_positive->totallyneutralgrandchildren.insert(current_negative->totallyneutralgrandchildren.begin(),current_negative->totallyneutralgrandchildren.end()); 1720 // Because a vertex cannot be SPLIT, we don't need to remove the merger from the 1721 // positive+neutral and negative+neutral lists 1722 current_positive->negativeneutralvertices.insert(current_negative->negativeneutralvertices.begin(),current_negative->negativeneutralvertices.end()); 1723 current_positive->positiveneutralvertices.insert(current_negative->positiveneutralvertices.begin(),current_negative->positiveneutralvertices.end()); 1724 1725 // And we set the will be split property to true 1726 will_be_split = true; 1545 1727 } 1546 1547 current_positive->set_state(0,SPLIT);1548 for_splitting[k].push_back(current_positive);1549 1550 will_be_split = true;1551 1728 } 1552 1729 1553 if((current_positive->get_state(SPLIT)==0&&!current_positive->totally_neutral)||(current_negative->get_state(SPLIT)==0&&!current_negative->totally_neutral)) 1554 { 1555 current_positive->negativechildren.insert(current_positive->negativechildren.end(),current_negative->negativechildren.begin(),current_negative->negativechildren.end()); 1556 1557 current_positive->positivechildren.insert(current_positive->positivechildren.end(),current_negative->positivechildren.begin(),current_negative->positivechildren.end()); 1558 1559 current_positive->neutralchildren.insert(current_positive->neutralchildren.end(),current_negative->neutralchildren.begin(),current_negative->neutralchildren.end()); 1560 1561 switch((*merge_ref)->get_state(SPLIT)) 1562 { 1563 case -1: 1564 current_positive->negativechildren.remove(*merge_ref); 1565 break; 1566 case 0: 1567 current_positive->neutralchildren.remove(*merge_ref); 1568 break; 1569 case 1: 1570 current_positive->positivechildren.remove(*merge_ref); 1571 break; 1572 } 1573 1574 /* 1575 current_positive->totallyneutralchildren.insert(current_negative->totallyneutralchildren.begin(),current_negative->totallyneutralchildren.end()); 1576 1577 current_positive->totallyneutralchildren.erase(*merge_ref); 1578 */ 1579 1580 current_positive->totallyneutralgrandchildren.insert(current_negative->totallyneutralgrandchildren.begin(),current_negative->totallyneutralgrandchildren.end()); 1581 1582 current_positive->negativeneutralvertices.insert(current_negative->negativeneutralvertices.begin(),current_negative->negativeneutralvertices.end()); 1583 current_positive->positiveneutralvertices.insert(current_negative->positiveneutralvertices.begin(),current_negative->positiveneutralvertices.end()); 1584 1585 will_be_split = true; 1586 } 1587 else 1730 // If the polyhedron will not be split (both parts are totally neutral or neither of them 1731 // was classified SPLIT neutral), we clear all the lists holding the SPLIT information for 1732 // them to be ready to reuse. 
1733 if(!will_be_split) 1588 1734 { 1589 1735 current_positive->positivechildren.clear(); 1590 1736 current_positive->negativechildren.clear(); 1591 current_positive->neutralchildren.clear(); 1592 // current_positive->totallyneutralchildren.clear(); 1737 current_positive->neutralchildren.clear(); 1593 1738 current_positive->totallyneutralgrandchildren.clear(); 1594 1739 current_positive->positiveneutralvertices.clear(); … … 1596 1741 current_positive->totally_neutral = NULL; 1597 1742 current_positive->kids_rel_addresses.clear(); 1598 } 1743 } 1599 1744 1600 current_positive->vertices.insert(current_negative->vertices.begin(),current_negative->vertices.end()); 1601 1602 1603 for(set<vertex*>::iterator vert_ref = (*merge_ref)->vertices.begin();vert_ref!=(*merge_ref)->vertices.end();vert_ref++) 1604 { 1605 if((*vert_ref)->get_multiplicity()==1) 1606 { 1607 current_positive->vertices.erase(*vert_ref); 1608 1609 if(will_be_split) 1610 { 1611 current_positive->negativeneutralvertices.erase(*vert_ref); 1612 current_positive->positiveneutralvertices.erase(*vert_ref); 1613 } 1614 } 1615 } 1616 1617 if(current_negative->get_state(SPLIT)==0&&!current_negative->totally_neutral) 1618 { 1619 for_splitting[k].remove(current_negative); 1620 } 1621 1622 1623 1745 // If both the merged polyhedrons are totally neutral, we have to rewire the addressing 1746 // in the grandparents from the negative to the positive (merged) polyhedron. 1624 1747 if(current_positive->totally_neutral) 1625 1748 { … … 1631 1754 } 1632 1755 1756 // We clear the grandparents list for further reuse. 1633 1757 current_positive->grandparents.clear(); 1634 1758 1759 // Triangulate the newly created polyhedron and compute its normalization integral if the 1760 // polyhedron is a toprow. 1635 1761 normalization_factor += current_positive->triangulate(k==for_splitting.size()-1 && !will_be_split); 1636 1762 1763 // Delete the negative polyhedron from the Hasse diagram (rewire all the connections) 1637 1764 statistic.delete_polyhedron(k,current_negative); 1638 1765 1766 // Delete the negative polyhedron object 1639 1767 delete current_negative; 1640 1768 1769 // ********************************************* 1770 // Here we treat the deletion of the merger. 1771 // ********************************************* 1772 1773 // We erase the vertices of the merger from all the respective lists. 
1774 for(set<vertex*>::iterator vert_ref = (*merge_ref)->vertices.begin();vert_ref!=(*merge_ref)->vertices.end();vert_ref++) 1775 { 1776 if((*vert_ref)->get_multiplicity()==1) 1777 { 1778 current_positive->vertices.erase(*vert_ref); 1779 1780 if(will_be_split) 1781 { 1782 current_positive->negativeneutralvertices.erase(*vert_ref); 1783 current_positive->positiveneutralvertices.erase(*vert_ref); 1784 } 1785 } 1786 } 1787 1788 // We remove the connection to the merger from the merger's children 1641 1789 for(list<polyhedron*>::iterator child_ref = (*merge_ref)->children.begin();child_ref!=(*merge_ref)->children.end();child_ref++) 1642 1790 { 1643 1791 (*child_ref)->parents.remove(*merge_ref); 1644 } 1645 1646 /* 1647 for(list<polyhedron*>::iterator parent_ref = (*merge_ref)->parents.begin();parent_ref!=(*merge_ref)->parents.end();parent_ref++) 1648 { 1649 (*parent_ref)->positivechildren.remove(*merge_ref); 1650 (*parent_ref)->negativechildren.remove(*merge_ref); 1651 (*parent_ref)->neutralchildren.remove(*merge_ref); 1652 (*parent_ref)->children.remove(*merge_ref); 1653 } 1654 */ 1655 1792 } 1793 1794 // We remove the connection to the merger from the merger's grandchildren 1656 1795 for(set<polyhedron*>::iterator grand_ch_ref = (*merge_ref)->totallyneutralgrandchildren.begin();grand_ch_ref!=(*merge_ref)->totallyneutralgrandchildren.end();grand_ch_ref++) 1657 1796 { 1658 1797 (*grand_ch_ref)->grandparents.erase(*merge_ref); 1659 1798 } 1660 1661 1799 1800 // We remove the connection to the merger from the merger's grandparents 1662 1801 for(set<polyhedron*>::iterator grand_p_ref = (*merge_ref)->grandparents.begin();grand_p_ref!=(*merge_ref)->grandparents.end();grand_p_ref++) 1663 1802 { … … 1665 1804 } 1666 1805 1806 // We remove the merger from the Hasse diagram 1667 1807 statistic.delete_polyhedron(k-1,*merge_ref); 1668 1669 for_splitting[k-1].remove(*merge_ref); 1670 // for_merging[k].remove(*loc_merge_ref);1671 1808 // And we delete the merger from the list of polyhedrons to be split 1809 for_splitting[k-1].remove(*merge_ref); 1810 // If the merger is a vertex with multiplicity 1, we add it to the list of vertices to get 1811 // rid of at the end of the merging procedure. 1672 1812 if(k==1) 1673 1813 { 1674 1814 vertices_to_be_reduced.insert((vertex*)(*merge_ref)); 1675 } 1676 /* 1677 else 1678 { 1679 delete (*loc_merge_ref); 1680 } 1681 */ 1815 } 1682 1816 } 1683 1817 } 1684 1818 1819 // And we go to the next row 1685 1820 k++; 1686 1821 1687 1822 } 1688 1823 1824 // At the end of the merging procedure, we delete all the merger's objects. These should now be already 1825 // disconnected from the Hasse diagram. 1689 1826 for(int i = 1;i<for_merging.size();i++) 1690 1827 { … … 1695 1832 } 1696 1833 1834 // We also treat the vertices that we called to be reduced by either lowering their multiplicity or 1835 // deleting them in case the already have multiplicity 1. 1697 1836 for(set<vertex*>::iterator vert_ref = vertices_to_be_reduced.begin();vert_ref!=vertices_to_be_reduced.end();vert_ref++) 1698 1837 { … … 1707 1846 } 1708 1847 1848 // Finally we delete the condition object 1709 1849 delete condition_to_remove; 1710 1850 } 1711 1851 1712 1852 // This is a control check for errors in the merging procedure. 
1853 /* 1713 1854 vector<int> sizevector; 1714 1855 for(int s = 0;s<statistic.size();s++) … … 1717 1858 cout << statistic.row_size(s) << ", "; 1718 1859 } 1719 1720 1721 cout << endl; 1722 1860 cout << endl; 1861 */ 1862 1863 // After the merging is finished or if there is no condition to be removed from the conditions list, 1864 // we split the location parameter space with respect to the condition to be added or SPLIT condition. 1723 1865 if(should_add) 1724 1866 { 1725 1867 cout << "Splitting." << endl; 1726 1868 1727 int k = 1; 1728 int counter = 0; 1729 1869 // We reset the row counter 1870 int k = 1; 1871 1872 // Since the bottom row of the for_splitting list is empty - we can't split vertices, we start from 1873 // the second row from the bottom - the row containing segments 1730 1874 vector<list<polyhedron*>>::iterator beginning_ref = ++for_splitting.begin(); 1731 1875 1876 // We loop through the rows 1732 1877 for(vector<list<polyhedron*>>::iterator vert_ref = beginning_ref;vert_ref<for_splitting.end();vert_ref++) 1733 1878 { 1734 1879 1880 // and we loop through the polyhedrons in each row 1735 1881 for(list<polyhedron*>::reverse_iterator split_ref = vert_ref->rbegin();split_ref != vert_ref->rend();split_ref++) 1736 1882 { 1737 counter++;1738 1883 // If we split a polyhedron by a SPLIT condition hyperplane, in the crossection of the two a 1884 // new polyhedron is created. It is totally neutral, because it lays in the condition hyperplane. 1739 1885 polyhedron* new_totally_neutral_child; 1740 1886 1887 // For clear notation we rename the value referenced by split_ref iterator 1741 1888 polyhedron* current_polyhedron = (*split_ref); 1742 1889 1890 // If the current polyhedron is a segment, the new totally neutral child will be a vertex and 1891 // we have to assign coordinates to it. 1743 1892 if(vert_ref == beginning_ref) 1744 1893 { 1894 // The coordinates will be computed from the equation of the straight line containing the 1895 // segment, obtained from the coordinates of the endpoints of the segment 1745 1896 vec coordinates1 = ((vertex*)(*(current_polyhedron->children.begin())))->get_coordinates(); 1746 1897 vec coordinates2 = ((vertex*)(*(++current_polyhedron->children.begin())))->get_coordinates(); 1747 1898 1899 // For computation of the scalar product with the SPLIT condition, we need extended coordinates 1748 1900 vec extended_coord2 = coordinates2; 1749 1901 extended_coord2.ins(0,-1.0); 1750 1902 1903 // We compute the parameter t an element of (0,1) describing where the segment is cut 1751 1904 double t = (-toadd*extended_coord2)/(toadd(1,toadd.size()-1)*(coordinates1-coordinates2)); 1752 1905 1906 // And compute the coordinates as convex sum of the coordinates 1753 1907 vec new_coordinates = (1-t)*coordinates2+t*coordinates1; 1754 1908 1755 1909 // cout << "c1:" << coordinates1 << endl << "c2:" << coordinates2 << endl << "nc:" << new_coordinates << endl; 1756 1910 1911 // We create a new vertex object 1757 1912 vertex* neutral_vertex = new vertex(new_coordinates); 1758 1913 1914 // and assign it to the new totally neutral child 1759 1915 new_totally_neutral_child = neutral_vertex; 1760 1916 } 1761 1917 else 1762 1918 { 1919 // If the split polyhedron isn't a segment, the totally neutral child will be a general 1920 // polyhedron. Because a toprow inherits from polyhedron, we make it a toprow for further 1921 // universality \TODO: is this really needed? 
1761 1917 else
1762 1918 {
1919 // If the split polyhedron isn't a segment, the totally neutral child will be a general
1920 // polyhedron. Because a toprow inherits from polyhedron, we make it a toprow for further
1921 // universality. \TODO: is this really needed?
1763 1922 toprow* neutral_toprow = new toprow();
1764 1923
1924 // A toprow needs a valid condition
1765 1925 neutral_toprow->condition_sum = ((toprow*)current_polyhedron)->condition_sum; // this used to be: zeros(number_of_parameters+1);
1766 1926 neutral_toprow->condition_order = ((toprow*)current_polyhedron)->condition_order+1;
1767 1927
1928 // We assign it to the totally neutral child variable
1768 1929 new_totally_neutral_child = neutral_toprow;
1769 1930 }
1770 1931
1932 // We assign the current SPLIT condition as a parent condition of the totally neutral child, and the
1933 // child also inherits all the parent conditions of the split polyhedron
1771 1934 new_totally_neutral_child->parentconditions.insert(current_polyhedron->parentconditions.begin(),current_polyhedron->parentconditions.end());
1772 1935 new_totally_neutral_child->parentconditions.insert(condition_to_add);
1773 1936
1937 // The totally neutral child is a polyhedron belonging to the my_emlig distribution
1774 1938 new_totally_neutral_child->my_emlig = this;
1775 1939
1940 // We connect the totally neutral child to all totally neutral grandchildren of the polyhedron
1941 // being split. This is what we need the totally neutral grandchildren for. It complicates the
1942 // algorithm, because it is a second-level dependence (as opposed to the children <-> parents
1943 // relations), but it is needed.
1776 1944 new_totally_neutral_child->children.insert(new_totally_neutral_child->children.end(),
1777 1945 current_polyhedron->totallyneutralgrandchildren.begin(),
1778 1946 current_polyhedron->totallyneutralgrandchildren.end());
1779 1947 1780 1781 1782 // cout << ((toprow*)current_polyhedron)->condition << endl << toadd << endl;
1948 // We also create the reciprocal connection from the totally neutral grandchildren to the
1949 // new totally neutral child and add all the vertices of the totally neutral grandchildren
1950 // to the set of vertices of the new totally neutral child.
1951 for(set<polyhedron*>::iterator grand_ref = current_polyhedron->totallyneutralgrandchildren.begin(); grand_ref != current_polyhedron->totallyneutralgrandchildren.end();grand_ref++)
1952 {
1953 // parent connection
1954 (*grand_ref)->parents.push_back(new_totally_neutral_child);
1955
1956 // vertices
1957 new_totally_neutral_child->vertices.insert((*grand_ref)->vertices.begin(),(*grand_ref)->vertices.end());
1958 }
1959
1960 // We create a condition sum for the split parts of the split polyhedron
1783 1961 vec cur_pos_condition = ((toprow*)current_polyhedron)->condition_sum;
1784 1962 vec cur_neg_condition = ((toprow*)current_polyhedron)->condition_sum;
1785 1963
1964 // If the split polyhedron is a toprow, we update the condition sum using the SPLIT
1965 // condition. The classification of the intermediate-row polyhedrons as toprows probably isn't
1966 // necessary and could be changed for elegance, but it is here for historical reasons.
1786 1967 if(k == number_of_parameters)
1787 1968 {
… …
1790 1971 }
1791 1972
1973 // We create the positive and negative parts of the split polyhedron completely from scratch,
1974 // using the condition sum constructed earlier. This is different from the merging part, where
1975 // we reused one of the parts to create the merged entity. This way, we don't have to
1976 // clean up old information from the split parts and the operation is more symmetrical.
1792 1977 toprow* positive_poly = new toprow(cur_pos_condition, ((toprow*)current_polyhedron)->condition_order+1);
1793 1978 toprow* negative_poly = new toprow(cur_neg_condition, ((toprow*)current_polyhedron)->condition_order+1);
1794 1979
1980 // Set the new SPLIT positive and negative parts of the split polyhedron as parents of the new
1981 // totally neutral child
1982 new_totally_neutral_child->parents.push_back(positive_poly);
1983 new_totally_neutral_child->parents.push_back(negative_poly);
1984
1985 // and the new totally neutral child as a child of the SPLIT positive and negative parts
1986 // of the split polyhedron
1987 positive_poly->children.push_back(new_totally_neutral_child);
1988 negative_poly->children.push_back(new_totally_neutral_child);
1989
1990 // The new polyhedrons belong to my_emlig
1795 1991 positive_poly->my_emlig = this;
1796 1992 negative_poly->my_emlig = this;
1797 1993
1994 // Parent conditions of the new polyhedrons are the same as the parent conditions of the split polyhedron
1798 1995 positive_poly->parentconditions.insert(current_polyhedron->parentconditions.begin(),current_polyhedron->parentconditions.end());
1799 1996 negative_poly->parentconditions.insert(current_polyhedron->parentconditions.begin(),current_polyhedron->parentconditions.end());
1800 1997
1801 for(set<polyhedron*>::iterator grand_ref = current_polyhedron->totallyneutralgrandchildren.begin(); grand_ref != current_polyhedron->totallyneutralgrandchildren.end();grand_ref++)
1802 {
1803 (*grand_ref)->parents.push_back(new_totally_neutral_child);
1804
1805 // this wasn't here before. should it be here?
1806 //positive_poly->totallyneutralgrandchildren.insert(*grand_ref);
1807 //negative_poly->totallyneutralgrandchildren.insert(*grand_ref);
1808
1809 //(*grand_ref)->grandparents.insert(positive_poly);
1810 //(*grand_ref)->grandparents.insert(negative_poly);
1811
1812 new_totally_neutral_child->vertices.insert((*grand_ref)->vertices.begin(),(*grand_ref)->vertices.end());
1813 }
1814
1815 positive_poly->children.push_back(new_totally_neutral_child);
1816 negative_poly->children.push_back(new_totally_neutral_child);
1817 1818 1998 // We loop through the parents of the split polyhedron
1819 1999 for(list<polyhedron*>::iterator parent_ref = current_polyhedron->parents.begin();parent_ref!=current_polyhedron->parents.end();parent_ref++)
1820 2000 {
1821 (*parent_ref)->totallyneutralgrandchildren.insert(new_totally_neutral_child);
1822 // new_totally_neutral_child->grandparents.insert(*parent_ref);
1823
2001 // We set the new totally neutral child to be a totally neutral grandchild of the parent
2002 (*parent_ref)->totallyneutralgrandchildren.insert(new_totally_neutral_child);
2003
2004 // We remove the split polyhedron from both lists in which it should be present
1824 2005 (*parent_ref)->neutralchildren.remove(current_polyhedron);
1825 2006 (*parent_ref)->children.remove(current_polyhedron);
1826 2007
2008 // And instead we set the newly created SPLIT negative and positive parts as children of
2009 // the parent (maybe the parent will be split once we get to treating its row, but that
2010 // should be taken care of later) and add them to the classified positive and negative
2011 // children lists accordingly.
1827 2012 (*parent_ref)->children.push_back(positive_poly);
1828 2013 (*parent_ref)->children.push_back(negative_poly);
… …
1831 2016 }
1832 2017
2018 // Here we set the reciprocal connections to the ones set in the previous loop. All the parents
2019 // of the currently split polyhedron are added as parents of the SPLIT negative and positive parts.
2020
2021 // for the positive part...
1833 2022 positive_poly->parents.insert(positive_poly->parents.end(),
1834 2023 current_polyhedron->parents.begin(),
1835 2024 current_polyhedron->parents.end());
1836 2025 // for the negative part...
1837 2026 negative_poly->parents.insert(negative_poly->parents.end(),
1838 2027 current_polyhedron->parents.begin(),
1839 current_polyhedron->parents.end());
1840 1841 1842 1843 new_totally_neutral_child->parents.push_back(positive_poly);
1844 new_totally_neutral_child->parents.push_back(negative_poly);
1845
2028 current_polyhedron->parents.end());
2029
2030 // We loop through the positive children of the split polyhedron, remove it from their parents
2031 // lists and add the SPLIT positive part as their parent.
1846 2032 for(list<polyhedron*>::iterator child_ref = current_polyhedron->positivechildren.begin();child_ref!=current_polyhedron->positivechildren.end();child_ref++)
1847 2033 {
… …
1850 2036 }
1851 2037
2038 // And again we set the reciprocal connections from the SPLIT positive part by adding
2039 // all the positive children of the split polyhedron to its list of children.
1852 2040 positive_poly->children.insert(positive_poly->children.end(),
1853 2041 current_polyhedron->positivechildren.begin(),
1854 2042 current_polyhedron->positivechildren.end());
1855 2043
2044 // We loop through the negative children of the split polyhedron, remove it from their parents
2045 // lists and add the SPLIT negative part as their parent.
1856 2046 for(list<polyhedron*>::iterator child_ref = current_polyhedron->negativechildren.begin();child_ref!=current_polyhedron->negativechildren.end();child_ref++)
1857 2047 {
… …
1860 2050 }
1861 2051
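Every connection in the Hasse diagram is stored twice, once in the parent's children list and once in the child's parents list, so a small reciprocity check is a useful debugging aid when rewiring like this. Below is a minimal self-contained sketch; it is an editorial illustration only, not part of the changeset, and the node struct is a simplified stand-in for polyhedron.

#include <algorithm>
#include <list>

// Simplified stand-in for polyhedron: only the two reciprocal lists are modelled.
struct node
{
	std::list<node*> parents;
	std::list<node*> children;
};

// Returns true if every parent/child connection is present in both directions.
bool connections_are_reciprocal(const std::list<node*>& nodes)
{
	for(std::list<node*>::const_iterator n = nodes.begin(); n != nodes.end(); ++n)
	{
		// each child must list *n among its parents
		for(std::list<node*>::const_iterator c = (*n)->children.begin(); c != (*n)->children.end(); ++c)
		{
			if(std::find((*c)->parents.begin(), (*c)->parents.end(), *n) == (*c)->parents.end())
				return false;
		}
		// each parent must list *n among its children
		for(std::list<node*>::const_iterator p = (*n)->parents.begin(); p != (*n)->parents.end(); ++p)
		{
			if(std::find((*p)->children.begin(), (*p)->children.end(), *n) == (*p)->children.end())
				return false;
		}
	}
	return true;
}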
2052 // And again we set the reciprocal connections from the SPLIT negative part by adding
2053 // all the negative children of the split polyhedron to its list of children.
1862 2054 negative_poly->children.insert(negative_poly->children.end(),
1863 2055 current_polyhedron->negativechildren.begin(),
1864 2056 current_polyhedron->negativechildren.end());
1865 2057
2058 // The vertices of the SPLIT positive part are the union of the positive and neutral vertices of
2059 // the split polyhedron and the vertices of the new neutral child
1866 2060 positive_poly->vertices.insert(current_polyhedron->positiveneutralvertices.begin(),current_polyhedron->positiveneutralvertices.end());
1867 2061 positive_poly->vertices.insert(new_totally_neutral_child->vertices.begin(),new_totally_neutral_child->vertices.end());
1868 2062
2063 // The vertices of the SPLIT negative part are the union of the negative and neutral vertices of
2064 // the split polyhedron and the vertices of the new neutral child
1869 2065 negative_poly->vertices.insert(current_polyhedron->negativeneutralvertices.begin(),current_polyhedron->negativeneutralvertices.end());
1870 2066 negative_poly->vertices.insert(new_totally_neutral_child->vertices.begin(),new_totally_neutral_child->vertices.end());
1871 2067
2068 // Triangulate the new totally neutral child without computing its normalization integral
2069 // (because the child is never a toprow polyhedron)
1872 2070 new_totally_neutral_child->triangulate(false);
1873 2071
2072 // Triangulate the new SPLIT positive and negative parts of the split polyhedron and compute
2073 // their normalization integrals if they are toprow polyhedrons
1874 2074 normalization_factor += positive_poly->triangulate(k==for_splitting.size()-1);
1875 2075 normalization_factor += negative_poly->triangulate(k==for_splitting.size()-1);
1876 2076
1877 statistic.append_polyhedron(k-1, new_totally_neutral_child);
1878 1879 1880 2077 // Insert all the newly created polyhedrons into the Hasse diagram
2078 statistic.append_polyhedron(k-1, new_totally_neutral_child);
1881 2079 statistic.insert_polyhedron(k, positive_poly, current_polyhedron);
1882 2080 statistic.insert_polyhedron(k, negative_poly, current_polyhedron);
1883 2081
2082 // and delete the split polyhedron from the diagram
1884 2083 statistic.delete_polyhedron(k, current_polyhedron);
1885 2084
2085 // and also delete its object from memory
1886 2086 delete current_polyhedron;
1887 2087 }
1888 2088
2089 // Go to a higher row of the for_splitting list
1889 2090 k++;
1890 2091 }
… …
1905 2106 // cout << "Normalization factor: " << normalization_factor << endl;
1906 2107
1907 log_nc = log(normalization_factor); // + logfact(condition_order-number_of_parameters-2)-(condition_order-number_of_parameters-2)*log(2.0);
1908
1909 /*
1910 if(condition_order == 20)
1911 step_me(88);
1912 */
1913
1914 //cout << "Factorial factor: " << condition_order-number_of_parameters-2 << endl;
1915
1916 /*
1917 cout << "part1: " << log(normalization_factor) << endl;
1918 cout << "part2: " << logfact(condition_order-number_of_parameters-2) << endl;
1919 pause(1);
1920 */
1921
2108 last_log_nc = log_nc;
2109 log_nc = log(normalization_factor);
1922 2110
1923 2111 /*
… …
1929 2117
1930 2118 // step_me(101);
1931 2119 }
2120 2121 double _ll()
2122 {
2123 if(last_log_nc!=NULL)
2124 {
2125 return log_nc - last_log_nc;
2126 }
2127 else
2128 {
2129 throw new exception("You cannot ask for a log-likelihood difference for a prior!");
2130 }
1932 2131 }
1933 2132
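Editorial note on the members added above (not part of the changeset): last_log_nc caches the log normalization constant from before the latest conditioning step, so the value returned by _ll() is the increment ln I_t - ln I_{t-1} of the log normalization constant. This increment plays the role of the one-step predictive log-likelihood of the most recent data record, which is what the calling code accumulates when comparing models; asking for it for a prior, before any conditioning has happened, is treated as an error. A minimal usage sketch, kept entirely in comments because the constructor arguments and the update mechanism are placeholders:

// Editorial sketch only -- arguments and the conditioning step are hypothetical.
//
//     emlig posterior(2, 0.01, 0.01, 5);   // 2 parameters, alpha/sigma deviations, nu
//     ...                                  // condition the distribution on data records
//     double pred_ll = posterior._ll();    // ln I_t - ln I_{t-1} after the latest record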
… …
2489 2688
2490 2689 /// A method for creating a plain default statistic representing only the range of the parameter space.
2491 void create_statistic(int number_of_parameters, double soft_prior_parameter)
2690 void create_statistic(int number_of_parameters, double alpha_deviation, double sigma_deviation)
2492 2691 {
2493 2692 /*
… …
2627 2826 {
2628 2827 vec1 = ((toprow*)horiz_ref)->condition_sum;
2629 vec1.ins(vec1.size(),-soft_prior_parameter);
2828 vec1.ins(vec1.size(),-alpha_deviation);
2630 2829
2631 vec2 = ((toprow*)horiz_ref)->condition_sum;
2632 vec2.ins(vec2.size(),soft_prior_parameter);
2830 vec2 = ((toprow*)horiz_ref)->condition_sum;
2831 vec2.ins(vec2.size(),alpha_deviation);
2633 2832 }
2634 2833 else
2635 2834 {
2636 vec1.ins(0,-soft_prior_parameter);
2637 vec2.ins(0,soft_prior_parameter);
2638
2639 vec1.ins(0, 0);
2640 vec2.ins(0, 0);
2835 vec1.ins(0,-alpha_deviation);
2836 vec2.ins(0,alpha_deviation);
2837
2838 vec1.ins(0,-sigma_deviation);
2839 vec2.ins(0,-sigma_deviation);
2641 2840 }
2642 2841
… …
2816 3015 emlig* posterior;
2817 3016
2818 RARX(int number_of_parameters, const int window_size, bool has_constant)//:BM()
3017 RARX(int number_of_parameters, const int window_size, bool has_constant, double alpha_deviation, double sigma_deviation, int nu)//:BM()
2819 3018 {
2820 3019 this->has_constant = has_constant;
2821 3020
2822 posterior = new emlig(number_of_parameters,0.141);
3021 posterior = new emlig(number_of_parameters,alpha_deviation,sigma_deviation,nu);
3022
3023 this->window_size = window_size;
3024 };
3025
3026 RARX(int number_of_parameters, const int window_size, bool has_constant)//:BM()
3027 {
3028 this->has_constant = has_constant;
3029
3030 posterior = new emlig(number_of_parameters,1.0,1.0,number_of_parameters+3);
2823 3031
2824 3032 this->window_size = window_size;